#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__


/* bytes per L1 cache line */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
/* 8xx and 403GCX: 16-byte cache lines */
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
/* e500mc: 64-byte cache lines */
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
/* 47x: 128-byte cache lines, unlike other 32-bit parts */
#define L1_CACHE_SHIFT		7
#else
/* remaining 32-bit CPUs: 32-byte cache lines */
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
/* 64-bit: 128-byte cache lines */
#define L1_CACHE_SHIFT		7
#define	IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

/*
 * NOTE(review): IFETCH_ALIGN_SHIFT is only defined in the CONFIG_PPC64
 * branch above, so this macro is only usable on 64-bit configs — presumably
 * no 32-bit code references it; confirm against callers.
 */
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
#if defined(__powerpc64__) && !defined(__ASSEMBLY__)
/*
 * Runtime-discovered L1 cache geometry for 64-bit kernels; a single
 * global instance is defined elsewhere (see the extern below).
 */
struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size */
	u32	log_dline_size;		/* presumably log2(dline_size) — verify against setup code */
	u32	dlines_per_page;	/* d-cache lines per page */
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size */
	u32	log_iline_size;		/* presumably log2(iline_size) — verify against setup code */
	u32	ilines_per_page;	/* i-cache lines per page */
};

extern struct ppc64_caches ppc64_caches;
#endif /* __powerpc64__ && ! __ASSEMBLY__ */
#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync

#else
/* Place rarely-written data in its own section to avoid false sharing. */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_6xx
/* L2CR/L3CR register accessors — real implementations exist only on 6xx. */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* Non-6xx: no L2CR/L3CR; reads yield 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
/*
 * Single-cache-line primitives: each emits the corresponding PowerPC
 * data-cache instruction on the line containing @addr. The "memory"
 * clobber keeps the compiler from reordering memory accesses around them.
 */

/* Zero the cache line containing @addr (dcbz). */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

/* Invalidate the cache line containing @addr (dcbi); contents are discarded. */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

/* Flush the cache line containing @addr (dcbf): write back if dirty, then invalidate. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

/* Write back the cache line containing @addr (dcbst) without invalidating it. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */