]>
Commit | Line | Data |
---|---|---|
26ef5c09 DG |
1 | #ifndef _ASM_POWERPC_CACHE_H |
2 | #define _ASM_POWERPC_CACHE_H | |
3 | ||
4 | #ifdef __KERNEL__ | |
5 | ||
26ef5c09 DG |
6 | |
7 | /* bytes per L1 cache line */ | |
8 | #if defined(CONFIG_8xx) || defined(CONFIG_403GCX) | |
9 | #define L1_CACHE_SHIFT 4 | |
10 | #define MAX_COPY_PREFETCH 1 | |
3dfa8773 KG |
11 | #elif defined(CONFIG_PPC_E500MC) |
12 | #define L1_CACHE_SHIFT 6 | |
13 | #define MAX_COPY_PREFETCH 4 | |
26ef5c09 | 14 | #elif defined(CONFIG_PPC32) |
26ef5c09 | 15 | #define MAX_COPY_PREFETCH 4 |
e7f75ad0 DK |
16 | #if defined(CONFIG_PPC_47x) |
17 | #define L1_CACHE_SHIFT 7 | |
18 | #else | |
19 | #define L1_CACHE_SHIFT 5 | |
20 | #endif | |
26ef5c09 DG |
21 | #else /* CONFIG_PPC64 */ |
22 | #define L1_CACHE_SHIFT 7 | |
f4329f2e | 23 | #define IFETCH_ALIGN_SHIFT 4 /* POWER8,9 */ |
26ef5c09 DG |
24 | #endif |
25 | ||
26 | #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) | |
27 | ||
28 | #define SMP_CACHE_BYTES L1_CACHE_BYTES | |
26ef5c09 | 29 | |
f4329f2e NP |
30 | #define IFETCH_ALIGN_BYTES (1 << IFETCH_ALIGN_SHIFT) |
31 | ||
26ef5c09 | 32 | #if defined(__powerpc64__) && !defined(__ASSEMBLY__) |
e2827fe5 BH |
33 | |
/*
 * Geometry of a single cache level.
 * NOTE(review): units are not shown in this header — presumably size,
 * line_size and block_size are in bytes; confirm at the site that
 * populates this structure.
 */
struct ppc_cache_info {
	u32 size;
	u32 line_size;
	u32 block_size;		/* L1 only */
	u32 log_block_size;	/* presumably log2(block_size) — confirm at init site */
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;		/* associativity */
};
43 | ||
/* Per-level cache descriptions used by the 64-bit kernel. */
struct ppc64_caches {
	struct ppc_cache_info l1d;	/* L1 data cache */
	struct ppc_cache_info l1i;	/* L1 instruction cache */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};
50 | ||
51 | extern struct ppc64_caches ppc64_caches; | |
52 | #endif /* __powerpc64__ && ! __ASSEMBLY__ */ | |
53 | ||
0ce63670 KH |
#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
ae3a197e | 66 | |
0ce63670 | 67 | #else |
54cb27a7 | 68 | #define __read_mostly __attribute__((__section__(".data..read_mostly"))) |
ae3a197e DH |
69 | |
#ifdef CONFIG_6xx
/*
 * L2CR/L3CR cache control register accessors.
 * NOTE(review): the 6xx implementations are defined elsewhere (extern) —
 * presumably in arch assembly; confirm before relying on their semantics.
 */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* Other parts: reads report 0 and writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
81 | ||
d6bfa02f CL |
/* dcbz: zero the data cache block containing addr (per the PowerPC ISA). */
static inline void dcbz(void *addr)
{
	/* "memory" clobber: the block's contents change behind the compiler's back. */
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
86 | ||
/* dcbi: invalidate (discard without writeback) the data cache block containing addr. */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
91 | ||
/* dcbf: flush (write back if dirty, then invalidate) the data cache block containing addr. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
96 | ||
/* dcbst: clean (write back if dirty, keep valid) the data cache block containing addr. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
ae3a197e | 101 | #endif /* !__ASSEMBLY__ */ |
26ef5c09 DG |
102 | #endif /* __KERNEL__ */ |
103 | #endif /* _ASM_POWERPC_CACHE_H */ |