/*
 * Per-architecture memory barriers (mb/wmb/rmb), cpu_relax() and
 * syscall-number fallbacks for the perf tools.
 * (Recovered from a git-blame web scrape; blame table header removed.)
 */
1 | #ifndef _PERF_SYS_H |
2 | #define _PERF_SYS_H | |
3 | ||
4 | #include <asm/unistd.h> | |
5 | ||
6 | #if defined(__i386__) | |
7 | #define mb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory") | |
8 | #define wmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory") | |
9 | #define rmb() asm volatile("lock; addl $0,0(%%esp)" ::: "memory") | |
10 | #define cpu_relax() asm volatile("rep; nop" ::: "memory"); | |
11 | #define CPUINFO_PROC "model name" | |
12 | #ifndef __NR_perf_event_open | |
13 | # define __NR_perf_event_open 336 | |
14 | #endif | |
15 | #ifndef __NR_futex | |
16 | # define __NR_futex 240 | |
17 | #endif | |
18 | #ifndef __NR_gettid | |
19 | # define __NR_gettid 224 | |
20 | #endif | |
21 | #endif | |
22 | ||
23 | #if defined(__x86_64__) | |
24 | #define mb() asm volatile("mfence" ::: "memory") | |
25 | #define wmb() asm volatile("sfence" ::: "memory") | |
26 | #define rmb() asm volatile("lfence" ::: "memory") | |
27 | #define cpu_relax() asm volatile("rep; nop" ::: "memory"); | |
28 | #define CPUINFO_PROC "model name" | |
29 | #ifndef __NR_perf_event_open | |
30 | # define __NR_perf_event_open 298 | |
31 | #endif | |
32 | #ifndef __NR_futex | |
33 | # define __NR_futex 202 | |
34 | #endif | |
35 | #ifndef __NR_gettid | |
36 | # define __NR_gettid 186 | |
37 | #endif | |
38 | #endif | |
39 | ||
40 | #ifdef __powerpc__ | |
41 | #include "../../arch/powerpc/include/uapi/asm/unistd.h" | |
42 | #define mb() asm volatile ("sync" ::: "memory") | |
43 | #define wmb() asm volatile ("sync" ::: "memory") | |
44 | #define rmb() asm volatile ("sync" ::: "memory") | |
45 | #define CPUINFO_PROC "cpu" | |
46 | #endif | |
47 | ||
48 | #ifdef __s390__ | |
49 | #define mb() asm volatile("bcr 15,0" ::: "memory") | |
50 | #define wmb() asm volatile("bcr 15,0" ::: "memory") | |
51 | #define rmb() asm volatile("bcr 15,0" ::: "memory") | |
52 | #endif | |
53 | ||
54 | #ifdef __sh__ | |
55 | #if defined(__SH4A__) || defined(__SH5__) | |
56 | # define mb() asm volatile("synco" ::: "memory") | |
57 | # define wmb() asm volatile("synco" ::: "memory") | |
58 | # define rmb() asm volatile("synco" ::: "memory") | |
59 | #else | |
60 | # define mb() asm volatile("" ::: "memory") | |
61 | # define wmb() asm volatile("" ::: "memory") | |
62 | # define rmb() asm volatile("" ::: "memory") | |
63 | #endif | |
64 | #define CPUINFO_PROC "cpu type" | |
65 | #endif | |
66 | ||
67 | #ifdef __hppa__ | |
68 | #define mb() asm volatile("" ::: "memory") | |
69 | #define wmb() asm volatile("" ::: "memory") | |
70 | #define rmb() asm volatile("" ::: "memory") | |
71 | #define CPUINFO_PROC "cpu" | |
72 | #endif | |
73 | ||
74 | #ifdef __sparc__ | |
75 | #ifdef __LP64__ | |
76 | #define mb() asm volatile("ba,pt %%xcc, 1f\n" \ | |
77 | "membar #StoreLoad\n" \ | |
78 | "1:\n":::"memory") | |
79 | #else | |
80 | #define mb() asm volatile("":::"memory") | |
81 | #endif | |
82 | #define wmb() asm volatile("":::"memory") | |
83 | #define rmb() asm volatile("":::"memory") | |
84 | #define CPUINFO_PROC "cpu" | |
85 | #endif | |
86 | ||
87 | #ifdef __alpha__ | |
88 | #define mb() asm volatile("mb" ::: "memory") | |
89 | #define wmb() asm volatile("wmb" ::: "memory") | |
90 | #define rmb() asm volatile("mb" ::: "memory") | |
91 | #define CPUINFO_PROC "cpu model" | |
92 | #endif | |
93 | ||
94 | #ifdef __ia64__ | |
95 | #define mb() asm volatile ("mf" ::: "memory") | |
96 | #define wmb() asm volatile ("mf" ::: "memory") | |
97 | #define rmb() asm volatile ("mf" ::: "memory") | |
98 | #define cpu_relax() asm volatile ("hint @pause" ::: "memory") | |
99 | #define CPUINFO_PROC "model name" | |
100 | #endif | |
101 | ||
102 | #ifdef __arm__ | |
103 | /* | |
104 | * Use the __kuser_memory_barrier helper in the CPU helper page. See | |
105 | * arch/arm/kernel/entry-armv.S in the kernel source for details. | |
106 | */ | |
107 | #define mb() ((void(*)(void))0xffff0fa0)() | |
108 | #define wmb() ((void(*)(void))0xffff0fa0)() | |
109 | #define rmb() ((void(*)(void))0xffff0fa0)() | |
110 | #define CPUINFO_PROC "Processor" | |
111 | #endif | |
112 | ||
113 | #ifdef __aarch64__ | |
114 | #define mb() asm volatile("dmb ish" ::: "memory") | |
115 | #define wmb() asm volatile("dmb ishst" ::: "memory") | |
116 | #define rmb() asm volatile("dmb ishld" ::: "memory") | |
117 | #define cpu_relax() asm volatile("yield" ::: "memory") | |
118 | #endif | |
119 | ||
120 | #ifdef __mips__ | |
121 | #define mb() asm volatile( \ | |
122 | ".set mips2\n\t" \ | |
123 | "sync\n\t" \ | |
124 | ".set mips0" \ | |
125 | : /* no output */ \ | |
126 | : /* no input */ \ | |
127 | : "memory") | |
128 | #define wmb() mb() | |
129 | #define rmb() mb() | |
130 | #define CPUINFO_PROC "cpu model" | |
131 | #endif | |
132 | ||
133 | #ifdef __arc__ | |
134 | #define mb() asm volatile("" ::: "memory") | |
135 | #define wmb() asm volatile("" ::: "memory") | |
136 | #define rmb() asm volatile("" ::: "memory") | |
137 | #define CPUINFO_PROC "Processor" | |
138 | #endif | |
139 | ||
140 | #ifdef __metag__ | |
141 | #define mb() asm volatile("" ::: "memory") | |
142 | #define wmb() asm volatile("" ::: "memory") | |
143 | #define rmb() asm volatile("" ::: "memory") | |
144 | #define CPUINFO_PROC "CPU" | |
145 | #endif | |
146 | ||
147 | #ifdef __xtensa__ | |
148 | #define mb() asm volatile("memw" ::: "memory") | |
149 | #define wmb() asm volatile("memw" ::: "memory") | |
150 | #define rmb() asm volatile("" ::: "memory") | |
151 | #define CPUINFO_PROC "core ID" | |
152 | #endif | |
153 | ||
154 | #ifdef __tile__ | |
155 | #define mb() asm volatile ("mf" ::: "memory") | |
156 | #define wmb() asm volatile ("mf" ::: "memory") | |
157 | #define rmb() asm volatile ("mf" ::: "memory") | |
158 | #define cpu_relax() asm volatile ("mfspr zero, PASS" ::: "memory") | |
159 | #define CPUINFO_PROC "model name" | |
160 | #endif | |
161 | ||
162 | #define barrier() asm volatile ("" ::: "memory") | |
163 | ||
164 | #ifndef cpu_relax | |
165 | #define cpu_relax() barrier() | |
166 | #endif | |
167 | ||
168 | #endif /* _PERF_SYS_H */ |