/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_32_H
#define _ASM_X86_STRING_32_H

#ifdef __KERNEL__

/* Let gcc decide whether to inline or use the out of line functions */

#define __HAVE_ARCH_STRCPY
extern char *strcpy(char *dest, const char *src);

#define __HAVE_ARCH_STRNCPY
extern char *strncpy(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCAT
extern char *strcat(char *dest, const char *src);

#define __HAVE_ARCH_STRNCAT
extern char *strncat(char *dest, const char *src, size_t count);

#define __HAVE_ARCH_STRCMP
extern int strcmp(const char *cs, const char *ct);

#define __HAVE_ARCH_STRNCMP
extern int strncmp(const char *cs, const char *ct, size_t count);

#define __HAVE_ARCH_STRCHR
extern char *strchr(const char *s, int c);

#define __HAVE_ARCH_STRLEN
extern size_t strlen(const char *s);

static __always_inline void *__memcpy(void *to, const void *from, size_t n)
{
	int d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "movl %4,%%ecx\n\t"
		     "andl $3,%%ecx\n\t"
		     "jz 1f\n\t"
		     "rep ; movsb\n\t"
		     "1:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "g" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
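
/*
 * The constraints above do the setup: "0" (n / 4) preloads %ecx with
 * the dword count and "1"/"2" preload %edi/%esi with to/from, so
 * "rep ; movsl" copies n / 4 dwords.  %4 is the "g" (n) input: %ecx is
 * reloaded with n and masked to n & 3, and "rep ; movsb" copies the
 * 0-3 trailing bytes.  The early-clobber outputs ("=&c" etc.) tell gcc
 * those registers are modified, and the "memory" clobber keeps it from
 * caching buffer contents across the copy.
 */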

/*
 * This looks ugly, but the compiler can optimize it totally,
 * as the count is constant.
 */
static __always_inline void *__constant_memcpy(void *to, const void *from,
					       size_t n)
{
	long esi, edi;
	if (!n)
		return to;

	switch (n) {
	case 1:
		*(char *)to = *(char *)from;
		return to;
	case 2:
		*(short *)to = *(short *)from;
		return to;
	case 4:
		*(int *)to = *(int *)from;
		return to;
	case 3:
		*(short *)to = *(short *)from;
		*((char *)to + 2) = *((char *)from + 2);
		return to;
	case 5:
		*(int *)to = *(int *)from;
		*((char *)to + 4) = *((char *)from + 4);
		return to;
	case 6:
		*(int *)to = *(int *)from;
		*((short *)to + 2) = *((short *)from + 2);
		return to;
	case 8:
		*(int *)to = *(int *)from;
		*((int *)to + 1) = *((int *)from + 1);
		return to;
	}

	esi = (long)from;
	edi = (long)to;
	if (n >= 5 * 4) {
		/* large block: use rep prefix */
		int ecx;
		asm volatile("rep ; movsl"
			     : "=&c" (ecx), "=&D" (edi), "=&S" (esi)
			     : "0" (n / 4), "1" (edi), "2" (esi)
			     : "memory"
		);
	} else {
		/* small block: don't clobber ecx + smaller code */
		if (n >= 4 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 3 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 2 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
		if (n >= 1 * 4)
			asm volatile("movsl"
				     : "=&D"(edi), "=&S"(esi)
				     : "0"(edi), "1"(esi)
				     : "memory");
	}
	switch (n % 4) {
		/* tail */
	case 0:
		return to;
	case 1:
		asm volatile("movsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	case 2:
		asm volatile("movsw"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	default:
		asm volatile("movsw\n\tmovsb"
			     : "=&D"(edi), "=&S"(esi)
			     : "0"(edi), "1"(esi)
			     : "memory");
		return to;
	}
}
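
/*
 * Because n is a compile-time constant here, the whole function folds
 * away.  For example, __constant_memcpy(to, from, 6) should reduce to
 * a 4-byte store plus a 2-byte store, and __constant_memcpy(to, from,
 * 14) to three inline movsl instructions (the cascading n >= x * 4
 * tests all pass down to 12 bytes) followed by a movsw for the 2-byte
 * tail.
 */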

#define __HAVE_ARCH_MEMCPY
extern void *memcpy(void *, const void *, size_t);

#ifndef CONFIG_FORTIFY_SOURCE
#ifdef CONFIG_X86_USE_3DNOW

#include <asm/mmx.h>

/*
 * This CPU favours 3DNow strongly (eg AMD Athlon)
 */

static inline void *__constant_memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __constant_memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

static inline void *__memcpy3d(void *to, const void *from, size_t len)
{
	if (len < 512)
		return __memcpy(to, from, len);
	return _mmx_memcpy(to, from, len);
}

#define memcpy(t, f, n)				\
	(__builtin_constant_p((n))		\
	 ? __constant_memcpy3d((t), (f), (n))	\
	 : __memcpy3d((t), (f), (n)))
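
/*
 * __builtin_constant_p(n) is evaluated at compile time, so the macro
 * statically routes constant-size copies through __constant_memcpy3d
 * (and thus through the unrolled __constant_memcpy for short lengths),
 * while runtime-sized copies go through __memcpy3d; both hand blocks
 * of 512 bytes or more to the MMX-based _mmx_memcpy.
 */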

#else

/*
 * No 3D Now!
 */

#define memcpy(t, f, n) __builtin_memcpy(t, f, n)

#endif
#endif /* !CONFIG_FORTIFY_SOURCE */
186 | |
187 | #define __HAVE_ARCH_MEMMOVE | |
78d64fc2 | 188 | void *memmove(void *dest, const void *src, size_t n); |
1da177e4 | 189 | |
6974f0c4 DM |
190 | extern int memcmp(const void *, const void *, size_t); |
191 | #ifndef CONFIG_FORTIFY_SOURCE | |
1da177e4 | 192 | #define memcmp __builtin_memcmp |
6974f0c4 | 193 | #endif |
1da177e4 LT |
194 | |
195 | #define __HAVE_ARCH_MEMCHR | |
78d64fc2 | 196 | extern void *memchr(const void *cs, int c, size_t count); |
1da177e4 | 197 | |
static inline void *__memset_generic(void *s, char c, size_t count)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosb"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "1" (s), "0" (count)
		     : "memory");
	return s;
}
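
/*
 * This is a plain "rep stosb": %al holds the fill byte ("a" (c)), %edi
 * the buffer ("1" (s)) and %ecx the length ("0" (count)), storing one
 * byte per iteration.  It is the simple fallback used when no better
 * variant applies.
 */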

/* we might want to write optimized versions of these later */
#define __constant_count_memset(s, c, count) __memset_generic((s), (c), (count))

/*
 * memset(x, 0, y) is a reasonably common thing to do, so we want to fill
 * things 32 bits at a time even when we don't know the size of the
 * area at compile-time..
 */
static __always_inline
void *__constant_c_memset(void *s, unsigned long c, size_t count)
{
	int d0, d1;
	asm volatile("rep ; stosl\n\t"
		     "testb $2,%b3\n\t"
		     "je 1f\n\t"
		     "stosw\n"
		     "1:\ttestb $1,%b3\n\t"
		     "je 2f\n\t"
		     "stosb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (c), "q" (count), "0" (count/4), "1" ((long)s)
		     : "memory");
	return s;
}
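
/*
 * Here c is a 32-bit pattern with the fill byte replicated in all four
 * byte lanes (0x01010101 to fill with 0x01, for instance).  "rep stosl"
 * stores count/4 dwords, then "testb $2,%b3" and "testb $1,%b3" test
 * bits 1 and 0 of count (operand 3, kept in a byte-addressable register
 * by the "q" constraint) to emit an optional stosw and stosb covering
 * the trailing count % 4 bytes.
 */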

/* Added by Gertjan van Wingerde to make minix and sysv module work */
#define __HAVE_ARCH_STRNLEN
extern size_t strnlen(const char *s, size_t count);
/* end of additional stuff */

#define __HAVE_ARCH_STRSTR
extern char *strstr(const char *cs, const char *ct);

/*
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as by now we know that both pattern and count are constant..
 */
static __always_inline
void *__constant_c_and_count_memset(void *s, unsigned long pattern,
				    size_t count)
{
	switch (count) {
	case 0:
		return s;
	case 1:
		*(unsigned char *)s = pattern & 0xff;
		return s;
	case 2:
		*(unsigned short *)s = pattern & 0xffff;
		return s;
	case 3:
		*(unsigned short *)s = pattern & 0xffff;
		*((unsigned char *)s + 2) = pattern & 0xff;
		return s;
	case 4:
		*(unsigned long *)s = pattern;
		return s;
	}

#define COMMON(x)							\
	asm volatile("rep ; stosl"					\
		     x							\
		     : "=&c" (d0), "=&D" (d1)				\
		     : "a" (eax), "0" (count/4), "1" ((long)s)		\
		     : "memory")

	{
		int d0, d1;
		unsigned long eax = pattern;

		switch (count % 4) {
		case 0:
			COMMON("");
			return s;
		case 1:
			COMMON("\n\tstosb");
			return s;
		case 2:
			COMMON("\n\tstosw");
			return s;
		default:
			COMMON("\n\tstosw\n\tstosb");
			return s;
		}
	}

#undef COMMON
}
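
/*
 * With count constant the switches above are resolved at compile time:
 * counts of 0-4 become direct stores, while anything larger becomes a
 * single "rep stosl" plus, depending on count % 4, at most one stosw
 * and one stosb appended by the COMMON() macro.  A 7-byte fill, for
 * instance, should compile to one rep stosl iteration followed by a
 * stosw and a stosb.
 */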

#define __constant_c_x_memset(s, c, count)			\
	(__builtin_constant_p(count)				\
	 ? __constant_c_and_count_memset((s), (c), (count))	\
	 : __constant_c_memset((s), (c), (count)))

#define __memset(s, c, count)				\
	(__builtin_constant_p(count)			\
	 ? __constant_count_memset((s), (c), (count))	\
	 : __memset_generic((s), (c), (count)))

#define __HAVE_ARCH_MEMSET
extern void *memset(void *, int, size_t);
#ifndef CONFIG_FORTIFY_SOURCE
#define memset(s, c, count) __builtin_memset(s, c, count)
#endif /* !CONFIG_FORTIFY_SOURCE */

#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosw"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
	int d0, d1;
	asm volatile("rep\n\t"
		     "stosl"
		     : "=&c" (d0), "=&D" (d1)
		     : "a" (v), "1" (s), "0" (n)
		     : "memory");
	return s;
}
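
/*
 * memset16()/memset32() are the wide-element analogues of memset():
 * "rep stosw" / "rep stosl" store n copies of the 16- or 32-bit value
 * v (held in %ax/%eax) at s, so n counts elements rather than bytes.
 * As a hypothetical example, memset16(fb, 0x7fff, 320) would paint 320
 * pixels white, assuming fb points at a 16bpp RGB555 framebuffer line.
 */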

/*
 * find the first occurrence of byte 'c', or 1 past the area if none
 */
#define __HAVE_ARCH_MEMSCAN
extern void *memscan(void *addr, int c, size_t size);

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_32_H */