#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/* Only used for special circumstances. Stolen from i386/string.h */
static __always_inline void *__inline_memcpy(void *to, const void *from, size_t n)
{
	unsigned long d0, d1, d2;
	asm volatile("rep ; movsl\n\t"
		     "testb $2,%b4\n\t"
		     "je 1f\n\t"
		     "movsw\n"
		     "1:\ttestb $1,%b4\n\t"
		     "je 2f\n\t"
		     "movsb\n"
		     "2:"
		     : "=&c" (d0), "=&D" (d1), "=&S" (d2)
		     : "0" (n / 4), "q" (n), "1" ((long)to), "2" ((long)from)
		     : "memory");
	return to;
}
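
/*
 * Reading note (added commentary, not part of the original file): the asm
 * above copies n/4 32-bit words with "rep movsl" (count in ECX, destination
 * in RDI, source in RSI, as wired up by the "0"/"1"/"2" input constraints),
 * then tests operand %4 (the full length n): bit 1 set means a 2-byte tail
 * remains and "movsw" copies it, bit 0 set means one final byte remains and
 * "movsb" copies it.
 */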

/* Even with __builtin_, the compiler may decide to use the out-of-line
   function. */

#define __HAVE_ARCH_MEMCPY 1
extern void *memcpy(void *to, const void *from, size_t len);
extern void *__memcpy(void *to, const void *from, size_t len);

#ifndef CONFIG_KMEMCHECK
#if (__GNUC__ == 4 && __GNUC_MINOR__ < 3) || __GNUC__ < 4
#define memcpy(dst, src, len)					\
({								\
	size_t __len = (len);					\
	void *__ret;						\
	if (__builtin_constant_p(len) && __len >= 64)		\
		__ret = __memcpy((dst), (src), __len);		\
	else							\
		__ret = __builtin_memcpy((dst), (src), __len);	\
	__ret;							\
})
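
/*
 * Added illustration (not part of the original file): under this pre-4.3 GCC
 * path, a constant size of 64 bytes or more, e.g. memcpy(dst, src, 128),
 * expands to the out-of-line __memcpy(), while memcpy(dst, src, 16) stays a
 * __builtin_memcpy() that the compiler is free to inline.
 */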
#endif
#else
/*
 * kmemcheck becomes very happy if we use the REP instructions unconditionally,
 * because it means that we know both memory operands in advance.
 */
#define memcpy(dst, src, len) __inline_memcpy((dst), (src), (len))
#endif

#define __HAVE_ARCH_MEMSET
void *memset(void *s, int c, size_t n);
void *__memset(void *s, int c, size_t n);

#define __HAVE_ARCH_MEMMOVE
void *memmove(void *dest, const void *src, size_t count);
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)

/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * should use the not-instrumented versions of the mem* functions.
 */

#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#define memmove(dst, src, len) __memmove(dst, src, len)
#define memset(s, c, n) __memset(s, c, n)
#endif
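
/*
 * Added note (not part of the original file): a translation unit is usually
 * opted out of KASAN instrumentation from its Makefile, e.g. mm/Makefile
 * carries lines along the lines of
 *
 *	KASAN_SANITIZE_slub.o := n
 *
 * Such a file is built without __SANITIZE_ADDRESS__, so the defines above
 * route its mem* calls straight to the uninstrumented __mem* variants.
 */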

DECLARE_STATIC_KEY_FALSE(mcsafe_key);

/**
 * memcpy_mcsafe - copy memory with indication if a machine check happened
 *
 * @dst:	destination address
 * @src:	source address
 * @cnt:	number of bytes to copy
 *
 * Low level memory copy function that catches machine checks
 *
 * Return 0 for success, -EFAULT for failure
 */
int memcpy_mcsafe(void *dst, const void *src, size_t cnt);
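
/*
 * Illustrative caller (a sketch, not part of this header): machine-check-aware
 * copy paths typically branch on the mcsafe_key static key declared above and
 * check the return value, along the lines of
 *
 *	if (static_branch_unlikely(&mcsafe_key))
 *		rc = memcpy_mcsafe(dst, src, len);	 returns 0 or -EFAULT
 *	else
 *		memcpy(dst, src, len);			 plain copy, no recovery
 */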

#endif /* __KERNEL__ */

#endif /* _ASM_X86_STRING_64_H */