Commit | Line | Data |
---|---|---|
a010b409 SI |
1 | From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001 |
2 | From: Brian Behlendorf <behlendorf1@llnl.gov> | |
3 | Date: Wed, 11 Jul 2018 13:10:40 -0700 | |
4 | Subject: [PATCH] Fix kernel unaligned access on sparc64 | |
5 | ||
6 | Update the SA_COPY_DATA macro to check if architecture supports | |
7 | efficient unaligned memory accesses at compile time. Otherwise | |
8 | fallback to using the sa_copy_data() function. | |
9 | ||
10 | The kernel provided CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS is | |
11 | used to determine availability in kernel space. In user space | |
12 | the x86_64, x86, powerpc, and sometimes arm architectures will | |
13 | define the HAVE_EFFICIENT_UNALIGNED_ACCESS macro. | |
14 | ||
15 | Signed-off-by: Brian Behlendorf <behlendorf1@llnl.gov> | |
16 | Closes #7642 | |
17 | Closes #7684 | |
18 | ||
19 | Signed-off-by: Stoiko Ivanov <s.ivanov@proxmox.com> | |
20 | --- | |
21 | lib/libspl/include/sys/isa_defs.h | 7 +++++++ | |
22 | module/icp/algs/modes/ccm.c | 2 +- | |
23 | module/zfs/sa.c | 35 ++++++++++++++++++++--------------- | |
24 | 3 files changed, 28 insertions(+), 16 deletions(-) | |
25 | ||
26 | diff --git a/lib/libspl/include/sys/isa_defs.h b/lib/libspl/include/sys/isa_defs.h | |
27 | index a5bea039..7a90e077 100644 | |
28 | --- a/lib/libspl/include/sys/isa_defs.h | |
29 | +++ b/lib/libspl/include/sys/isa_defs.h | |
30 | @@ -55,6 +55,7 @@ extern "C" { | |
31 | #endif | |
32 | ||
33 | #define _SUNOS_VTOC_16 | |
34 | +#define HAVE_EFFICIENT_UNALIGNED_ACCESS | |
35 | ||
36 | /* i386 arch specific defines */ | |
37 | #elif defined(__i386) || defined(__i386__) | |
38 | @@ -76,6 +77,7 @@ extern "C" { | |
39 | #endif | |
40 | ||
41 | #define _SUNOS_VTOC_16 | |
42 | +#define HAVE_EFFICIENT_UNALIGNED_ACCESS | |
43 | ||
44 | /* powerpc arch specific defines */ | |
45 | #elif defined(__powerpc) || defined(__powerpc__) || defined(__powerpc64__) | |
46 | @@ -99,6 +101,7 @@ extern "C" { | |
47 | #endif | |
48 | ||
49 | #define _SUNOS_VTOC_16 | |
50 | +#define HAVE_EFFICIENT_UNALIGNED_ACCESS | |
51 | ||
52 | /* arm arch specific defines */ | |
53 | #elif defined(__arm) || defined(__arm__) || defined(__aarch64__) | |
54 | @@ -129,6 +132,10 @@ extern "C" { | |
55 | ||
56 | #define _SUNOS_VTOC_16 | |
57 | ||
58 | +#if defined(__ARM_FEATURE_UNALIGNED) | |
59 | +#define HAVE_EFFICIENT_UNALIGNED_ACCESS | |
60 | +#endif | |
61 | + | |
62 | /* sparc arch specific defines */ | |
63 | #elif defined(__sparc) || defined(__sparc__) | |
64 | ||
65 | diff --git a/module/icp/algs/modes/ccm.c b/module/icp/algs/modes/ccm.c | |
66 | index 22aeb0a6..fb41194f 100644 | |
67 | --- a/module/icp/algs/modes/ccm.c | |
68 | +++ b/module/icp/algs/modes/ccm.c | |
69 | @@ -28,7 +28,7 @@ | |
70 | #include <sys/crypto/common.h> | |
71 | #include <sys/crypto/impl.h> | |
72 | ||
73 | -#if defined(__i386) || defined(__amd64) | |
74 | +#ifdef HAVE_EFFICIENT_UNALIGNED_ACCESS | |
75 | #include <sys/byteorder.h> | |
76 | #define UNALIGNED_POINTERS_PERMITTED | |
77 | #endif | |
78 | diff --git a/module/zfs/sa.c b/module/zfs/sa.c | |
79 | index 8046dbde..1fb1a8b5 100644 | |
80 | --- a/module/zfs/sa.c | |
81 | +++ b/module/zfs/sa.c | |
82 | @@ -147,21 +147,26 @@ arc_byteswap_func_t sa_bswap_table[] = { | |
83 | zfs_acl_byteswap, | |
84 | }; | |
85 | ||
86 | -#define SA_COPY_DATA(f, s, t, l) \ | |
87 | - { \ | |
88 | - if (f == NULL) { \ | |
89 | - if (l == 8) { \ | |
90 | - *(uint64_t *)t = *(uint64_t *)s; \ | |
91 | - } else if (l == 16) { \ | |
92 | - *(uint64_t *)t = *(uint64_t *)s; \ | |
93 | - *(uint64_t *)((uintptr_t)t + 8) = \ | |
94 | - *(uint64_t *)((uintptr_t)s + 8); \ | |
95 | - } else { \ | |
96 | - bcopy(s, t, l); \ | |
97 | - } \ | |
98 | - } else \ | |
99 | - sa_copy_data(f, s, t, l); \ | |
100 | - } | |
101 | +#ifdef HAVE_EFFICIENT_UNALIGNED_ACCESS | |
102 | +#define SA_COPY_DATA(f, s, t, l) \ | |
103 | +do { \ | |
104 | + if (f == NULL) { \ | |
105 | + if (l == 8) { \ | |
106 | + *(uint64_t *)t = *(uint64_t *)s; \ | |
107 | + } else if (l == 16) { \ | |
108 | + *(uint64_t *)t = *(uint64_t *)s; \ | |
109 | + *(uint64_t *)((uintptr_t)t + 8) = \ | |
110 | + *(uint64_t *)((uintptr_t)s + 8); \ | |
111 | + } else { \ | |
112 | + bcopy(s, t, l); \ | |
113 | + } \ | |
114 | + } else { \ | |
115 | + sa_copy_data(f, s, t, l); \ | |
116 | + } \ | |
117 | +} while (0) | |
118 | +#else | |
119 | +#define SA_COPY_DATA(f, s, t, l) sa_copy_data(f, s, t, l) | |
120 | +#endif | |
121 | ||
122 | /* | |
123 | * This table is fixed and cannot be changed. Its purpose is to |