/*
 * Extract the third and fourth entries from a variadic register list.
 * _R_23 discards its first two arguments and expands to the next two;
 * R_23 appends the padding arguments "1, 2, 3" so that _R_23 always
 * receives at least four arguments even when the caller passes a
 * shorter register list.  (REG... is the GNU named-variadic-macro
 * extension, consistent with the GNU inline asm used in this file.)
 */
#define _R_23(_0, _1, REG2, REG3, ...) REG2, REG3
#define R_23(REG...) _R_23(REG, 1, 2, 3)
-#define ASM_BUG() ASSERT(0)
+#define ZFS_ASM_BUG() ASSERT(0)
/*
 * Forward (tentative) definition of the carry-less-multiply lookup
 * table: 4*256 rows of 16 bytes, indexed as gf_clmul_mod_lt[4*(c)] by
 * the multiplication macros later in this file.  The initialized
 * definition appears further down, guarded by __x86_64 and
 * HAVE_SSSE3/HAVE_AVX2/HAVE_AVX512BW.
 * NOTE(review): this is a tentative definition with external linkage,
 * not an "extern" declaration — presumably intentional so the macros
 * above can reference the table before its initializer; confirm.
 */
const uint8_t gf_clmul_mod_lt[4*256][16];
: : [SRC] "r" (src)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
"pxor %" VR1(r) ", %" VR3(r)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
"movdqa %" VR1(r) ", %" VR3(r)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
: : [SRC] "r" (src)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
: : [DST] "r" (dst)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
"pxor %xmm13, %" VR1(r)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
_MUL2_x2(r); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
[lt] "r" (gf_clmul_mod_lt[4*(c)])); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
_MULx2(c, R_01(r)); \
break; \
default: \
- ASM_BUG(); \
+ ZFS_ASM_BUG(); \
} \
}
#if defined(__x86_64)
#if defined(HAVE_SSSE3) || defined(HAVE_AVX2) || defined(HAVE_AVX512BW)
-
+/* BEGIN CSTYLED */
const uint8_t
-__attribute__((aligned(256))) gf_clmul_mod_lt[4*256][16] = {
+__attribute__((aligned(256))) gf_clmul_mod_lt[4*256][16] =
+{
{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 },
{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
{ 0x00, 0xff, 0xfe, 0x01, 0xfc, 0x03, 0x02, 0xfd,
0xf8, 0x07, 0x06, 0xf9, 0x04, 0xfb, 0xfa, 0x05 }
};
-
+/* END CSTYLED */
#endif /* defined(HAVE_SSSE3) || defined(HAVE_AVX2) || defined(HAVE_AVX512BW) */
#endif /* defined(__x86_64) */