]> git.proxmox.com Git - mirror_ubuntu-focal-kernel.git/blame - arch/s390/crypto/crc32-vx.c
Merge remote-tracking branches 'asoc/topic/tas6424', 'asoc/topic/tfa9879', 'asoc...
[mirror_ubuntu-focal-kernel.git] / arch / s390 / crypto / crc32-vx.c
CommitLineData
20a884f5 1// SPDX-License-Identifier: GPL-2.0
f848dbd3
HB
2/*
3 * Crypto-API module for CRC-32 algorithms implemented with the
4 * z/Architecture Vector Extension Facility.
5 *
6 * Copyright IBM Corp. 2015
7 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
8 */
9#define KMSG_COMPONENT "crc32-vx"
10#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
11
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <crypto/internal/hash.h>
#include <asm/fpu/api.h>
#include <asm/unaligned.h>
17
18
/* shash parameters: CRC-32 consumes single bytes and emits a 32-bit digest */
#define CRC32_BLOCK_SIZE		1
#define CRC32_DIGEST_SIZE		4

/*
 * Messages shorter than VX_MIN_LEN + VX_ALIGN_MASK are computed purely in
 * software; the vector routines below require a 16-byte aligned buffer
 * whose length is a multiple of 16.
 */
#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)

/* Per-transform context: the initial CRC value ("key", settable via setkey) */
struct crc_ctx {
	u32 key;
};

/* Per-request context: the running CRC value across update() calls */
struct crc_desc_ctx {
	u32 crc;
};

/* Prototypes for functions in assembly files */
u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
38
/*
 * DEFINE_CRC32_VX() - Instantiate a CRC-32 helper that chooses between the
 * hardware-accelerated vector implementation and the software fallback.
 *
 * The software routine covers short messages and the unaligned head and
 * tail of the buffer; the vector routine processes the 16-byte aligned
 * middle portion, because aligning the buffer improves the fetch
 * operations of the VECTOR LOAD MULTIPLE instructions.
 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw)		    \
	static u32 __pure ___fname(u32 crc,				    \
			unsigned char const *data, size_t datalen)	    \
	{								    \
		unsigned long head, mid_len, tail;			    \
		struct kernel_fpu fpu_state;				    \
									    \
		/* Too short to amortize the vector setup cost. */	    \
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK)		    \
			return ___crc32_sw(crc, data, datalen);		    \
									    \
		/* Fold in unaligned head bytes in software so the vector   \
		 * routine always sees a 16-byte aligned start address.	    \
		 */							    \
		if ((unsigned long)data & VX_ALIGN_MASK) {		    \
			head = VX_ALIGNMENT -				    \
				((unsigned long)data & VX_ALIGN_MASK);	    \
			crc = ___crc32_sw(crc, data, head);		    \
			data = (void *)((unsigned long)data + head);	    \
			datalen -= head;				    \
		}							    \
									    \
		mid_len = datalen & ~VX_ALIGN_MASK;			    \
		tail = datalen & VX_ALIGN_MASK;				    \
									    \
		/* Vector registers are not saved by the usual kernel	    \
		 * context switch paths; bracket their use explicitly.	    \
		 */							    \
		kernel_fpu_begin(&fpu_state, KERNEL_VXR_LOW);		    \
		crc = ___crc32_vx(crc, data, mid_len);			    \
		kernel_fpu_end(&fpu_state, KERNEL_VXR_LOW);		    \
									    \
		/* Remaining tail bytes (< 16) are done in software. */	    \
		if (tail)						    \
			crc = ___crc32_sw(crc, data + mid_len, tail);	    \
									    \
		return crc;						    \
	}
78
/* Instantiate the three accelerated primitives with their software twins. */
DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)
82
83
84static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
85{
86 struct crc_ctx *mctx = crypto_tfm_ctx(tfm);
87
88 mctx->key = 0;
89 return 0;
90}
91
92static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
93{
94 struct crc_ctx *mctx = crypto_tfm_ctx(tfm);
95
96 mctx->key = ~0;
97 return 0;
98}
99
100static int crc32_vx_init(struct shash_desc *desc)
101{
102 struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
103 struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
104
105 ctx->crc = mctx->key;
106 return 0;
107}
108
109static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
110 unsigned int newkeylen)
111{
112 struct crc_ctx *mctx = crypto_shash_ctx(tfm);
113
114 if (newkeylen != sizeof(mctx->key)) {
115 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
116 return -EINVAL;
117 }
118 mctx->key = le32_to_cpu(*(__le32 *)newkey);
119 return 0;
120}
121
122static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
123 unsigned int newkeylen)
124{
125 struct crc_ctx *mctx = crypto_shash_ctx(tfm);
126
127 if (newkeylen != sizeof(mctx->key)) {
128 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
129 return -EINVAL;
130 }
131 mctx->key = be32_to_cpu(*(__be32 *)newkey);
132 return 0;
133}
134
135static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
136{
137 struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
138
139 *(__le32 *)out = cpu_to_le32p(&ctx->crc);
140 return 0;
141}
142
143static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
144{
145 struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
146
147 *(__be32 *)out = cpu_to_be32p(&ctx->crc);
148 return 0;
149}
150
151static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
152{
153 struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
154
155 /*
156 * Perform a final XOR with 0xFFFFFFFF to be in sync
157 * with the generic crc32c shash implementation.
158 */
159 *(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
160 return 0;
161}
162
163static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
164 u8 *out)
165{
166 *(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
167 return 0;
168}
169
170static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
171 u8 *out)
172{
173 *(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
174 return 0;
175}
176
177static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
178 u8 *out)
179{
180 /*
181 * Perform a final XOR with 0xFFFFFFFF to be in sync
182 * with the generic crc32c shash implementation.
183 */
184 *(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
185 return 0;
186}
187
188
/*
 * Generate the shash ->finup() callbacks, delegating to the matching
 * __<alg>_vx_finup() helper with the per-request CRC state.
 * NOTE(review): the `func` argument is unused here — apparently kept only
 * for symmetry with CRC32_VX_UPDATE below; confirm before removing.
 */
#define CRC32_VX_FINUP(alg, func)					      \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data,  \
				   unsigned int datalen, u8 *out)	      \
	{								      \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc),	      \
					      data, datalen, out);	      \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)
200
/*
 * Generate the shash ->digest() callbacks: a one-shot computation seeded
 * directly from the tfm-wide key (no per-request state is set up).
 * NOTE(review): `func` is unused, as in CRC32_VX_FINUP above.
 */
#define CRC32_VX_DIGEST(alg, func)					      \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out)		      \
	{								      \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm),    \
					      data, len, out);		      \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)
212
/*
 * Generate the shash ->update() callbacks: fold `datalen` bytes into the
 * per-request running CRC using the given (vector-accelerated) function.
 */
#define CRC32_VX_UPDATE(alg, func)					      \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen)		      \
	{								      \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
		ctx->crc = func(ctx->crc, data, datalen);		      \
		return 0;						      \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)
225
226
/* Registration table for the three vector-accelerated CRC-32 variants. */
static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE: zero seed, little-endian digest */
	{
		.init = crc32_vx_init,
		.setkey = crc32_vx_setkey,
		.update = crc32le_vx_update,
		.final = crc32le_vx_final,
		.finup = crc32le_vx_finup,
		.digest = crc32le_vx_digest,
		.descsize = sizeof(struct crc_desc_ctx),
		.digestsize = CRC32_DIGEST_SIZE,
		.base = {
			.cra_name = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority = 200,
			.cra_blocksize = CRC32_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct crc_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE: zero seed, big-endian digest */
	{
		.init = crc32_vx_init,
		.setkey = crc32be_vx_setkey,
		.update = crc32be_vx_update,
		.final = crc32be_vx_final,
		.finup = crc32be_vx_finup,
		.digest = crc32be_vx_digest,
		.descsize = sizeof(struct crc_desc_ctx),
		.digestsize = CRC32_DIGEST_SIZE,
		.base = {
			.cra_name = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority = 200,
			.cra_blocksize = CRC32_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct crc_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE: all-ones seed (cra_init_invert) plus the final
	 * inversion in crc32c_vx_final/__crc32c_vx_finup, matching the
	 * generic crc32c shash implementation.
	 */
	{
		.init = crc32_vx_init,
		.setkey = crc32_vx_setkey,
		.update = crc32c_vx_update,
		.final = crc32c_vx_final,
		.finup = crc32c_vx_finup,
		.digest = crc32c_vx_digest,
		.descsize = sizeof(struct crc_desc_ctx),
		.digestsize = CRC32_DIGEST_SIZE,
		.base = {
			.cra_name = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority = 200,
			.cra_blocksize = CRC32_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct crc_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = crc32_vx_cra_init_invert,
		},
	},
};
289
290
291static int __init crc_vx_mod_init(void)
292{
293 return crypto_register_shashes(crc32_vx_algs,
294 ARRAY_SIZE(crc32_vx_algs));
295}
296
297static void __exit crc_vx_mod_exit(void)
298{
299 crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
300}
301
/* Auto-load only on machines that provide the vector extension facility. */
module_cpu_feature_match(VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_LICENSE("GPL");

/* Aliases so requests for the generic names resolve to this driver. */
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");