/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

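/*
 * GF(2^128) primitives implemented with PCLMULQDQ in
 * ghash-clmulni-intel_asm.S: clmul_ghash_mul() multiplies one 16-byte
 * block in place by the hash key, clmul_ghash_update() folds in and
 * multiplies every complete 16-byte block of src.  Both must run between
 * kernel_fpu_begin() and kernel_fpu_end().
 */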
void clmul_ghash_mul(char *dst, const u128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
			const u128 *shash);

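/* Per-tfm context of the user-visible async "ghash": the cryptd fallback. */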
struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

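/* Per-tfm context of the internal shash: the pre-shifted hash key H. */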
struct ghash_ctx {
	u128 shash;
};

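/*
 * Per-request state: buffer[] holds the running digest, and bytes is the
 * number of bytes still needed to complete a partially filled block
 * (0 when no partial block is outstanding).
 */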
struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];
	u32 bytes;
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

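/*
 * Store the hash key multiplied by x in GF(2^128): a one-bit left shift
 * with a conditional reduction by the constant 0xc2 << 56 when the top
 * bit overflows.  The assembly routines expect the key in this form.
 */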
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *x = (be128 *)key;
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* perform multiplication by 'x' in GF(2^128) */
	a = be64_to_cpu(x->a);
	b = be64_to_cpu(x->b);

	ctx->shash.a = (b << 1) | (a >> 63);
	ctx->shash.b = (a << 1) | (b >> 63);

	if (a >> 63)
		ctx->shash.b ^= ((u64)0xc2) << 56;

	return 0;
}

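/*
 * Fold srclen bytes of src into the running digest: finish any partially
 * filled block first, feed all remaining complete blocks to
 * clmul_ghash_update() while the FPU is held, then buffer the trailing
 * partial block, if any, for the next call.
 */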
static int ghash_update(struct shash_desc *desc,
			const u8 *src, unsigned int srclen)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		int n = min(srclen, dctx->bytes);
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		dctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!dctx->bytes)
			clmul_ghash_mul(dst, &ctx->shash);
	}

	clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();

	if (srclen & 0xf) {
		src += srclen - (srclen & 0xf);
		srclen &= 0xf;
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}

	return 0;
}

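/*
 * If a partial block is still buffered, treat its missing bytes as zero
 * padding and multiply the buffer by the hash key one last time.
 */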
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	if (dctx->bytes) {
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		while (dctx->bytes--)
			*tmp++ ^= 0;

		kernel_fpu_begin();
		clmul_ghash_mul(dst, &ctx->shash);
		kernel_fpu_end();
	}

	dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

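/*
 * Synchronous shash implementation.  It is marked CRYPTO_ALG_INTERNAL
 * because it may only run where the FPU is usable; users get the async
 * "ghash" wrapper below, which falls back to cryptd when necessary.
 */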
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "__ghash",
		.cra_driver_name	= "__ghash-pclmulqdqni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
	},
};

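/*
 * Async entry points exposed to users.  update/final/digest call the
 * internal shash directly when the FPU is usable and nothing is already
 * queued on cryptd in atomic context; otherwise the request is deferred
 * to the cryptd worker.
 */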
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

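/*
 * Export/import copy the raw ghash_desc_ctx, so a partially hashed state
 * can be saved and later resumed.
 */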
static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	ghash_async_init(req);
	memcpy(dctx, in, sizeof(*dctx));
	return 0;
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memcpy(out, dctx, sizeof(*dctx));
	return 0;
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

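/* Forward the key, and the relevant request flags, to the cryptd child. */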
static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;
	int err;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	err = crypto_ahash_setkey(child, key, keylen);
	crypto_ahash_set_flags(tfm, crypto_ahash_get_flags(child)
			       & CRYPTO_TFM_RES_MASK);

	return err;
}

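/*
 * Bind each async tfm to a cryptd instance wrapping the internal
 * "__ghash-pclmulqdqni" shash and size the request context to hold the
 * nested cryptd request.
 */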
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

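/* The user-visible "ghash" algorithm, registered at priority 400. */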
static struct ahash_alg ghash_async_alg = {
	.init		= ghash_async_init,
	.update		= ghash_async_update,
	.final		= ghash_async_final,
	.setkey		= ghash_async_setkey,
	.digest		= ghash_async_digest,
	.export		= ghash_async_export,
	.import		= ghash_async_import,
	.halg = {
		.digestsize	= GHASH_DIGEST_SIZE,
		.statesize	= sizeof(struct ghash_desc_ctx),
		.base = {
			.cra_name		= "ghash",
			.cra_driver_name	= "ghash-clmulni",
			.cra_priority		= 400,
			.cra_ctxsize		= sizeof(struct ghash_async_ctx),
			.cra_flags		= CRYPTO_ALG_ASYNC,
			.cra_blocksize		= GHASH_BLOCK_SIZE,
			.cra_module		= THIS_MODULE,
			.cra_init		= ghash_async_init_tfm,
			.cra_exit		= ghash_async_exit_tfm,
		},
	},
};

static const struct x86_cpu_id pcmul_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_PCLMULQDQ), /* Pickle-Mickle-Duck */
	{}
};
MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);

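/*
 * Register the implementation only on CPUs that advertise PCLMULQDQ;
 * unregister the internal shash again if registering the async wrapper
 * fails.
 */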
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH Message Digest Algorithm, "
		   "accelerated by PCLMULQDQ-NI");
MODULE_ALIAS_CRYPTO("ghash");