// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

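/*
 * Core CCM transforms, implemented in aes-ce-ccm-core.S using the ARMv8
 * Crypto Extensions. __aes_arm64_encrypt() is the scalar AES cipher used
 * on the paths where the NEON unit may not be touched.
 */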
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

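/*
 * CCM permits even tag lengths from 4 to 16 bytes. Only evenness and the
 * lower bound are checked here; the upper bound is enforced by the crypto
 * core via the .maxauthsize field of the alg definition below.
 */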
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

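/*
 * Build the B0 block that seeds the CBC-MAC and reset the counter portion
 * of the IV. req->iv is laid out as [ flags | nonce | counter ]: iv[0]
 * carries L - 1 in its low three bits, the nonce occupies the next
 * 15 - L bytes, and the trailing L bytes of B0 hold the message length.
 */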
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

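/*
 * Feed data into the CBC-MAC. With the NEON unit available, the Crypto
 * Extensions routine handles the data directly; otherwise the MAC is
 * advanced with the scalar AES cipher, with *macp tracking how far the
 * current block has been filled so that partial updates can be resumed.
 */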
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp)
{
	if (crypto_simd_usable()) {
		kernel_neon_begin();
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
		kernel_neon_end();
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}

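/*
 * MAC the associated data. Per the CCM spec the AAD is prefixed with its
 * length: values below 0xff00 are encoded as two big-endian bytes, larger
 * values as the 0xfffe marker followed by a four-byte big-endian length
 * (assoclen is a u32, so the eight-byte form is never needed).
 */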
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

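/*
 * Non-NEON fallback: CTR encryption interleaved with CBC-MAC updates using
 * the scalar AES cipher. A trailing partial block is folded into the final
 * pass, and the tag is produced by encrypting the last MAC block and
 * XORing in S0 = E(K, A0), the keystream block for counter value zero.
 */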
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

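/*
 * CCM encryption: each chunk returned by the skcipher walk is CTR-encrypted
 * while the plaintext is simultaneously folded into the CBC-MAC, and the
 * finalised tag is appended to the destination scatterlist.
 */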
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

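/*
 * CCM decryption mirrors encryption: the recovered plaintext is folded into
 * the CBC-MAC, and the computed tag is compared against the one stored at
 * the end of the source using crypto_memneq(), which avoids leaking the
 * position of a mismatch through timing.
 */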
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (crypto_simd_usable()) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			kernel_neon_begin();
			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);
			kernel_neon_end();

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err) {
			kernel_neon_begin();
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));
			kernel_neon_end();
		}
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

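/*
 * Registered as a synchronous AEAD. cra_blocksize is 1 because CTR-based
 * modes accept input of any length; chunksize advertises the underlying
 * 16-byte granularity to the walk code.
 */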
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

static int __init aes_mod_init(void)
{
	if (!cpu_have_named_feature(AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");
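
/*
 * Usage sketch (illustration only, not part of this driver): callers reach
 * this implementation through the generic AEAD API rather than by calling
 * the functions above directly, e.g.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_aead_setauthsize(tfm, 8);
 *	...
 *
 * with the request IV laid out as described above ccm_init_mac(), and all
 * error handling omitted here.
 */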