// SPDX-License-Identifier: GPL-2.0-or-later
/**
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This file implements the Linux crypto algorithms.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

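/*
 * Editor's note, a summary inferred from the code and crypto4xx_sa.h:
 * the two helpers below pack the bit-fields of sa_command_0 and
 * sa_command_1, the control words at the head of the dynamic security
 * association (SA) record that the packet engine parses for each request.
 */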
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
				     u32 save_iv, u32 ld_h, u32 ld_iv,
				     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
				     u32 op_grp, u32 op, u32 dir)
{
	sa->sa_command_0.w = 0;
	sa->sa_command_0.bf.save_hash_state = save_h;
	sa->sa_command_0.bf.save_iv = save_iv;
	sa->sa_command_0.bf.load_hash_state = ld_h;
	sa->sa_command_0.bf.load_iv = ld_iv;
	sa->sa_command_0.bf.hdr_proc = hdr_proc;
	sa->sa_command_0.bf.hash_alg = h;
	sa->sa_command_0.bf.cipher_alg = c;
	sa->sa_command_0.bf.pad_type = pad_type & 3;
	sa->sa_command_0.bf.extend_pad = pad_type >> 2;
	sa->sa_command_0.bf.op_group = op_grp;
	sa->sa_command_0.bf.opcode = op;
	sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
				     u32 hmac_mc, u32 cfb, u32 esn,
				     u32 sn_mask, u32 mute, u32 cp_pad,
				     u32 cp_pay, u32 cp_hdr)
{
	sa->sa_command_1.w = 0;
	sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
	sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
	sa->sa_command_1.bf.feedback_mode = cfb;
	sa->sa_command_1.bf.sa_rev = 1;
	sa->sa_command_1.bf.hmac_muting = hmac_mc;
	sa->sa_command_1.bf.extended_seq_num = esn;
	sa->sa_command_1.bf.seq_num_mask = sn_mask;
	sa->sa_command_1.bf.mutable_bit_proc = mute;
	sa->sa_command_1.bf.copy_pad = cp_pad;
	sa->sa_command_1.bf.copy_payload = cp_pay;
	sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

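/*
 * Common skcipher path (summary added for clarity): the request IV, if
 * any, is converted to the engine's little-endian word layout and the
 * request is handed to crypto4xx_build_pd(), which queues a packet
 * descriptor for the hardware. ctx->sa_in holds the decryption SA,
 * ctx->sa_out the encryption SA.
 */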
static inline int crypto4xx_crypt(struct skcipher_request *req,
				  const unsigned int ivlen, bool decrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE];

	if (ivlen)
		crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
		req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
		ctx->sa_len, 0, NULL);
}

int crypto4xx_encrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, false);
}

int crypto4xx_encrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, false);
}

int crypto4xx_decrypt_noiv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, 0, true);
}

int crypto4xx_decrypt_iv(struct skcipher_request *req)
{
	return crypto4xx_crypt(req, AES_IV_SIZE, true);
}

/**
 * AES Functions
 */
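/*
 * Added note on the setkey flow: ctx->sa_in is filled in for decryption
 * (DIR_INBOUND) and then copied and patched into ctx->sa_out for
 * encryption. ECB neither saves nor loads per-request IV state, so it
 * picks SA_NOT_SAVE_IV/SA_LOAD_IV_FROM_SA.
 */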
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
				const u8 *key,
				unsigned int keylen,
				unsigned char cm,
				u8 fb)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct dynamic_sa_ctl *sa;
	int rc;

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
		crypto_skcipher_set_flags(cipher,
					  CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = ctx->sa_in;

	set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_NOT_SAVE_IV : SA_SAVE_IV),
				 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
				 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
				 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
				 DIR_INBOUND);

	set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
				 fb, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);
	sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
	sa->sa_command_1.bf.key_len = keylen >> 3;

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	/*
	 * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT.
	 * it's the DIR_(IN|OUT)BOUND that matters
	 */
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

	return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
				    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
				    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
				    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

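/*
 * RFC 3686 CTR note: the counter block handed to the engine is the
 * 32-bit nonce (taken from the tail of the key in setkey below), the
 * 64-bit per-request IV, and a 32-bit block counter starting at 1.
 */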
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
	if (rc)
		return rc;

	ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
						 CTR_RFC3686_NONCE_SIZE]);

	return 0;
}

int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	__le32 iv[AES_IV_SIZE / 4] = {
		ctx->iv_nonce,
		cpu_to_le32p((u32 *) req->iv),
		cpu_to_le32p((u32 *) (req->iv + 4)),
		cpu_to_le32(1) };

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  req->cryptlen, iv, AES_IV_SIZE,
				  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	size_t iv_len = crypto_skcipher_ivsize(cipher);
	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
			AES_BLOCK_SIZE;

	/*
	 * The hardware uses only the last 32-bits as the counter while the
	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
	 * the whole IV is a counter. So fallback if the counter is going to
	 * overflow.
	 */
	if (counter + nblks < counter) {
		SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
		int ret;

		skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
		skcipher_request_set_callback(subreq, req->base.flags,
			NULL, NULL);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
			req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(subreq)
			: crypto_skcipher_decrypt(subreq);
		skcipher_request_zero(subreq);
		return ret;
	}

	return encrypt ? crypto4xx_encrypt_iv(req)
		: crypto4xx_decrypt_iv(req);
}

static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
				       struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	int rc;

	crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
					 CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_skcipher_set_flags(cipher,
		crypto_sync_skcipher_get_flags(ctx->sw_cipher.cipher) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
	int rc;

	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	return crypto4xx_setkey_aes(cipher, key, keylen,
		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
	return crypto4xx_ctr_crypt(req, false);
}

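/*
 * AEAD requests the engine cannot process are redirected to the software
 * fallback transform; the individual hardware limits are spelled out in
 * the checks below.
 */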
static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
						unsigned int len,
						bool is_ccm, bool decrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);

	/* authsize has to be a multiple of 4 */
	if (aead->authsize & 3)
		return true;

	/*
	 * hardware does not handle cases where plaintext
	 * is less than a block.
	 */
	if (len < AES_BLOCK_SIZE)
		return true;

	/* assoc len needs to be a multiple of 4 and <= 1020 */
	if (req->assoclen & 0x3 || req->assoclen > 1020)
		return true;

	/* CCM supports only counter field length of 2 and 4 bytes */
	if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
		return true;

	return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
	struct crypto4xx_ctx *ctx, bool do_decrypt)
{
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return do_decrypt ? crypto_aead_decrypt(subreq) :
		crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
					 struct crypto_aead *cipher,
					 const u8 *key,
					 unsigned int keylen)
{
	int rc;

	crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->sw_cipher.aead,
		crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
	rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
	crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
	crypto_aead_set_flags(cipher,
		crypto_aead_get_flags(ctx->sw_cipher.aead) &
			CRYPTO_TFM_RES_MASK);

	return rc;
}

/**
 * AES-CCM Functions
 */

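/*
 * Added note: CCM combines CTR encryption with a CBC-MAC under the same
 * key, hence both SAs select AES in CTR mode with SA_HASH_ALG_CBC_MAC.
 * The inbound (decrypt and verify) and outbound (encrypt and sign)
 * copies differ in opcode, direction and pad copying.
 */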
int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	/* Setup SA */
	sa = (struct dynamic_sa_ctl *) ctx->sa_in;
	sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;

	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
				 SA_CIPHER_ALG_AES,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;
	return 0;
}

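/*
 * Added note: req->iv[0] carries the CCM L' value (L - 1, with L the
 * size of the counter field), so a counter length of 2 gives
 * iv[0] == 1 and a length of 4 gives iv[0] == 3. The memcpy in the
 * function below therefore transfers only the leading 16 - L bytes
 * (flags byte plus nonce) of the counter block.
 */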
static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__le32 iv[16];
	u32 tmp_sa[SA_AES128_CCM_LEN + 4];
	struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(aead);

	if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
	sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

	if (req->iv[0] == 1) {
		/* CRYPTO_MODE_AES_ICM */
		sa->sa_command_1.bf.crypto_mode9_8 = 1;
	}

	iv[3] = cpu_to_le32(0);
	crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
			       unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

	return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/**
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
	switch (keylen) {
	case 16:
	case 24:
	case 32:
		return 0;
	default:
		return -EINVAL;
	}
}

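/*
 * The GHASH hash subkey is defined as H = E_K(0^128), the AES encryption
 * of the all-zero block. The engine does not derive it itself, so the
 * helper below computes it with a software AES cipher; the result is
 * later stored in the SA's inner digest field.
 */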
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
					     unsigned int keylen)
{
	struct crypto_cipher *aes_tfm = NULL;
	uint8_t src[16] = { 0 };
	int rc = 0;

	aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(aes_tfm)) {
		rc = PTR_ERR(aes_tfm);
		pr_warn("could not load aes cipher driver: %d\n", rc);
		return rc;
	}

	rc = crypto_cipher_setkey(aes_tfm, key, keylen);
	if (rc) {
		pr_err("setkey() failed: %d\n", rc);
		goto out;
	}

	crypto_cipher_encrypt_one(aes_tfm, src, src);
	crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
	crypto_free_cipher(aes_tfm);
	return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_ctl *sa;
	int rc = 0;

	if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
		crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
	if (rc)
		return rc;

	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
	if (rc)
		return rc;

	sa = (struct dynamic_sa_ctl *) ctx->sa_in;

	sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
	set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
				 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
				 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
				 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
				 DIR_INBOUND);
	set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_ON, SA_MC_DISABLE,
				 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);

	sa->sa_command_1.bf.key_len = keylen >> 3;

	crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
				 key, keylen);

	rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
		key, keylen);
	if (rc) {
		pr_err("GCM hash key setting failed = %d\n", rc);
		goto err;
	}

	memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
	sa = (struct dynamic_sa_ctl *) ctx->sa_out;
	sa->sa_command_0.bf.dir = DIR_OUTBOUND;
	sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

	return 0;
err:
	crypto4xx_free_sa(ctx);
	return rc;
}

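/*
 * With GCM's 96-bit IV the initial counter block is J0 = IV || 0^31 || 1,
 * which is why the last counter word is forced to 1 below.
 */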
static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
					  bool decrypt)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
	__le32 iv[4];
	unsigned int len = req->cryptlen;

	if (decrypt)
		len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
		return crypto4xx_aead_fallback(req, ctx, decrypt);

	crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
	iv[3] = cpu_to_le32(1);

	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
				  len, iv, sizeof(iv),
				  decrypt ? ctx->sa_in : ctx->sa_out,
				  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
	return crypto4xx_crypt_aes_gcm(req, true);
}

/**
 * HASH SHA1 Functions
 */
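/*
 * Common ahash setup, summarized: allocate a hash-only SA
 * (SA_CIPHER_ALG_NULL, SA_OPCODE_HASH) for the hash algorithm selected
 * by @ha and zero its inner and outer digest state before first use.
 */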
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
				   unsigned int sa_len,
				   unsigned char ha,
				   unsigned char hm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto4xx_alg *my_alg;
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
	struct dynamic_sa_hash160 *sa;
	int rc;

	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
			      alg.u.hash);
	ctx->dev = my_alg->dev;

	/* Create SA */
	if (ctx->sa_in || ctx->sa_out)
		crypto4xx_free_sa(ctx);

	rc = crypto4xx_alloc_sa(ctx, sa_len);
	if (rc)
		return rc;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct crypto4xx_ctx));
	sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
	set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
				 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
				 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
				 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
				 SA_OPCODE_HASH, DIR_INBOUND);
	set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
				 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
				 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
				 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
				 SA_NOT_COPY_HDR);
	/* Need to zero hash digest in SA */
	memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
	memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

	return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int ds;
	struct dynamic_sa_ctl *sa;

	sa = ctx->sa_in;
	ds = crypto_ahash_digestsize(
			__crypto_ahash_cast(req->base.tfm));
	sa->sa_command_0.bf.digest_len = ds >> 2;
	sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

	return 0;
}

int crypto4xx_hash_update(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
	return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct scatterlist dst;
	unsigned int ds = crypto_ahash_digestsize(ahash);

	sg_init_one(&dst, req->result, ds);

	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
				  req->nbytes, NULL, 0, ctx->sa_in,
				  ctx->sa_len, 0, NULL);
}

/**
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
				       SA_HASH_MODE_HASH);
}