// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */
8 #include <crypto/internal/aead.h>
9 #include <crypto/internal/cipher.h>
10 #include <crypto/internal/hash.h>
11 #include <crypto/internal/skcipher.h>
12 #include <crypto/scatterwalk.h>
13 #include <linux/err.h>
14 #include <linux/init.h>
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/slab.h>
19 struct ccm_instance_ctx
{
20 struct crypto_skcipher_spawn ctr
;
21 struct crypto_ahash_spawn mac
;
24 struct crypto_ccm_ctx
{
25 struct crypto_ahash
*mac
;
26 struct crypto_skcipher
*ctr
;
29 struct crypto_rfc4309_ctx
{
30 struct crypto_aead
*child
;
34 struct crypto_rfc4309_req_ctx
{
35 struct scatterlist src
[3];
36 struct scatterlist dst
[3];
37 struct aead_request subreq
;
40 struct crypto_ccm_req_priv_ctx
{
45 struct scatterlist src
[3];
46 struct scatterlist dst
[3];
48 struct ahash_request ahreq
;
49 struct skcipher_request skreq
;
53 struct cbcmac_tfm_ctx
{
54 struct crypto_cipher
*child
;
57 struct cbcmac_desc_ctx
{
61 static inline struct crypto_ccm_req_priv_ctx
*crypto_ccm_reqctx(
62 struct aead_request
*req
)
64 unsigned long align
= crypto_aead_alignmask(crypto_aead_reqtfm(req
));
66 return (void *)PTR_ALIGN((u8
*)aead_request_ctx(req
), align
+ 1);
69 static int set_msg_len(u8
*block
, unsigned int msglen
, int csize
)
73 memset(block
, 0, csize
);
78 else if (msglen
> (1 << (8 * csize
)))
81 data
= cpu_to_be32(msglen
);
82 memcpy(block
- csize
, (u8
*)&data
+ 4 - csize
, csize
);
87 static int crypto_ccm_setkey(struct crypto_aead
*aead
, const u8
*key
,
90 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
91 struct crypto_skcipher
*ctr
= ctx
->ctr
;
92 struct crypto_ahash
*mac
= ctx
->mac
;
95 crypto_skcipher_clear_flags(ctr
, CRYPTO_TFM_REQ_MASK
);
96 crypto_skcipher_set_flags(ctr
, crypto_aead_get_flags(aead
) &
98 err
= crypto_skcipher_setkey(ctr
, key
, keylen
);
102 crypto_ahash_clear_flags(mac
, CRYPTO_TFM_REQ_MASK
);
103 crypto_ahash_set_flags(mac
, crypto_aead_get_flags(aead
) &
104 CRYPTO_TFM_REQ_MASK
);
105 return crypto_ahash_setkey(mac
, key
, keylen
);
108 static int crypto_ccm_setauthsize(struct crypto_aead
*tfm
,
109 unsigned int authsize
)
127 static int format_input(u8
*info
, struct aead_request
*req
,
128 unsigned int cryptlen
)
130 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
131 unsigned int lp
= req
->iv
[0];
132 unsigned int l
= lp
+ 1;
135 m
= crypto_aead_authsize(aead
);
137 memcpy(info
, req
->iv
, 16);
139 /* format control info per RFC 3610 and
140 * NIST Special Publication 800-38C
142 *info
|= (8 * ((m
- 2) / 2));
146 return set_msg_len(info
+ 16 - l
, cryptlen
, l
);
149 static int format_adata(u8
*adata
, unsigned int a
)
153 /* add control info for associated data
154 * RFC 3610 and NIST Special Publication 800-38C
157 *(__be16
*)adata
= cpu_to_be16(a
);
160 *(__be16
*)adata
= cpu_to_be16(0xfffe);
161 *(__be32
*)&adata
[2] = cpu_to_be32(a
);
168 static int crypto_ccm_auth(struct aead_request
*req
, struct scatterlist
*plain
,
169 unsigned int cryptlen
)
171 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
172 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
173 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
174 struct ahash_request
*ahreq
= &pctx
->ahreq
;
175 unsigned int assoclen
= req
->assoclen
;
176 struct scatterlist sg
[3];
177 u8
*odata
= pctx
->odata
;
178 u8
*idata
= pctx
->idata
;
181 /* format control data for input */
182 err
= format_input(odata
, req
, cryptlen
);
186 sg_init_table(sg
, 3);
187 sg_set_buf(&sg
[0], odata
, 16);
189 /* format associated data and compute into mac */
191 ilen
= format_adata(idata
, assoclen
);
192 sg_set_buf(&sg
[1], idata
, ilen
);
193 sg_chain(sg
, 3, req
->src
);
196 sg_chain(sg
, 2, req
->src
);
199 ahash_request_set_tfm(ahreq
, ctx
->mac
);
200 ahash_request_set_callback(ahreq
, pctx
->flags
, NULL
, NULL
);
201 ahash_request_set_crypt(ahreq
, sg
, NULL
, assoclen
+ ilen
+ 16);
202 err
= crypto_ahash_init(ahreq
);
205 err
= crypto_ahash_update(ahreq
);
209 /* we need to pad the MAC input to a round multiple of the block size */
210 ilen
= 16 - (assoclen
+ ilen
) % 16;
212 memset(idata
, 0, ilen
);
213 sg_init_table(sg
, 2);
214 sg_set_buf(&sg
[0], idata
, ilen
);
216 sg_chain(sg
, 2, plain
);
221 ahash_request_set_crypt(ahreq
, plain
, pctx
->odata
, cryptlen
);
222 err
= crypto_ahash_finup(ahreq
);
227 static void crypto_ccm_encrypt_done(struct crypto_async_request
*areq
, int err
)
229 struct aead_request
*req
= areq
->data
;
230 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
231 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
232 u8
*odata
= pctx
->odata
;
235 scatterwalk_map_and_copy(odata
, req
->dst
,
236 req
->assoclen
+ req
->cryptlen
,
237 crypto_aead_authsize(aead
), 1);
238 aead_request_complete(req
, err
);
241 static inline int crypto_ccm_check_iv(const u8
*iv
)
243 /* 2 <= L <= 8, so 1 <= L' <= 7. */
244 if (1 > iv
[0] || iv
[0] > 7)
250 static int crypto_ccm_init_crypt(struct aead_request
*req
, u8
*tag
)
252 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
253 struct scatterlist
*sg
;
257 err
= crypto_ccm_check_iv(iv
);
261 pctx
->flags
= aead_request_flags(req
);
263 /* Note: rfc 3610 and NIST 800-38C require counter of
264 * zero to encrypt auth tag.
266 memset(iv
+ 15 - iv
[0], 0, iv
[0] + 1);
268 sg_init_table(pctx
->src
, 3);
269 sg_set_buf(pctx
->src
, tag
, 16);
270 sg
= scatterwalk_ffwd(pctx
->src
+ 1, req
->src
, req
->assoclen
);
271 if (sg
!= pctx
->src
+ 1)
272 sg_chain(pctx
->src
, 2, sg
);
274 if (req
->src
!= req
->dst
) {
275 sg_init_table(pctx
->dst
, 3);
276 sg_set_buf(pctx
->dst
, tag
, 16);
277 sg
= scatterwalk_ffwd(pctx
->dst
+ 1, req
->dst
, req
->assoclen
);
278 if (sg
!= pctx
->dst
+ 1)
279 sg_chain(pctx
->dst
, 2, sg
);
285 static int crypto_ccm_encrypt(struct aead_request
*req
)
287 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
288 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
289 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
290 struct skcipher_request
*skreq
= &pctx
->skreq
;
291 struct scatterlist
*dst
;
292 unsigned int cryptlen
= req
->cryptlen
;
293 u8
*odata
= pctx
->odata
;
297 err
= crypto_ccm_init_crypt(req
, odata
);
301 err
= crypto_ccm_auth(req
, sg_next(pctx
->src
), cryptlen
);
306 if (req
->src
!= req
->dst
)
309 skcipher_request_set_tfm(skreq
, ctx
->ctr
);
310 skcipher_request_set_callback(skreq
, pctx
->flags
,
311 crypto_ccm_encrypt_done
, req
);
312 skcipher_request_set_crypt(skreq
, pctx
->src
, dst
, cryptlen
+ 16, iv
);
313 err
= crypto_skcipher_encrypt(skreq
);
317 /* copy authtag to end of dst */
318 scatterwalk_map_and_copy(odata
, sg_next(dst
), cryptlen
,
319 crypto_aead_authsize(aead
), 1);
323 static void crypto_ccm_decrypt_done(struct crypto_async_request
*areq
,
326 struct aead_request
*req
= areq
->data
;
327 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
328 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
329 unsigned int authsize
= crypto_aead_authsize(aead
);
330 unsigned int cryptlen
= req
->cryptlen
- authsize
;
331 struct scatterlist
*dst
;
335 dst
= sg_next(req
->src
== req
->dst
? pctx
->src
: pctx
->dst
);
338 err
= crypto_ccm_auth(req
, dst
, cryptlen
);
339 if (!err
&& crypto_memneq(pctx
->auth_tag
, pctx
->odata
, authsize
))
342 aead_request_complete(req
, err
);
345 static int crypto_ccm_decrypt(struct aead_request
*req
)
347 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
348 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(aead
);
349 struct crypto_ccm_req_priv_ctx
*pctx
= crypto_ccm_reqctx(req
);
350 struct skcipher_request
*skreq
= &pctx
->skreq
;
351 struct scatterlist
*dst
;
352 unsigned int authsize
= crypto_aead_authsize(aead
);
353 unsigned int cryptlen
= req
->cryptlen
;
354 u8
*authtag
= pctx
->auth_tag
;
355 u8
*odata
= pctx
->odata
;
356 u8
*iv
= pctx
->idata
;
359 cryptlen
-= authsize
;
361 err
= crypto_ccm_init_crypt(req
, authtag
);
365 scatterwalk_map_and_copy(authtag
, sg_next(pctx
->src
), cryptlen
,
369 if (req
->src
!= req
->dst
)
372 memcpy(iv
, req
->iv
, 16);
374 skcipher_request_set_tfm(skreq
, ctx
->ctr
);
375 skcipher_request_set_callback(skreq
, pctx
->flags
,
376 crypto_ccm_decrypt_done
, req
);
377 skcipher_request_set_crypt(skreq
, pctx
->src
, dst
, cryptlen
+ 16, iv
);
378 err
= crypto_skcipher_decrypt(skreq
);
382 err
= crypto_ccm_auth(req
, sg_next(dst
), cryptlen
);
387 if (crypto_memneq(authtag
, odata
, authsize
))
393 static int crypto_ccm_init_tfm(struct crypto_aead
*tfm
)
395 struct aead_instance
*inst
= aead_alg_instance(tfm
);
396 struct ccm_instance_ctx
*ictx
= aead_instance_ctx(inst
);
397 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(tfm
);
398 struct crypto_ahash
*mac
;
399 struct crypto_skcipher
*ctr
;
403 mac
= crypto_spawn_ahash(&ictx
->mac
);
407 ctr
= crypto_spawn_skcipher(&ictx
->ctr
);
415 align
= crypto_aead_alignmask(tfm
);
416 align
&= ~(crypto_tfm_ctx_alignment() - 1);
417 crypto_aead_set_reqsize(
419 align
+ sizeof(struct crypto_ccm_req_priv_ctx
) +
420 max(crypto_ahash_reqsize(mac
), crypto_skcipher_reqsize(ctr
)));
425 crypto_free_ahash(mac
);
429 static void crypto_ccm_exit_tfm(struct crypto_aead
*tfm
)
431 struct crypto_ccm_ctx
*ctx
= crypto_aead_ctx(tfm
);
433 crypto_free_ahash(ctx
->mac
);
434 crypto_free_skcipher(ctx
->ctr
);
437 static void crypto_ccm_free(struct aead_instance
*inst
)
439 struct ccm_instance_ctx
*ctx
= aead_instance_ctx(inst
);
441 crypto_drop_ahash(&ctx
->mac
);
442 crypto_drop_skcipher(&ctx
->ctr
);
446 static int crypto_ccm_create_common(struct crypto_template
*tmpl
,
448 const char *ctr_name
,
449 const char *mac_name
)
452 struct aead_instance
*inst
;
453 struct ccm_instance_ctx
*ictx
;
454 struct skcipher_alg
*ctr
;
455 struct hash_alg_common
*mac
;
458 err
= crypto_check_attr_type(tb
, CRYPTO_ALG_TYPE_AEAD
, &mask
);
462 inst
= kzalloc(sizeof(*inst
) + sizeof(*ictx
), GFP_KERNEL
);
465 ictx
= aead_instance_ctx(inst
);
467 err
= crypto_grab_ahash(&ictx
->mac
, aead_crypto_instance(inst
),
468 mac_name
, 0, mask
| CRYPTO_ALG_ASYNC
);
471 mac
= crypto_spawn_ahash_alg(&ictx
->mac
);
474 if (strncmp(mac
->base
.cra_name
, "cbcmac(", 7) != 0 ||
475 mac
->digestsize
!= 16)
478 err
= crypto_grab_skcipher(&ictx
->ctr
, aead_crypto_instance(inst
),
482 ctr
= crypto_spawn_skcipher_alg(&ictx
->ctr
);
484 /* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
486 if (strncmp(ctr
->base
.cra_name
, "ctr(", 4) != 0 ||
487 crypto_skcipher_alg_ivsize(ctr
) != 16 ||
488 ctr
->base
.cra_blocksize
!= 1)
491 /* ctr and cbcmac must use the same underlying block cipher. */
492 if (strcmp(ctr
->base
.cra_name
+ 4, mac
->base
.cra_name
+ 7) != 0)
496 if (snprintf(inst
->alg
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
497 "ccm(%s", ctr
->base
.cra_name
+ 4) >= CRYPTO_MAX_ALG_NAME
)
500 if (snprintf(inst
->alg
.base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
501 "ccm_base(%s,%s)", ctr
->base
.cra_driver_name
,
502 mac
->base
.cra_driver_name
) >= CRYPTO_MAX_ALG_NAME
)
505 inst
->alg
.base
.cra_priority
= (mac
->base
.cra_priority
+
506 ctr
->base
.cra_priority
) / 2;
507 inst
->alg
.base
.cra_blocksize
= 1;
508 inst
->alg
.base
.cra_alignmask
= mac
->base
.cra_alignmask
|
509 ctr
->base
.cra_alignmask
;
510 inst
->alg
.ivsize
= 16;
511 inst
->alg
.chunksize
= crypto_skcipher_alg_chunksize(ctr
);
512 inst
->alg
.maxauthsize
= 16;
513 inst
->alg
.base
.cra_ctxsize
= sizeof(struct crypto_ccm_ctx
);
514 inst
->alg
.init
= crypto_ccm_init_tfm
;
515 inst
->alg
.exit
= crypto_ccm_exit_tfm
;
516 inst
->alg
.setkey
= crypto_ccm_setkey
;
517 inst
->alg
.setauthsize
= crypto_ccm_setauthsize
;
518 inst
->alg
.encrypt
= crypto_ccm_encrypt
;
519 inst
->alg
.decrypt
= crypto_ccm_decrypt
;
521 inst
->free
= crypto_ccm_free
;
523 err
= aead_register_instance(tmpl
, inst
);
526 crypto_ccm_free(inst
);
531 static int crypto_ccm_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
533 const char *cipher_name
;
534 char ctr_name
[CRYPTO_MAX_ALG_NAME
];
535 char mac_name
[CRYPTO_MAX_ALG_NAME
];
537 cipher_name
= crypto_attr_alg_name(tb
[1]);
538 if (IS_ERR(cipher_name
))
539 return PTR_ERR(cipher_name
);
541 if (snprintf(ctr_name
, CRYPTO_MAX_ALG_NAME
, "ctr(%s)",
542 cipher_name
) >= CRYPTO_MAX_ALG_NAME
)
543 return -ENAMETOOLONG
;
545 if (snprintf(mac_name
, CRYPTO_MAX_ALG_NAME
, "cbcmac(%s)",
546 cipher_name
) >= CRYPTO_MAX_ALG_NAME
)
547 return -ENAMETOOLONG
;
549 return crypto_ccm_create_common(tmpl
, tb
, ctr_name
, mac_name
);
552 static int crypto_ccm_base_create(struct crypto_template
*tmpl
,
555 const char *ctr_name
;
556 const char *mac_name
;
558 ctr_name
= crypto_attr_alg_name(tb
[1]);
559 if (IS_ERR(ctr_name
))
560 return PTR_ERR(ctr_name
);
562 mac_name
= crypto_attr_alg_name(tb
[2]);
563 if (IS_ERR(mac_name
))
564 return PTR_ERR(mac_name
);
566 return crypto_ccm_create_common(tmpl
, tb
, ctr_name
, mac_name
);
569 static int crypto_rfc4309_setkey(struct crypto_aead
*parent
, const u8
*key
,
572 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(parent
);
573 struct crypto_aead
*child
= ctx
->child
;
579 memcpy(ctx
->nonce
, key
+ keylen
, 3);
581 crypto_aead_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
582 crypto_aead_set_flags(child
, crypto_aead_get_flags(parent
) &
583 CRYPTO_TFM_REQ_MASK
);
584 return crypto_aead_setkey(child
, key
, keylen
);
587 static int crypto_rfc4309_setauthsize(struct crypto_aead
*parent
,
588 unsigned int authsize
)
590 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(parent
);
601 return crypto_aead_setauthsize(ctx
->child
, authsize
);
604 static struct aead_request
*crypto_rfc4309_crypt(struct aead_request
*req
)
606 struct crypto_rfc4309_req_ctx
*rctx
= aead_request_ctx(req
);
607 struct aead_request
*subreq
= &rctx
->subreq
;
608 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
609 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(aead
);
610 struct crypto_aead
*child
= ctx
->child
;
611 struct scatterlist
*sg
;
612 u8
*iv
= PTR_ALIGN((u8
*)(subreq
+ 1) + crypto_aead_reqsize(child
),
613 crypto_aead_alignmask(child
) + 1);
618 memcpy(iv
+ 1, ctx
->nonce
, 3);
619 memcpy(iv
+ 4, req
->iv
, 8);
621 scatterwalk_map_and_copy(iv
+ 16, req
->src
, 0, req
->assoclen
- 8, 0);
623 sg_init_table(rctx
->src
, 3);
624 sg_set_buf(rctx
->src
, iv
+ 16, req
->assoclen
- 8);
625 sg
= scatterwalk_ffwd(rctx
->src
+ 1, req
->src
, req
->assoclen
);
626 if (sg
!= rctx
->src
+ 1)
627 sg_chain(rctx
->src
, 2, sg
);
629 if (req
->src
!= req
->dst
) {
630 sg_init_table(rctx
->dst
, 3);
631 sg_set_buf(rctx
->dst
, iv
+ 16, req
->assoclen
- 8);
632 sg
= scatterwalk_ffwd(rctx
->dst
+ 1, req
->dst
, req
->assoclen
);
633 if (sg
!= rctx
->dst
+ 1)
634 sg_chain(rctx
->dst
, 2, sg
);
637 aead_request_set_tfm(subreq
, child
);
638 aead_request_set_callback(subreq
, req
->base
.flags
, req
->base
.complete
,
640 aead_request_set_crypt(subreq
, rctx
->src
,
641 req
->src
== req
->dst
? rctx
->src
: rctx
->dst
,
643 aead_request_set_ad(subreq
, req
->assoclen
- 8);
648 static int crypto_rfc4309_encrypt(struct aead_request
*req
)
650 if (req
->assoclen
!= 16 && req
->assoclen
!= 20)
653 req
= crypto_rfc4309_crypt(req
);
655 return crypto_aead_encrypt(req
);
658 static int crypto_rfc4309_decrypt(struct aead_request
*req
)
660 if (req
->assoclen
!= 16 && req
->assoclen
!= 20)
663 req
= crypto_rfc4309_crypt(req
);
665 return crypto_aead_decrypt(req
);
668 static int crypto_rfc4309_init_tfm(struct crypto_aead
*tfm
)
670 struct aead_instance
*inst
= aead_alg_instance(tfm
);
671 struct crypto_aead_spawn
*spawn
= aead_instance_ctx(inst
);
672 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(tfm
);
673 struct crypto_aead
*aead
;
676 aead
= crypto_spawn_aead(spawn
);
678 return PTR_ERR(aead
);
682 align
= crypto_aead_alignmask(aead
);
683 align
&= ~(crypto_tfm_ctx_alignment() - 1);
684 crypto_aead_set_reqsize(
686 sizeof(struct crypto_rfc4309_req_ctx
) +
687 ALIGN(crypto_aead_reqsize(aead
), crypto_tfm_ctx_alignment()) +
693 static void crypto_rfc4309_exit_tfm(struct crypto_aead
*tfm
)
695 struct crypto_rfc4309_ctx
*ctx
= crypto_aead_ctx(tfm
);
697 crypto_free_aead(ctx
->child
);
700 static void crypto_rfc4309_free(struct aead_instance
*inst
)
702 crypto_drop_aead(aead_instance_ctx(inst
));
706 static int crypto_rfc4309_create(struct crypto_template
*tmpl
,
710 struct aead_instance
*inst
;
711 struct crypto_aead_spawn
*spawn
;
712 struct aead_alg
*alg
;
715 err
= crypto_check_attr_type(tb
, CRYPTO_ALG_TYPE_AEAD
, &mask
);
719 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
723 spawn
= aead_instance_ctx(inst
);
724 err
= crypto_grab_aead(spawn
, aead_crypto_instance(inst
),
725 crypto_attr_alg_name(tb
[1]), 0, mask
);
729 alg
= crypto_spawn_aead_alg(spawn
);
733 /* We only support 16-byte blocks. */
734 if (crypto_aead_alg_ivsize(alg
) != 16)
737 /* Not a stream cipher? */
738 if (alg
->base
.cra_blocksize
!= 1)
742 if (snprintf(inst
->alg
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
743 "rfc4309(%s)", alg
->base
.cra_name
) >=
744 CRYPTO_MAX_ALG_NAME
||
745 snprintf(inst
->alg
.base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
746 "rfc4309(%s)", alg
->base
.cra_driver_name
) >=
750 inst
->alg
.base
.cra_priority
= alg
->base
.cra_priority
;
751 inst
->alg
.base
.cra_blocksize
= 1;
752 inst
->alg
.base
.cra_alignmask
= alg
->base
.cra_alignmask
;
754 inst
->alg
.ivsize
= 8;
755 inst
->alg
.chunksize
= crypto_aead_alg_chunksize(alg
);
756 inst
->alg
.maxauthsize
= 16;
758 inst
->alg
.base
.cra_ctxsize
= sizeof(struct crypto_rfc4309_ctx
);
760 inst
->alg
.init
= crypto_rfc4309_init_tfm
;
761 inst
->alg
.exit
= crypto_rfc4309_exit_tfm
;
763 inst
->alg
.setkey
= crypto_rfc4309_setkey
;
764 inst
->alg
.setauthsize
= crypto_rfc4309_setauthsize
;
765 inst
->alg
.encrypt
= crypto_rfc4309_encrypt
;
766 inst
->alg
.decrypt
= crypto_rfc4309_decrypt
;
768 inst
->free
= crypto_rfc4309_free
;
770 err
= aead_register_instance(tmpl
, inst
);
773 crypto_rfc4309_free(inst
);
778 static int crypto_cbcmac_digest_setkey(struct crypto_shash
*parent
,
779 const u8
*inkey
, unsigned int keylen
)
781 struct cbcmac_tfm_ctx
*ctx
= crypto_shash_ctx(parent
);
783 return crypto_cipher_setkey(ctx
->child
, inkey
, keylen
);
786 static int crypto_cbcmac_digest_init(struct shash_desc
*pdesc
)
788 struct cbcmac_desc_ctx
*ctx
= shash_desc_ctx(pdesc
);
789 int bs
= crypto_shash_digestsize(pdesc
->tfm
);
790 u8
*dg
= (u8
*)ctx
+ crypto_shash_descsize(pdesc
->tfm
) - bs
;
798 static int crypto_cbcmac_digest_update(struct shash_desc
*pdesc
, const u8
*p
,
801 struct crypto_shash
*parent
= pdesc
->tfm
;
802 struct cbcmac_tfm_ctx
*tctx
= crypto_shash_ctx(parent
);
803 struct cbcmac_desc_ctx
*ctx
= shash_desc_ctx(pdesc
);
804 struct crypto_cipher
*tfm
= tctx
->child
;
805 int bs
= crypto_shash_digestsize(parent
);
806 u8
*dg
= (u8
*)ctx
+ crypto_shash_descsize(parent
) - bs
;
809 unsigned int l
= min(len
, bs
- ctx
->len
);
811 crypto_xor(dg
+ ctx
->len
, p
, l
);
816 if (ctx
->len
== bs
) {
817 crypto_cipher_encrypt_one(tfm
, dg
, dg
);
825 static int crypto_cbcmac_digest_final(struct shash_desc
*pdesc
, u8
*out
)
827 struct crypto_shash
*parent
= pdesc
->tfm
;
828 struct cbcmac_tfm_ctx
*tctx
= crypto_shash_ctx(parent
);
829 struct cbcmac_desc_ctx
*ctx
= shash_desc_ctx(pdesc
);
830 struct crypto_cipher
*tfm
= tctx
->child
;
831 int bs
= crypto_shash_digestsize(parent
);
832 u8
*dg
= (u8
*)ctx
+ crypto_shash_descsize(parent
) - bs
;
835 crypto_cipher_encrypt_one(tfm
, dg
, dg
);
841 static int cbcmac_init_tfm(struct crypto_tfm
*tfm
)
843 struct crypto_cipher
*cipher
;
844 struct crypto_instance
*inst
= (void *)tfm
->__crt_alg
;
845 struct crypto_cipher_spawn
*spawn
= crypto_instance_ctx(inst
);
846 struct cbcmac_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
848 cipher
= crypto_spawn_cipher(spawn
);
850 return PTR_ERR(cipher
);
857 static void cbcmac_exit_tfm(struct crypto_tfm
*tfm
)
859 struct cbcmac_tfm_ctx
*ctx
= crypto_tfm_ctx(tfm
);
860 crypto_free_cipher(ctx
->child
);
863 static int cbcmac_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
865 struct shash_instance
*inst
;
866 struct crypto_cipher_spawn
*spawn
;
867 struct crypto_alg
*alg
;
871 err
= crypto_check_attr_type(tb
, CRYPTO_ALG_TYPE_SHASH
, &mask
);
875 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
878 spawn
= shash_instance_ctx(inst
);
880 err
= crypto_grab_cipher(spawn
, shash_crypto_instance(inst
),
881 crypto_attr_alg_name(tb
[1]), 0, mask
);
884 alg
= crypto_spawn_cipher_alg(spawn
);
886 err
= crypto_inst_setname(shash_crypto_instance(inst
), tmpl
->name
, alg
);
890 inst
->alg
.base
.cra_priority
= alg
->cra_priority
;
891 inst
->alg
.base
.cra_blocksize
= 1;
893 inst
->alg
.digestsize
= alg
->cra_blocksize
;
894 inst
->alg
.descsize
= ALIGN(sizeof(struct cbcmac_desc_ctx
),
895 alg
->cra_alignmask
+ 1) +
898 inst
->alg
.base
.cra_ctxsize
= sizeof(struct cbcmac_tfm_ctx
);
899 inst
->alg
.base
.cra_init
= cbcmac_init_tfm
;
900 inst
->alg
.base
.cra_exit
= cbcmac_exit_tfm
;
902 inst
->alg
.init
= crypto_cbcmac_digest_init
;
903 inst
->alg
.update
= crypto_cbcmac_digest_update
;
904 inst
->alg
.final
= crypto_cbcmac_digest_final
;
905 inst
->alg
.setkey
= crypto_cbcmac_digest_setkey
;
907 inst
->free
= shash_free_singlespawn_instance
;
909 err
= shash_register_instance(tmpl
, inst
);
912 shash_free_singlespawn_instance(inst
);
917 static struct crypto_template crypto_ccm_tmpls
[] = {
920 .create
= cbcmac_create
,
921 .module
= THIS_MODULE
,
924 .create
= crypto_ccm_base_create
,
925 .module
= THIS_MODULE
,
928 .create
= crypto_ccm_create
,
929 .module
= THIS_MODULE
,
932 .create
= crypto_rfc4309_create
,
933 .module
= THIS_MODULE
,
937 static int __init
crypto_ccm_module_init(void)
939 return crypto_register_templates(crypto_ccm_tmpls
,
940 ARRAY_SIZE(crypto_ccm_tmpls
));
943 static void __exit
crypto_ccm_module_exit(void)
945 crypto_unregister_templates(crypto_ccm_tmpls
,
946 ARRAY_SIZE(crypto_ccm_tmpls
));
949 subsys_initcall(crypto_ccm_module_init
);
950 module_exit(crypto_ccm_module_exit
);
952 MODULE_LICENSE("GPL");
953 MODULE_DESCRIPTION("Counter with CBC MAC");
954 MODULE_ALIAS_CRYPTO("ccm_base");
955 MODULE_ALIAS_CRYPTO("rfc4309");
956 MODULE_ALIAS_CRYPTO("ccm");
957 MODULE_ALIAS_CRYPTO("cbcmac");
958 MODULE_IMPORT_NS(CRYPTO_INTERNAL
);