// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt, which converts the initial vector for the skcipher
 * used for block encryption, by encrypting it using the hash of the
 * skcipher key as encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * employs it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
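
/*
 * Illustrative usage (not part of this file): a user such as dm-crypt or
 * fscrypt typically allocates an instance of this template by name and keys
 * it with the bulk cipher key only; the ESSIV key is derived internally by
 * hashing that key. A minimal sketch:
 *
 *	struct crypto_skcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 */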

#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher	*essiv_cipher;
	struct crypto_shash	*hash;
	int	ivoffset;
};
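
/*
 * Per-request context for the AEAD flavor of the template. The inner aead
 * subrequest (and its own request context) follows @aead_req, and a copy of
 * the encrypted IV lives tctx->ivoffset bytes into the request context (see
 * essiv_aead_init_tfm()). @assoc is a bounce buffer for the associated data
 * when it is spread over more than one scatterlist entry.
 */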
struct essiv_aead_request_ctx {
	struct scatterlist	sg[4];
	u8			*assoc;
	struct aead_request	aead_req;
};
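
/*
 * Key handling for the skcipher flavor: the key is passed through unchanged
 * to the inner skcipher, while the ESSIV block cipher is keyed with the
 * digest of that same key, computed with the instantiated hash.
 */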
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}
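
/*
 * For the AEAD flavor, the key arrives in the authenc() format, carrying
 * both an authentication key and an encryption key. The whole blob is passed
 * to the inner authenc aead unchanged, while the ESSIV block cipher is keyed
 * with the hash of the encryption key followed by the authentication key.
 */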
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	skcipher_request_complete(req, err);
}
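
/*
 * Common encrypt/decrypt path for the skcipher flavor: encrypt the caller's
 * IV in place with the ESSIV block cipher, then hand the request off to the
 * inner skcipher using a subrequest placed in the caller's request context.
 */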
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	kfree(rctx->assoc);
	aead_request_complete(req, err);
}
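
/*
 * Common encrypt/decrypt path for the AEAD flavor. dm-crypt lays out its
 * requests as [ associated data | IV | ciphertext ], with the IV occupying
 * the tail of the AAD region. So after converting the IV with the ESSIV
 * block cipher, the result has to be made visible to the inner aead as part
 * of the AAD, either by patching it into the destination buffer directly or
 * by splicing a separate IV element into a local scatterlist.
 */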
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS)
		kfree(rctx->assoc);

	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}
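
/*
 * The skcipher request size is set so that the inner subrequest (including
 * its own request context) fits at the end of the outer request context,
 * which is how essiv_skcipher_crypt() avoids a separate allocation.
 */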
static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}
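
/*
 * For the AEAD flavor the request context is laid out as a
 * struct essiv_aead_request_ctx, followed by the inner aead's request
 * context, and finally ivsize bytes for the converted IV; tctx->ivoffset
 * records where that IV copy starts.
 */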
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}
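
/*
 * Extract the name of the innermost block cipher from the cra_name of the
 * inner algorithm, e.g., both "cbc(aes)" and "authenc(hmac(sha256),cbc(aes))"
 * yield "aes". Returns false if no parenthesized name can be found or if it
 * does not fit in CRYPTO_MAX_ALG_NAME bytes.
 */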
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}
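
/*
 * The instantiation is only accepted if the hash digest is a valid key size
 * for the ESSIV block cipher, the IV size of the inner algorithm matches the
 * block size of that cipher, and the hash does not itself require a key.
 */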
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;
out:
	crypto_mod_put(alg);
	return ret;
}
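
/*
 * Instantiate either an skcipher or an aead, depending on the type requested
 * by the caller: "essiv(cbc(aes),sha256)" wraps an skcipher, while
 * "essiv(authenc(hmac(sha256),cbc(aes)),sha256)" wraps an authenc() aead.
 * The ESSIV block cipher name ("aes" in both examples) is parsed out of the
 * inner algorithm's cra_name.
 */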
static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct skcipher_alg *skcipher_alg = NULL;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags |= (hash_alg->base.cra_flags &
			    CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_SKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= crypto_skcipher_alg_min_keysize(skcipher_alg);
		skcipher_inst->alg.max_keysize	= crypto_skcipher_alg_max_keysize(skcipher_alg);
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= crypto_skcipher_alg_chunksize(skcipher_alg);
		skcipher_inst->alg.walksize	= crypto_skcipher_alg_walksize(skcipher_alg);

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_SKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);