/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt.  This algorithm is mainly useful for CTR and similar modes.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
16 #include <crypto/internal/geniv.h>
17 #include <crypto/internal/skcipher.h>
18 #include <crypto/null.h>
19 #include <crypto/rng.h>
20 #include <crypto/scatterwalk.h>
21 #include <linux/err.h>
22 #include <linux/init.h>
23 #include <linux/kernel.h>
24 #include <linux/module.h>
25 #include <linux/slab.h>
26 #include <linux/spinlock.h>
27 #include <linux/string.h>
31 u8 salt
[] __attribute__ ((aligned(__alignof__(u32
))));
34 struct seqiv_aead_ctx
{
35 /* aead_geniv_ctx must be first the element */
36 struct aead_geniv_ctx geniv
;
37 struct crypto_blkcipher
*null
;
38 u8 salt
[] __attribute__ ((aligned(__alignof__(u32
))));
/* Forward declaration: needed by seqiv_tmpl before the definition below. */
static void seqiv_free(struct crypto_instance *inst);
43 static void seqiv_complete2(struct skcipher_givcrypt_request
*req
, int err
)
45 struct ablkcipher_request
*subreq
= skcipher_givcrypt_reqctx(req
);
46 struct crypto_ablkcipher
*geniv
;
48 if (err
== -EINPROGRESS
)
54 geniv
= skcipher_givcrypt_reqtfm(req
);
55 memcpy(req
->creq
.info
, subreq
->info
, crypto_ablkcipher_ivsize(geniv
));
61 static void seqiv_complete(struct crypto_async_request
*base
, int err
)
63 struct skcipher_givcrypt_request
*req
= base
->data
;
65 seqiv_complete2(req
, err
);
66 skcipher_givcrypt_complete(req
, err
);
69 static void seqiv_aead_complete2(struct aead_givcrypt_request
*req
, int err
)
71 struct aead_request
*subreq
= aead_givcrypt_reqctx(req
);
72 struct crypto_aead
*geniv
;
74 if (err
== -EINPROGRESS
)
80 geniv
= aead_givcrypt_reqtfm(req
);
81 memcpy(req
->areq
.iv
, subreq
->iv
, crypto_aead_ivsize(geniv
));
87 static void seqiv_aead_complete(struct crypto_async_request
*base
, int err
)
89 struct aead_givcrypt_request
*req
= base
->data
;
91 seqiv_aead_complete2(req
, err
);
92 aead_givcrypt_complete(req
, err
);
95 static void seqiv_aead_encrypt_complete2(struct aead_request
*req
, int err
)
97 struct aead_request
*subreq
= aead_request_ctx(req
);
98 struct crypto_aead
*geniv
;
100 if (err
== -EINPROGRESS
)
106 geniv
= crypto_aead_reqtfm(req
);
107 memcpy(req
->iv
, subreq
->iv
, crypto_aead_ivsize(geniv
));
113 static void seqiv_aead_encrypt_complete(struct crypto_async_request
*base
,
116 struct aead_request
*req
= base
->data
;
118 seqiv_aead_encrypt_complete2(req
, err
);
119 aead_request_complete(req
, err
);
122 static void seqiv_geniv(struct seqiv_ctx
*ctx
, u8
*info
, u64 seq
,
125 unsigned int len
= ivsize
;
127 if (ivsize
> sizeof(u64
)) {
128 memset(info
, 0, ivsize
- sizeof(u64
));
131 seq
= cpu_to_be64(seq
);
132 memcpy(info
+ ivsize
- len
, &seq
, len
);
133 crypto_xor(info
, ctx
->salt
, ivsize
);
136 static int seqiv_givencrypt(struct skcipher_givcrypt_request
*req
)
138 struct crypto_ablkcipher
*geniv
= skcipher_givcrypt_reqtfm(req
);
139 struct seqiv_ctx
*ctx
= crypto_ablkcipher_ctx(geniv
);
140 struct ablkcipher_request
*subreq
= skcipher_givcrypt_reqctx(req
);
141 crypto_completion_t
compl;
147 ablkcipher_request_set_tfm(subreq
, skcipher_geniv_cipher(geniv
));
149 compl = req
->creq
.base
.complete
;
150 data
= req
->creq
.base
.data
;
151 info
= req
->creq
.info
;
153 ivsize
= crypto_ablkcipher_ivsize(geniv
);
155 if (unlikely(!IS_ALIGNED((unsigned long)info
,
156 crypto_ablkcipher_alignmask(geniv
) + 1))) {
157 info
= kmalloc(ivsize
, req
->creq
.base
.flags
&
158 CRYPTO_TFM_REQ_MAY_SLEEP
? GFP_KERNEL
:
163 compl = seqiv_complete
;
167 ablkcipher_request_set_callback(subreq
, req
->creq
.base
.flags
, compl,
169 ablkcipher_request_set_crypt(subreq
, req
->creq
.src
, req
->creq
.dst
,
170 req
->creq
.nbytes
, info
);
172 seqiv_geniv(ctx
, info
, req
->seq
, ivsize
);
173 memcpy(req
->giv
, info
, ivsize
);
175 err
= crypto_ablkcipher_encrypt(subreq
);
176 if (unlikely(info
!= req
->creq
.info
))
177 seqiv_complete2(req
, err
);
181 static int seqiv_aead_givencrypt(struct aead_givcrypt_request
*req
)
183 struct crypto_aead
*geniv
= aead_givcrypt_reqtfm(req
);
184 struct seqiv_ctx
*ctx
= crypto_aead_ctx(geniv
);
185 struct aead_request
*areq
= &req
->areq
;
186 struct aead_request
*subreq
= aead_givcrypt_reqctx(req
);
187 crypto_completion_t
compl;
193 aead_request_set_tfm(subreq
, aead_geniv_base(geniv
));
195 compl = areq
->base
.complete
;
196 data
= areq
->base
.data
;
199 ivsize
= crypto_aead_ivsize(geniv
);
201 if (unlikely(!IS_ALIGNED((unsigned long)info
,
202 crypto_aead_alignmask(geniv
) + 1))) {
203 info
= kmalloc(ivsize
, areq
->base
.flags
&
204 CRYPTO_TFM_REQ_MAY_SLEEP
? GFP_KERNEL
:
209 compl = seqiv_aead_complete
;
213 aead_request_set_callback(subreq
, areq
->base
.flags
, compl, data
);
214 aead_request_set_crypt(subreq
, areq
->src
, areq
->dst
, areq
->cryptlen
,
216 aead_request_set_assoc(subreq
, areq
->assoc
, areq
->assoclen
);
218 seqiv_geniv(ctx
, info
, req
->seq
, ivsize
);
219 memcpy(req
->giv
, info
, ivsize
);
221 err
= crypto_aead_encrypt(subreq
);
222 if (unlikely(info
!= areq
->iv
))
223 seqiv_aead_complete2(req
, err
);
227 static int seqiv_aead_encrypt(struct aead_request
*req
)
229 struct crypto_aead
*geniv
= crypto_aead_reqtfm(req
);
230 struct seqiv_aead_ctx
*ctx
= crypto_aead_ctx(geniv
);
231 struct aead_request
*subreq
= aead_request_ctx(req
);
232 crypto_completion_t
compl;
235 unsigned int ivsize
= 8;
238 if (req
->cryptlen
< ivsize
)
241 aead_request_set_tfm(subreq
, ctx
->geniv
.child
);
243 compl = req
->base
.complete
;
244 data
= req
->base
.data
;
247 if (req
->src
!= req
->dst
) {
248 struct blkcipher_desc desc
= {
252 err
= crypto_blkcipher_encrypt(&desc
, req
->dst
, req
->src
,
253 req
->assoclen
+ req
->cryptlen
);
258 if (unlikely(!IS_ALIGNED((unsigned long)info
,
259 crypto_aead_alignmask(geniv
) + 1))) {
260 info
= kmalloc(ivsize
, req
->base
.flags
&
261 CRYPTO_TFM_REQ_MAY_SLEEP
? GFP_KERNEL
:
266 memcpy(info
, req
->iv
, ivsize
);
267 compl = seqiv_aead_encrypt_complete
;
271 aead_request_set_callback(subreq
, req
->base
.flags
, compl, data
);
272 aead_request_set_crypt(subreq
, req
->dst
, req
->dst
,
273 req
->cryptlen
- ivsize
, info
);
274 aead_request_set_ad(subreq
, req
->assoclen
+ ivsize
);
276 crypto_xor(info
, ctx
->salt
, ivsize
);
277 scatterwalk_map_and_copy(info
, req
->dst
, req
->assoclen
, ivsize
, 1);
279 err
= crypto_aead_encrypt(subreq
);
280 if (unlikely(info
!= req
->iv
))
281 seqiv_aead_encrypt_complete2(req
, err
);
285 static int seqiv_aead_decrypt(struct aead_request
*req
)
287 struct crypto_aead
*geniv
= crypto_aead_reqtfm(req
);
288 struct seqiv_aead_ctx
*ctx
= crypto_aead_ctx(geniv
);
289 struct aead_request
*subreq
= aead_request_ctx(req
);
290 crypto_completion_t
compl;
292 unsigned int ivsize
= 8;
294 if (req
->cryptlen
< ivsize
+ crypto_aead_authsize(geniv
))
297 aead_request_set_tfm(subreq
, ctx
->geniv
.child
);
299 compl = req
->base
.complete
;
300 data
= req
->base
.data
;
302 aead_request_set_callback(subreq
, req
->base
.flags
, compl, data
);
303 aead_request_set_crypt(subreq
, req
->src
, req
->dst
,
304 req
->cryptlen
- ivsize
, req
->iv
);
305 aead_request_set_ad(subreq
, req
->assoclen
+ ivsize
);
307 scatterwalk_map_and_copy(req
->iv
, req
->src
, req
->assoclen
, ivsize
, 0);
309 return crypto_aead_decrypt(subreq
);
312 static int seqiv_init(struct crypto_tfm
*tfm
)
314 struct crypto_ablkcipher
*geniv
= __crypto_ablkcipher_cast(tfm
);
315 struct seqiv_ctx
*ctx
= crypto_ablkcipher_ctx(geniv
);
318 spin_lock_init(&ctx
->lock
);
320 tfm
->crt_ablkcipher
.reqsize
= sizeof(struct ablkcipher_request
);
323 if (!crypto_get_default_rng()) {
324 crypto_ablkcipher_crt(geniv
)->givencrypt
= seqiv_givencrypt
;
325 err
= crypto_rng_get_bytes(crypto_default_rng
, ctx
->salt
,
326 crypto_ablkcipher_ivsize(geniv
));
327 crypto_put_default_rng();
330 return err
?: skcipher_geniv_init(tfm
);
333 static int seqiv_old_aead_init(struct crypto_tfm
*tfm
)
335 struct crypto_aead
*geniv
= __crypto_aead_cast(tfm
);
336 struct seqiv_ctx
*ctx
= crypto_aead_ctx(geniv
);
339 spin_lock_init(&ctx
->lock
);
341 crypto_aead_set_reqsize(__crypto_aead_cast(tfm
),
342 sizeof(struct aead_request
));
344 if (!crypto_get_default_rng()) {
345 geniv
->givencrypt
= seqiv_aead_givencrypt
;
346 err
= crypto_rng_get_bytes(crypto_default_rng
, ctx
->salt
,
347 crypto_aead_ivsize(geniv
));
348 crypto_put_default_rng();
351 return err
?: aead_geniv_init(tfm
);
354 static int seqiv_aead_init_common(struct crypto_aead
*geniv
,
355 unsigned int reqsize
)
357 struct seqiv_aead_ctx
*ctx
= crypto_aead_ctx(geniv
);
360 spin_lock_init(&ctx
->geniv
.lock
);
362 crypto_aead_set_reqsize(geniv
, sizeof(struct aead_request
));
364 err
= crypto_get_default_rng();
368 err
= crypto_rng_get_bytes(crypto_default_rng
, ctx
->salt
,
369 crypto_aead_ivsize(geniv
));
370 crypto_put_default_rng();
374 ctx
->null
= crypto_get_default_null_skcipher();
375 err
= PTR_ERR(ctx
->null
);
376 if (IS_ERR(ctx
->null
))
379 err
= aead_geniv_init(crypto_aead_tfm(geniv
));
383 ctx
->geniv
.child
= geniv
->child
;
384 geniv
->child
= geniv
;
390 crypto_put_default_null_skcipher();
394 static int seqiv_aead_init(struct crypto_aead
*tfm
)
396 return seqiv_aead_init_common(tfm
, sizeof(struct aead_request
));
399 static void seqiv_aead_exit(struct crypto_aead
*tfm
)
401 struct seqiv_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
403 crypto_free_aead(ctx
->geniv
.child
);
404 crypto_put_default_null_skcipher();
407 static int seqiv_ablkcipher_create(struct crypto_template
*tmpl
,
410 struct crypto_instance
*inst
;
413 inst
= skcipher_geniv_alloc(tmpl
, tb
, 0, 0);
416 return PTR_ERR(inst
);
419 if (inst
->alg
.cra_ablkcipher
.ivsize
< sizeof(u64
))
422 inst
->alg
.cra_init
= seqiv_init
;
423 inst
->alg
.cra_exit
= skcipher_geniv_exit
;
425 inst
->alg
.cra_ctxsize
+= inst
->alg
.cra_ablkcipher
.ivsize
;
426 inst
->alg
.cra_ctxsize
+= sizeof(struct seqiv_ctx
);
428 inst
->alg
.cra_alignmask
|= __alignof__(u32
) - 1;
430 err
= crypto_register_instance(tmpl
, inst
);
438 skcipher_geniv_free(inst
);
442 static int seqiv_old_aead_create(struct crypto_template
*tmpl
,
443 struct aead_instance
*aead
)
445 struct crypto_instance
*inst
= aead_crypto_instance(aead
);
448 if (inst
->alg
.cra_aead
.ivsize
< sizeof(u64
))
451 inst
->alg
.cra_init
= seqiv_old_aead_init
;
452 inst
->alg
.cra_exit
= aead_geniv_exit
;
454 inst
->alg
.cra_ctxsize
= inst
->alg
.cra_aead
.ivsize
;
455 inst
->alg
.cra_ctxsize
+= sizeof(struct seqiv_ctx
);
457 err
= crypto_register_instance(tmpl
, inst
);
465 aead_geniv_free(aead
);
469 static int seqiv_aead_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
471 struct aead_instance
*inst
;
472 struct crypto_aead_spawn
*spawn
;
473 struct aead_alg
*alg
;
476 inst
= aead_geniv_alloc(tmpl
, tb
, 0, 0);
479 return PTR_ERR(inst
);
481 inst
->alg
.base
.cra_alignmask
|= __alignof__(u32
) - 1;
483 if (inst
->alg
.base
.cra_aead
.encrypt
)
484 return seqiv_old_aead_create(tmpl
, inst
);
486 spawn
= aead_instance_ctx(inst
);
487 alg
= crypto_spawn_aead_alg(spawn
);
489 if (alg
->base
.cra_aead
.encrypt
)
493 if (inst
->alg
.ivsize
!= sizeof(u64
))
496 inst
->alg
.encrypt
= seqiv_aead_encrypt
;
497 inst
->alg
.decrypt
= seqiv_aead_decrypt
;
499 inst
->alg
.init
= seqiv_aead_init
;
500 inst
->alg
.exit
= seqiv_aead_exit
;
502 inst
->alg
.base
.cra_ctxsize
= sizeof(struct seqiv_aead_ctx
);
503 inst
->alg
.base
.cra_ctxsize
+= inst
->alg
.ivsize
;
506 err
= aead_register_instance(tmpl
, inst
);
514 aead_geniv_free(inst
);
518 static int seqiv_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
520 struct crypto_attr_type
*algt
;
523 algt
= crypto_get_attr_type(tb
);
525 return PTR_ERR(algt
);
527 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & CRYPTO_ALG_TYPE_MASK
)
528 err
= seqiv_ablkcipher_create(tmpl
, tb
);
530 err
= seqiv_aead_create(tmpl
, tb
);
535 static void seqiv_free(struct crypto_instance
*inst
)
537 if ((inst
->alg
.cra_flags
^ CRYPTO_ALG_TYPE_AEAD
) & CRYPTO_ALG_TYPE_MASK
)
538 skcipher_geniv_free(inst
);
540 aead_geniv_free(aead_instance(inst
));
543 static struct crypto_template seqiv_tmpl
= {
545 .create
= seqiv_create
,
547 .module
= THIS_MODULE
,
550 static int __init
seqiv_module_init(void)
552 return crypto_register_template(&seqiv_tmpl
);
555 static void __exit
seqiv_module_exit(void)
557 crypto_unregister_template(&seqiv_tmpl
);
module_init(seqiv_module_init);
module_exit(seqiv_module_exit);
563 MODULE_LICENSE("GPL");
564 MODULE_DESCRIPTION("Sequence Number IV Generator");
565 MODULE_ALIAS_CRYPTO("seqiv");