/*
 * seqiv: Sequence Number IV Generator
 *
 * This generator generates an IV based on a sequence number by xoring it
 * with a salt.  This algorithm is mainly useful for CTR and similar modes.
 * (A standalone sketch of the IV derivation follows the #include block
 * below.)
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/internal/geniv.h>
#include <crypto/internal/skcipher.h>
#include <crypto/null.h>
#include <crypto/rng.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/string.h>
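
/*
 * Illustrative sketch only -- this helper is not part of the original file
 * and is not used by the code below; its name is purely hypothetical.  For
 * the common 8-byte IV case the generated IV is simply the big-endian
 * sequence number XORed with the per-context random salt.  The real code
 * (seqiv_geniv() and seqiv_aead_encrypt() below) uses crypto_xor() and also
 * handles IVs longer than 8 bytes by zero-padding the leading bytes.
 */
static inline void seqiv_example_derive(u8 *iv, const u8 *salt, u64 seq)
{
	__be64 beseq = cpu_to_be64(seq);	/* sequence number, big-endian */
	int i;

	memcpy(iv, &beseq, sizeof(beseq));	/* IV <- seq */
	for (i = 0; i < 8; i++)
		iv[i] ^= salt[i];		/* IV <- IV ^ salt */
}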
struct seqniv_request_ctx {
	struct scatterlist dst[2];
	struct aead_request subreq;
};

struct seqiv_ctx {
	spinlock_t lock;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};

struct seqiv_aead_ctx {
	/* aead_geniv_ctx must be the first element */
	struct aead_geniv_ctx geniv;
	struct crypto_blkcipher *null;
	u8 salt[] __attribute__ ((aligned(__alignof__(u32))));
};
static void seqiv_free(struct crypto_instance *inst);
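
/*
 * Completion handlers for the legacy givcrypt path: once the inner cipher
 * has finished, copy the generated IV back into the caller's request and
 * free the temporary IV buffer that was allocated for misaligned callers.
 */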
static void seqiv_complete2(struct skcipher_givcrypt_request *req, int err)
{
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	struct crypto_ablkcipher *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = skcipher_givcrypt_reqtfm(req);
	memcpy(req->creq.info, subreq->info, crypto_ablkcipher_ivsize(geniv));

out:
	kfree(subreq->info);
}
static void seqiv_complete(struct crypto_async_request *base, int err)
{
	struct skcipher_givcrypt_request *req = base->data;

	seqiv_complete2(req, err);
	skcipher_givcrypt_complete(req, err);
}
static void seqiv_aead_complete2(struct aead_givcrypt_request *req, int err)
{
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = aead_givcrypt_reqtfm(req);
	memcpy(req->areq.iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kfree(subreq->iv);
}
static void seqiv_aead_complete(struct crypto_async_request *base, int err)
{
	struct aead_givcrypt_request *req = base->data;

	seqiv_aead_complete2(req, err);
	aead_givcrypt_complete(req, err);
}
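
/*
 * Completion handlers for the new AEAD interface: as above, copy the
 * generated IV back to the caller and release the temporary IV buffer.
 */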
static void seqiv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	struct aead_request *subreq = aead_request_ctx(req);
	struct crypto_aead *geniv;

	if (err == -EINPROGRESS)
		return;

	if (err)
		goto out;

	geniv = crypto_aead_reqtfm(req);
	memcpy(req->iv, subreq->iv, crypto_aead_ivsize(geniv));

out:
	kzfree(subreq->iv);
}
static void seqiv_aead_encrypt_complete(struct crypto_async_request *base,
					int err)
{
	struct aead_request *req = base->data;

	seqiv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}
static void seqniv_aead_encrypt_complete2(struct aead_request *req, int err)
{
	unsigned int ivsize = 8;
	u8 data[20];

	if (err == -EINPROGRESS)
		return;

	/* Swap IV and ESP header back to correct order. */
	scatterwalk_map_and_copy(data, req->dst, 0, req->assoclen + ivsize, 0);
	scatterwalk_map_and_copy(data + ivsize, req->dst, 0, req->assoclen, 1);
	scatterwalk_map_and_copy(data, req->dst, req->assoclen, ivsize, 1);
}
static void seqniv_aead_encrypt_complete(struct crypto_async_request *base,
					 int err)
{
	struct aead_request *req = base->data;

	seqniv_aead_encrypt_complete2(req, err);
	aead_request_complete(req, err);
}
static void seqniv_aead_decrypt_complete2(struct aead_request *req, int err)
{
	u8 data[4];

	if (err == -EINPROGRESS)
		return;

	/* Move ESP header back to correct location. */
	scatterwalk_map_and_copy(data, req->dst, 16, req->assoclen - 8, 0);
	scatterwalk_map_and_copy(data, req->dst, 8, req->assoclen - 8, 1);
}
static void seqniv_aead_decrypt_complete(struct crypto_async_request *base,
					 int err)
{
	struct aead_request *req = base->data;

	seqniv_aead_decrypt_complete2(req, err);
	aead_request_complete(req, err);
}
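
/*
 * Derive one IV: write the 64-bit sequence number, big-endian, into the
 * low-order bytes of the IV (zero-padding any leading bytes for IVs longer
 * than 8 bytes) and XOR the whole IV with the per-context salt.
 */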
static void seqiv_geniv(struct seqiv_ctx *ctx, u8 *info, u64 seq,
			unsigned int ivsize)
{
	unsigned int len = ivsize;

	if (ivsize > sizeof(u64)) {
		memset(info, 0, ivsize - sizeof(u64));
		len = sizeof(u64);
	}

	seq = cpu_to_be64(seq);
	memcpy(info + ivsize - len, &seq, len);
	crypto_xor(info, ctx->salt, ivsize);
}
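
/*
 * Legacy givcrypt entry point: derive the IV, hand the actual encryption to
 * the inner cipher and copy the IV into req->giv.  A misaligned caller IV
 * is bounced through a temporary buffer and copied back on completion.
 */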
static int seqiv_givencrypt(struct skcipher_givcrypt_request *req)
{
	struct crypto_ablkcipher *geniv = skcipher_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	struct ablkcipher_request *subreq = skcipher_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	ablkcipher_request_set_tfm(subreq, skcipher_geniv_cipher(geniv));

	compl = req->creq.base.complete;
	data = req->creq.base.data;
	info = req->creq.info;

	ivsize = crypto_ablkcipher_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_ablkcipher_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->creq.base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ?
				       GFP_KERNEL : GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_complete;
		data = req;
	}

	ablkcipher_request_set_callback(subreq, req->creq.base.flags, compl,
					data);
	ablkcipher_request_set_crypt(subreq, req->creq.src, req->creq.dst,
				     req->creq.nbytes, info);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_ablkcipher_encrypt(subreq);
	if (unlikely(info != req->creq.info))
		seqiv_complete2(req, err);
	return err;
}
static int seqiv_aead_givencrypt(struct aead_givcrypt_request *req)
{
	struct crypto_aead *geniv = aead_givcrypt_reqtfm(req);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *areq = &req->areq;
	struct aead_request *subreq = aead_givcrypt_reqctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize;
	int err;

	aead_request_set_tfm(subreq, aead_geniv_base(geniv));

	compl = areq->base.complete;
	data = areq->base.data;
	info = areq->iv;

	ivsize = crypto_aead_ivsize(geniv);

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, areq->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ?
				       GFP_KERNEL : GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		compl = seqiv_aead_complete;
		data = req;
	}

	aead_request_set_callback(subreq, areq->base.flags, compl, data);
	aead_request_set_crypt(subreq, areq->src, areq->dst, areq->cryptlen,
			       info);
	aead_request_set_assoc(subreq, areq->assoc, areq->assoclen);

	seqiv_geniv(ctx, info, req->seq, ivsize);
	memcpy(req->giv, info, ivsize);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != areq->iv))
		seqiv_aead_complete2(req, err);
	return err;
}
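
/*
 * seqniv encryption: the sequence-number IV is salted in place (req->iv),
 * and the leading ivsize bytes of the output are swapped with the ESP AD
 * around the inner encryption so that the AD sits directly in front of the
 * ciphertext for ICV generation; the AD-then-IV layout is restored on
 * completion.
 */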
static int seqniv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct seqniv_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct scatterlist *dst;
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;
	u8 buf[20] __attribute__ ((aligned(__alignof__(u32))));
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	/* ESP AD is at most 12 bytes (ESN). */
	if (req->assoclen > 12)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = seqniv_aead_encrypt_complete;
	data = req;

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	dst = scatterwalk_ffwd(rctx->dst, req->dst, ivsize);

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, dst, dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	memcpy(buf, req->iv, ivsize);
	crypto_xor(buf, ctx->salt, ivsize);
	memcpy(req->iv, buf, ivsize);

	/* Swap order of IV and ESP AD for ICV generation. */
	scatterwalk_map_and_copy(buf + ivsize, req->dst, 0, req->assoclen, 0);
	scatterwalk_map_and_copy(buf, req->dst, 0, req->assoclen + ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	seqniv_aead_encrypt_complete2(req, err);
	return err;
}
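
/*
 * Standard seqiv AEAD encryption: copy src to dst if they differ (via the
 * default null cipher), write the salted IV into dst at req->assoclen and
 * let the inner algorithm authenticate the AD plus the IV.
 */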
static int seqiv_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	u8 *info;
	unsigned int ivsize = 8;
	int err;

	if (req->cryptlen < ivsize)
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;
	info = req->iv;

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	if (unlikely(!IS_ALIGNED((unsigned long)info,
				 crypto_aead_alignmask(geniv) + 1))) {
		info = kmalloc(ivsize, req->base.flags &
				       CRYPTO_TFM_REQ_MAY_SLEEP ?
				       GFP_KERNEL : GFP_ATOMIC);
		if (!info)
			return -ENOMEM;

		memcpy(info, req->iv, ivsize);
		compl = seqiv_aead_encrypt_complete;
		data = req;
	}

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->dst, req->dst,
			       req->cryptlen - ivsize, info);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	crypto_xor(info, ctx->salt, ivsize);
	scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);

	err = crypto_aead_encrypt(subreq);
	if (unlikely(info != req->iv))
		seqiv_aead_encrypt_complete2(req, err);
	return err;
}
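
/*
 * seqniv decryption: read the received IV out of the buffer, shift the ESP
 * AD forward so it immediately precedes the ciphertext for ICV
 * verification, and (for AD longer than 8 bytes) shift it back once the
 * inner decrypt completes.
 */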
static int seqniv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct seqniv_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct scatterlist *dst;
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;
	u8 buf[20];
	int err;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;

	if (req->assoclen > 12)
		return -EINVAL;
	else if (req->assoclen > 8) {
		compl = seqniv_aead_decrypt_complete;
		data = req;
	}

	if (req->src != req->dst) {
		struct blkcipher_desc desc = {
			.tfm = ctx->null,
		};

		err = crypto_blkcipher_encrypt(&desc, req->dst, req->src,
					       req->assoclen + req->cryptlen);
		if (err)
			return err;
	}

	/* Move ESP AD forward for ICV generation. */
	scatterwalk_map_and_copy(buf, req->dst, 0, req->assoclen + ivsize, 0);
	memcpy(req->iv, buf + req->assoclen, ivsize);
	scatterwalk_map_and_copy(buf, req->dst, ivsize, req->assoclen, 1);

	dst = scatterwalk_ffwd(rctx->dst, req->dst, ivsize);

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, dst, dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	err = crypto_aead_decrypt(subreq);
	if (req->assoclen > 8)
		seqniv_aead_decrypt_complete2(req, err);
	return err;
}
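
/*
 * Standard seqiv AEAD decryption: copy the received IV from the source
 * buffer into req->iv, include it in the inner request's AD and decrypt
 * the rest.
 */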
static int seqiv_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *geniv = crypto_aead_reqtfm(req);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	struct aead_request *subreq = aead_request_ctx(req);
	crypto_completion_t compl;
	void *data;
	unsigned int ivsize = 8;

	if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
		return -EINVAL;

	aead_request_set_tfm(subreq, ctx->geniv.child);

	compl = req->base.complete;
	data = req->base.data;

	aead_request_set_callback(subreq, req->base.flags, compl, data);
	aead_request_set_crypt(subreq, req->src, req->dst,
			       req->cryptlen - ivsize, req->iv);
	aead_request_set_ad(subreq, req->assoclen + ivsize);

	scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
	if (req->src != req->dst)
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen, ivsize, 1);

	return crypto_aead_decrypt(subreq);
}
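
/*
 * Instance init/exit: seed the per-context salt from the default RNG and
 * wire up the geniv machinery for the legacy and new interfaces.
 */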
static int seqiv_init(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher *geniv = __crypto_ablkcipher_cast(tfm);
	struct seqiv_ctx *ctx = crypto_ablkcipher_ctx(geniv);
	int err;

	spin_lock_init(&ctx->lock);

	tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request);

	err = 0;
	if (!crypto_get_default_rng()) {
		crypto_ablkcipher_crt(geniv)->givencrypt = seqiv_givencrypt;
		err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
					   crypto_ablkcipher_ivsize(geniv));
		crypto_put_default_rng();
	}

	return err ?: skcipher_geniv_init(tfm);
}
static int seqiv_old_aead_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct seqiv_ctx *ctx = crypto_aead_ctx(geniv);
	int err;

	spin_lock_init(&ctx->lock);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct aead_request));

	err = 0;
	if (!crypto_get_default_rng()) {
		geniv->givencrypt = seqiv_aead_givencrypt;
		err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
					   crypto_aead_ivsize(geniv));
		crypto_put_default_rng();
	}

	return err ?: aead_geniv_init(tfm);
}
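
/*
 * Common init for the new AEAD interface: seed the salt, grab the default
 * null skcipher (used to copy src to dst when they differ), save the real
 * inner transform in ctx->geniv.child and point geniv->child back at the
 * geniv itself.
 */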
static int seqiv_aead_init_common(struct crypto_tfm *tfm, unsigned int reqsize)
{
	struct crypto_aead *geniv = __crypto_aead_cast(tfm);
	struct seqiv_aead_ctx *ctx = crypto_aead_ctx(geniv);
	int err;

	spin_lock_init(&ctx->geniv.lock);

	crypto_aead_set_reqsize(geniv, sizeof(struct aead_request));

	err = crypto_get_default_rng();
	if (err)
		goto out;

	err = crypto_rng_get_bytes(crypto_default_rng, ctx->salt,
				   crypto_aead_ivsize(geniv));
	crypto_put_default_rng();
	if (err)
		goto out;

	ctx->null = crypto_get_default_null_skcipher();
	err = PTR_ERR(ctx->null);
	if (IS_ERR(ctx->null))
		goto out;

	err = aead_geniv_init(tfm);
	if (err)
		goto drop_null;

	ctx->geniv.child = geniv->child;
	geniv->child = geniv;

out:
	return err;

drop_null:
	crypto_put_default_null_skcipher();
	goto out;
}
static int seqiv_aead_init(struct crypto_tfm *tfm)
{
	return seqiv_aead_init_common(tfm, sizeof(struct aead_request));
}
static int seqniv_aead_init(struct crypto_tfm *tfm)
{
	return seqiv_aead_init_common(tfm, sizeof(struct seqniv_request_ctx));
}
static void seqiv_aead_exit(struct crypto_tfm *tfm)
{
	struct seqiv_aead_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->geniv.child);
	crypto_put_default_null_skcipher();
}
static int seqiv_ablkcipher_create(struct crypto_template *tmpl,
				   struct rtattr **tb)
{
	struct crypto_instance *inst;
	int err;

	inst = skcipher_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	err = -EINVAL;
	if (inst->alg.cra_ablkcipher.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_init = seqiv_init;
	inst->alg.cra_exit = skcipher_geniv_exit;

	inst->alg.cra_ctxsize += inst->alg.cra_ablkcipher.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	skcipher_geniv_free(inst);
	goto out;
}
static int seqiv_old_aead_create(struct crypto_template *tmpl,
				 struct aead_instance *aead)
{
	struct crypto_instance *inst = aead_crypto_instance(aead);
	int err = -EINVAL;

	if (inst->alg.cra_aead.ivsize < sizeof(u64))
		goto free_inst;

	inst->alg.cra_init = seqiv_old_aead_init;
	inst->alg.cra_exit = aead_geniv_exit;

	inst->alg.cra_ctxsize = inst->alg.cra_aead.ivsize;
	inst->alg.cra_ctxsize += sizeof(struct seqiv_ctx);

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(aead);
	goto out;
}
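
/*
 * Template creation: build a geniv instance around the underlying AEAD,
 * falling back to the legacy path when the spawned algorithm still uses
 * the old interface.
 */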
static int seqiv_aead_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	if (inst->alg.base.cra_aead.encrypt)
		return seqiv_old_aead_create(tmpl, inst);

	spawn = aead_instance_ctx(inst);
	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;
	if (alg->base.cra_aead.encrypt)
		goto free_inst;

	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqiv_aead_encrypt;
	inst->alg.decrypt = seqiv_aead_decrypt;

	inst->alg.base.cra_init = seqiv_aead_init;
	inst->alg.base.cra_exit = seqiv_aead_exit;

	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.base.cra_aead.ivsize;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
	goto out;
}
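
/*
 * "seqiv" can wrap either an skcipher or an AEAD algorithm; dispatch on
 * the requested algorithm type.
 */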
static int seqiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		err = seqiv_ablkcipher_create(tmpl, tb);
	else
		err = seqiv_aead_create(tmpl, tb);

	return err;
}
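
/*
 * "seqniv" is the AEAD-only variant that keeps the generated IV out of the
 * inner algorithm's associated data (see seqniv_aead_encrypt() above).
 */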
static int seqniv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	inst = aead_geniv_alloc(tmpl, tb, 0, 0);

	if (IS_ERR(inst))
		return PTR_ERR(inst);

	spawn = aead_instance_ctx(inst);
	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;
	if (alg->base.cra_aead.encrypt)
		goto free_inst;

	if (inst->alg.ivsize != sizeof(u64))
		goto free_inst;

	inst->alg.encrypt = seqniv_aead_encrypt;
	inst->alg.decrypt = seqniv_aead_decrypt;

	inst->alg.base.cra_init = seqniv_aead_init;
	inst->alg.base.cra_exit = seqiv_aead_exit;

	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;
	inst->alg.base.cra_ctxsize = sizeof(struct seqiv_aead_ctx);
	inst->alg.base.cra_ctxsize += inst->alg.ivsize;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto free_inst;

out:
	return err;

free_inst:
	aead_geniv_free(inst);
	goto out;
}
static void seqiv_free(struct crypto_instance *inst)
{
	if ((inst->alg.cra_flags ^ CRYPTO_ALG_TYPE_AEAD) & CRYPTO_ALG_TYPE_MASK)
		skcipher_geniv_free(inst);
	else
		aead_geniv_free(aead_instance(inst));
}
static struct crypto_template seqiv_tmpl = {
	.name = "seqiv",
	.create = seqiv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};
static struct crypto_template seqniv_tmpl = {
	.name = "seqniv",
	.create = seqniv_create,
	.free = seqiv_free,
	.module = THIS_MODULE,
};
static int __init seqiv_module_init(void)
{
	int err;

	err = crypto_register_template(&seqiv_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&seqniv_tmpl);
	if (err)
		goto out_undo_niv;

out:
	return err;

out_undo_niv:
	crypto_unregister_template(&seqiv_tmpl);
	goto out;
}
static void __exit seqiv_module_exit(void)
{
	crypto_unregister_template(&seqniv_tmpl);
	crypto_unregister_template(&seqiv_tmpl);
}
module_init(seqiv_module_init);
module_exit(seqiv_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Sequence Number IV Generator");
MODULE_ALIAS_CRYPTO("seqiv");
MODULE_ALIAS_CRYPTO("seqniv");