/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * This file provides API support for AEAD algorithms.
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
15 #include <crypto/internal/geniv.h>
16 #include <crypto/internal/rng.h>
17 #include <crypto/null.h>
18 #include <crypto/scatterwalk.h>
19 #include <linux/err.h>
20 #include <linux/init.h>
21 #include <linux/kernel.h>
22 #include <linux/module.h>
23 #include <linux/rtnetlink.h>
24 #include <linux/slab.h>
25 #include <linux/seq_file.h>
26 #include <linux/cryptouser.h>
27 #include <net/netlink.h>
31 static int setkey_unaligned(struct crypto_aead
*tfm
, const u8
*key
,
34 unsigned long alignmask
= crypto_aead_alignmask(tfm
);
36 u8
*buffer
, *alignbuffer
;
39 absize
= keylen
+ alignmask
;
40 buffer
= kmalloc(absize
, GFP_ATOMIC
);
44 alignbuffer
= (u8
*)ALIGN((unsigned long)buffer
, alignmask
+ 1);
45 memcpy(alignbuffer
, key
, keylen
);
46 ret
= crypto_aead_alg(tfm
)->setkey(tfm
, alignbuffer
, keylen
);
47 memset(alignbuffer
, 0, keylen
);
52 int crypto_aead_setkey(struct crypto_aead
*tfm
,
53 const u8
*key
, unsigned int keylen
)
55 unsigned long alignmask
= crypto_aead_alignmask(tfm
);
57 if ((unsigned long)key
& alignmask
)
58 return setkey_unaligned(tfm
, key
, keylen
);
60 return crypto_aead_alg(tfm
)->setkey(tfm
, key
, keylen
);
62 EXPORT_SYMBOL_GPL(crypto_aead_setkey
);
64 int crypto_aead_setauthsize(struct crypto_aead
*tfm
, unsigned int authsize
)
68 if (authsize
> crypto_aead_maxauthsize(tfm
))
71 if (crypto_aead_alg(tfm
)->setauthsize
) {
72 err
= crypto_aead_alg(tfm
)->setauthsize(tfm
, authsize
);
77 tfm
->authsize
= authsize
;
80 EXPORT_SYMBOL_GPL(crypto_aead_setauthsize
);
82 static void crypto_aead_exit_tfm(struct crypto_tfm
*tfm
)
84 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
85 struct aead_alg
*alg
= crypto_aead_alg(aead
);
90 static int crypto_aead_init_tfm(struct crypto_tfm
*tfm
)
92 struct crypto_aead
*aead
= __crypto_aead_cast(tfm
);
93 struct aead_alg
*alg
= crypto_aead_alg(aead
);
95 aead
->authsize
= alg
->maxauthsize
;
98 aead
->base
.exit
= crypto_aead_exit_tfm
;
101 return alg
->init(aead
);
107 static int crypto_aead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
109 struct crypto_report_aead raead
;
110 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
112 strncpy(raead
.type
, "aead", sizeof(raead
.type
));
113 strncpy(raead
.geniv
, "<none>", sizeof(raead
.geniv
));
115 raead
.blocksize
= alg
->cra_blocksize
;
116 raead
.maxauthsize
= aead
->maxauthsize
;
117 raead
.ivsize
= aead
->ivsize
;
119 if (nla_put(skb
, CRYPTOCFGA_REPORT_AEAD
,
120 sizeof(struct crypto_report_aead
), &raead
))
121 goto nla_put_failure
;
128 static int crypto_aead_report(struct sk_buff
*skb
, struct crypto_alg
*alg
)
134 static void crypto_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
135 __attribute__ ((unused
));
136 static void crypto_aead_show(struct seq_file
*m
, struct crypto_alg
*alg
)
138 struct aead_alg
*aead
= container_of(alg
, struct aead_alg
, base
);
140 seq_printf(m
, "type : aead\n");
141 seq_printf(m
, "async : %s\n", alg
->cra_flags
& CRYPTO_ALG_ASYNC
?
143 seq_printf(m
, "blocksize : %u\n", alg
->cra_blocksize
);
144 seq_printf(m
, "ivsize : %u\n", aead
->ivsize
);
145 seq_printf(m
, "maxauthsize : %u\n", aead
->maxauthsize
);
146 seq_printf(m
, "geniv : <none>\n");
149 static void crypto_aead_free_instance(struct crypto_instance
*inst
)
151 struct aead_instance
*aead
= aead_instance(inst
);
154 inst
->tmpl
->free(inst
);
161 static const struct crypto_type crypto_aead_type
= {
162 .extsize
= crypto_alg_extsize
,
163 .init_tfm
= crypto_aead_init_tfm
,
164 .free
= crypto_aead_free_instance
,
165 #ifdef CONFIG_PROC_FS
166 .show
= crypto_aead_show
,
168 .report
= crypto_aead_report
,
169 .maskclear
= ~CRYPTO_ALG_TYPE_MASK
,
170 .maskset
= CRYPTO_ALG_TYPE_MASK
,
171 .type
= CRYPTO_ALG_TYPE_AEAD
,
172 .tfmsize
= offsetof(struct crypto_aead
, base
),
175 static int aead_geniv_setkey(struct crypto_aead
*tfm
,
176 const u8
*key
, unsigned int keylen
)
178 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(tfm
);
180 return crypto_aead_setkey(ctx
->child
, key
, keylen
);
183 static int aead_geniv_setauthsize(struct crypto_aead
*tfm
,
184 unsigned int authsize
)
186 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(tfm
);
188 return crypto_aead_setauthsize(ctx
->child
, authsize
);
191 struct aead_instance
*aead_geniv_alloc(struct crypto_template
*tmpl
,
192 struct rtattr
**tb
, u32 type
, u32 mask
)
195 struct crypto_aead_spawn
*spawn
;
196 struct crypto_attr_type
*algt
;
197 struct aead_instance
*inst
;
198 struct aead_alg
*alg
;
200 unsigned int maxauthsize
;
203 algt
= crypto_get_attr_type(tb
);
205 return ERR_CAST(algt
);
207 if ((algt
->type
^ CRYPTO_ALG_TYPE_AEAD
) & algt
->mask
)
208 return ERR_PTR(-EINVAL
);
210 name
= crypto_attr_alg_name(tb
[1]);
212 return ERR_CAST(name
);
214 inst
= kzalloc(sizeof(*inst
) + sizeof(*spawn
), GFP_KERNEL
);
216 return ERR_PTR(-ENOMEM
);
218 spawn
= aead_instance_ctx(inst
);
220 /* Ignore async algorithms if necessary. */
221 mask
|= crypto_requires_sync(algt
->type
, algt
->mask
);
223 crypto_set_aead_spawn(spawn
, aead_crypto_instance(inst
));
224 err
= crypto_grab_aead(spawn
, name
, type
, mask
);
228 alg
= crypto_spawn_aead_alg(spawn
);
230 ivsize
= crypto_aead_alg_ivsize(alg
);
231 maxauthsize
= crypto_aead_alg_maxauthsize(alg
);
234 if (ivsize
< sizeof(u64
))
238 if (snprintf(inst
->alg
.base
.cra_name
, CRYPTO_MAX_ALG_NAME
,
239 "%s(%s)", tmpl
->name
, alg
->base
.cra_name
) >=
242 if (snprintf(inst
->alg
.base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
243 "%s(%s)", tmpl
->name
, alg
->base
.cra_driver_name
) >=
247 inst
->alg
.base
.cra_flags
= alg
->base
.cra_flags
& CRYPTO_ALG_ASYNC
;
248 inst
->alg
.base
.cra_priority
= alg
->base
.cra_priority
;
249 inst
->alg
.base
.cra_blocksize
= alg
->base
.cra_blocksize
;
250 inst
->alg
.base
.cra_alignmask
= alg
->base
.cra_alignmask
;
251 inst
->alg
.base
.cra_ctxsize
= sizeof(struct aead_geniv_ctx
);
253 inst
->alg
.setkey
= aead_geniv_setkey
;
254 inst
->alg
.setauthsize
= aead_geniv_setauthsize
;
256 inst
->alg
.ivsize
= ivsize
;
257 inst
->alg
.maxauthsize
= maxauthsize
;
263 crypto_drop_aead(spawn
);
269 EXPORT_SYMBOL_GPL(aead_geniv_alloc
);
/* Release the spawn reference and free an instance built by aead_geniv_alloc(). */
void aead_geniv_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(aead_geniv_free);
278 int aead_init_geniv(struct crypto_aead
*aead
)
280 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(aead
);
281 struct aead_instance
*inst
= aead_alg_instance(aead
);
282 struct crypto_aead
*child
;
285 spin_lock_init(&ctx
->lock
);
287 err
= crypto_get_default_rng();
291 err
= crypto_rng_get_bytes(crypto_default_rng
, ctx
->salt
,
292 crypto_aead_ivsize(aead
));
293 crypto_put_default_rng();
297 ctx
->sknull
= crypto_get_default_null_skcipher2();
298 err
= PTR_ERR(ctx
->sknull
);
299 if (IS_ERR(ctx
->sknull
))
302 child
= crypto_spawn_aead(aead_instance_ctx(inst
));
303 err
= PTR_ERR(child
);
308 crypto_aead_set_reqsize(aead
, crypto_aead_reqsize(child
) +
309 sizeof(struct aead_request
));
317 crypto_put_default_null_skcipher2();
320 EXPORT_SYMBOL_GPL(aead_init_geniv
);
322 void aead_exit_geniv(struct crypto_aead
*tfm
)
324 struct aead_geniv_ctx
*ctx
= crypto_aead_ctx(tfm
);
326 crypto_free_aead(ctx
->child
);
327 crypto_put_default_null_skcipher2();
329 EXPORT_SYMBOL_GPL(aead_exit_geniv
);
331 int crypto_grab_aead(struct crypto_aead_spawn
*spawn
, const char *name
,
334 spawn
->base
.frontend
= &crypto_aead_type
;
335 return crypto_grab_spawn(&spawn
->base
, name
, type
, mask
);
337 EXPORT_SYMBOL_GPL(crypto_grab_aead
);
339 struct crypto_aead
*crypto_alloc_aead(const char *alg_name
, u32 type
, u32 mask
)
341 return crypto_alloc_tfm(alg_name
, &crypto_aead_type
, type
, mask
);
343 EXPORT_SYMBOL_GPL(crypto_alloc_aead
);
345 static int aead_prepare_alg(struct aead_alg
*alg
)
347 struct crypto_alg
*base
= &alg
->base
;
349 if (max3(alg
->maxauthsize
, alg
->ivsize
, alg
->chunksize
) >
354 alg
->chunksize
= base
->cra_blocksize
;
356 base
->cra_type
= &crypto_aead_type
;
357 base
->cra_flags
&= ~CRYPTO_ALG_TYPE_MASK
;
358 base
->cra_flags
|= CRYPTO_ALG_TYPE_AEAD
;
363 int crypto_register_aead(struct aead_alg
*alg
)
365 struct crypto_alg
*base
= &alg
->base
;
368 err
= aead_prepare_alg(alg
);
372 return crypto_register_alg(base
);
374 EXPORT_SYMBOL_GPL(crypto_register_aead
);
376 void crypto_unregister_aead(struct aead_alg
*alg
)
378 crypto_unregister_alg(&alg
->base
);
380 EXPORT_SYMBOL_GPL(crypto_unregister_aead
);
382 int crypto_register_aeads(struct aead_alg
*algs
, int count
)
386 for (i
= 0; i
< count
; i
++) {
387 ret
= crypto_register_aead(&algs
[i
]);
395 for (--i
; i
>= 0; --i
)
396 crypto_unregister_aead(&algs
[i
]);
400 EXPORT_SYMBOL_GPL(crypto_register_aeads
);
402 void crypto_unregister_aeads(struct aead_alg
*algs
, int count
)
406 for (i
= count
- 1; i
>= 0; --i
)
407 crypto_unregister_aead(&algs
[i
]);
409 EXPORT_SYMBOL_GPL(crypto_unregister_aeads
);
411 int aead_register_instance(struct crypto_template
*tmpl
,
412 struct aead_instance
*inst
)
416 err
= aead_prepare_alg(&inst
->alg
);
420 return crypto_register_instance(tmpl
, aead_crypto_instance(inst
));
422 EXPORT_SYMBOL_GPL(aead_register_instance
);
424 MODULE_LICENSE("GPL");
425 MODULE_DESCRIPTION("Authenticated Encryption with Associated Data (AEAD)");