/*
 * Asynchronous block chaining cipher operations.
 *
 * This is the asynchronous version of blkcipher.c indicating completion
 * via a callback.
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/cpumask.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>

#include "internal.h"

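/*
 * Default IV generator template name; chosen at module init time based on
 * whether the machine has more than one possible CPU.
 */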
static const char *skcipher_default_geniv __read_mostly;

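/*
 * Bounce the key through a temporary buffer when the caller's pointer does
 * not satisfy the algorithm's alignment mask, then hand the aligned copy to
 * the algorithm's ->setkey().
 */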
static int setkey_unaligned(struct crypto_ablkcipher *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm);
        unsigned long alignmask = crypto_ablkcipher_alignmask(tfm);
        int ret;
        u8 *buffer, *alignbuffer;
        unsigned long absize;

        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = cipher->setkey(tfm, alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return ret;
}

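/*
 * Check the key length against the algorithm's limits and dispatch to the
 * aligned or unaligned setkey path.
 */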
static int setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                  unsigned int keylen)
{
        struct ablkcipher_alg *cipher = crypto_ablkcipher_alg(tfm);
        unsigned long alignmask = crypto_ablkcipher_alignmask(tfm);

        if (keylen < cipher->min_keysize || keylen > cipher->max_keysize) {
                crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        if ((unsigned long)key & alignmask)
                return setkey_unaligned(tfm, key, keylen);

        return cipher->setkey(tfm, key, keylen);
}

static unsigned int crypto_ablkcipher_ctxsize(struct crypto_alg *alg, u32 type,
                                              u32 mask)
{
        return alg->cra_ctxsize;
}

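/*
 * "Null" IV generation for algorithms without an IV: a givcrypt request
 * degenerates into a plain encrypt/decrypt of the embedded request.
 */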
int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req)
{
        return crypto_ablkcipher_encrypt(&req->creq);
}

int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req)
{
        return crypto_ablkcipher_decrypt(&req->creq);
}

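/* Set up the per-transform operations for a plain ablkcipher. */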
static int crypto_init_ablkcipher_ops(struct crypto_tfm *tfm, u32 type,
                                      u32 mask)
{
        struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
        struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;

        if (alg->ivsize > PAGE_SIZE / 8)
                return -EINVAL;

        crt->setkey = setkey;
        crt->encrypt = alg->encrypt;
        crt->decrypt = alg->decrypt;
        if (!alg->ivsize) {
                crt->givencrypt = skcipher_null_givencrypt;
                crt->givdecrypt = skcipher_null_givdecrypt;
        }
        crt->base = __crypto_ablkcipher_cast(tfm);
        crt->ivsize = alg->ivsize;

        return 0;
}

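/* /proc/crypto output for ablkcipher algorithms. */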
static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_ablkcipher_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;

        seq_printf(m, "type         : ablkcipher\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "min keysize  : %u\n", ablkcipher->min_keysize);
        seq_printf(m, "max keysize  : %u\n", ablkcipher->max_keysize);
        seq_printf(m, "ivsize       : %u\n", ablkcipher->ivsize);
        seq_printf(m, "geniv        : %s\n", ablkcipher->geniv ?: "<default>");
}

const struct crypto_type crypto_ablkcipher_type = {
        .ctxsize = crypto_ablkcipher_ctxsize,
        .init = crypto_init_ablkcipher_ops,
#ifdef CONFIG_PROC_FS
        .show = crypto_ablkcipher_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_ablkcipher_type);

static int no_givdecrypt(struct skcipher_givcrypt_request *req)
{
        return -ENOSYS;
}

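/*
 * Set up the per-transform operations for an IV-generating cipher.  A
 * GENIV instance supplies its own setkey; bare algorithms get the generic
 * one above.
 */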
static int crypto_init_givcipher_ops(struct crypto_tfm *tfm, u32 type,
                                     u32 mask)
{
        struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;
        struct ablkcipher_tfm *crt = &tfm->crt_ablkcipher;

        if (alg->ivsize > PAGE_SIZE / 8)
                return -EINVAL;

        crt->setkey = tfm->__crt_alg->cra_flags & CRYPTO_ALG_GENIV ?
                      alg->setkey : setkey;
        crt->encrypt = alg->encrypt;
        crt->decrypt = alg->decrypt;
        crt->givencrypt = alg->givencrypt;
        crt->givdecrypt = alg->givdecrypt ?: no_givdecrypt;
        crt->base = __crypto_ablkcipher_cast(tfm);
        crt->ivsize = alg->ivsize;

        return 0;
}

static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_givcipher_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct ablkcipher_alg *ablkcipher = &alg->cra_ablkcipher;

        seq_printf(m, "type         : givcipher\n");
        seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
                                             "yes" : "no");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "min keysize  : %u\n", ablkcipher->min_keysize);
        seq_printf(m, "max keysize  : %u\n", ablkcipher->max_keysize);
        seq_printf(m, "ivsize       : %u\n", ablkcipher->ivsize);
        seq_printf(m, "geniv        : %s\n", ablkcipher->geniv ?: "<built-in>");
}

const struct crypto_type crypto_givcipher_type = {
        .ctxsize = crypto_ablkcipher_ctxsize,
        .init = crypto_init_givcipher_ops,
#ifdef CONFIG_PROC_FS
        .show = crypto_givcipher_show,
#endif
};
EXPORT_SYMBOL_GPL(crypto_givcipher_type);

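/*
 * Choose the default IV generator for an algorithm: chainiv when the IV
 * size differs from the block size, eseqiv for async ciphers, otherwise
 * the boot-time default selected in skcipher_module_init().
 */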
const char *crypto_default_geniv(const struct crypto_alg *alg)
{
        if (((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
             CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
                                         alg->cra_ablkcipher.ivsize) !=
            alg->cra_blocksize)
                return "chainiv";

        return alg->cra_flags & CRYPTO_ALG_ASYNC ?
               "eseqiv" : skcipher_default_geniv;
}

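/*
 * Wrap a bare cipher in its default IV generator template and register the
 * resulting instance.  A temporary larval serializes concurrent attempts,
 * and -EAGAIN tells the caller to redo the lookup now that the instance
 * has been registered.
 */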
static int crypto_givcipher_default(struct crypto_alg *alg, u32 type, u32 mask)
{
        struct rtattr *tb[3];
        struct {
                struct rtattr attr;
                struct crypto_attr_type data;
        } ptype;
        struct {
                struct rtattr attr;
                struct crypto_attr_alg data;
        } palg;
        struct crypto_template *tmpl;
        struct crypto_instance *inst;
        struct crypto_alg *larval;
        const char *geniv;
        int err;

        larval = crypto_larval_lookup(alg->cra_driver_name,
                                      (type & ~CRYPTO_ALG_TYPE_MASK) |
                                      CRYPTO_ALG_TYPE_GIVCIPHER,
                                      mask | CRYPTO_ALG_TYPE_MASK);
        err = PTR_ERR(larval);
        if (IS_ERR(larval))
                goto out;

        err = -EAGAIN;
        if (!crypto_is_larval(larval))
                goto drop_larval;

        ptype.attr.rta_len = sizeof(ptype);
        ptype.attr.rta_type = CRYPTOA_TYPE;
        ptype.data.type = type | CRYPTO_ALG_GENIV;
        /* GENIV tells the template that we're making a default geniv. */
        ptype.data.mask = mask | CRYPTO_ALG_GENIV;
        tb[0] = &ptype.attr;

        palg.attr.rta_len = sizeof(palg);
        palg.attr.rta_type = CRYPTOA_ALG;
        /* Must use the exact name to locate ourselves. */
        memcpy(palg.data.name, alg->cra_driver_name, CRYPTO_MAX_ALG_NAME);
        tb[1] = &palg.attr;

        tb[2] = NULL;

        if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
            CRYPTO_ALG_TYPE_BLKCIPHER)
                geniv = alg->cra_blkcipher.geniv;
        else
                geniv = alg->cra_ablkcipher.geniv;

        if (!geniv)
                geniv = crypto_default_geniv(alg);

        tmpl = crypto_lookup_template(geniv);
        err = -ENOENT;
        if (!tmpl)
                goto kill_larval;

        inst = tmpl->alloc(tb);
        err = PTR_ERR(inst);
        if (IS_ERR(inst))
                goto put_tmpl;

        if ((err = crypto_register_instance(tmpl, inst))) {
                tmpl->free(inst);
                goto put_tmpl;
        }

        /* Redo the lookup to use the instance we just registered. */
        err = -EAGAIN;

put_tmpl:
        crypto_tmpl_put(tmpl);
kill_larval:
        crypto_larval_kill(larval);
drop_larval:
        crypto_mod_put(larval);
out:
        crypto_mod_put(alg);
        return err;
}

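/*
 * Look up a skcipher by name.  Algorithms that already generate their own
 * IVs, or that take no IV at all, are returned as is; otherwise a default
 * IV generator is instantiated around the bare cipher.
 */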
static struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type,
                                                 u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_alg_mod_lookup(name, type, mask);
        if (IS_ERR(alg))
                return alg;

        if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
            CRYPTO_ALG_TYPE_GIVCIPHER)
                return alg;

        if (!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
              CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
                                          alg->cra_ablkcipher.ivsize))
                return alg;

        crypto_mod_put(alg);
        alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_TESTED,
                                    mask & ~CRYPTO_ALG_TESTED);
        if (IS_ERR(alg))
                return alg;

        if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
            CRYPTO_ALG_TYPE_GIVCIPHER) {
                if ((alg->cra_flags ^ type ^ ~mask) & CRYPTO_ALG_TESTED) {
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ENOENT);
                }
                return alg;
        }

        BUG_ON(!((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
                 CRYPTO_ALG_TYPE_BLKCIPHER ? alg->cra_blkcipher.ivsize :
                                             alg->cra_ablkcipher.ivsize));

        return ERR_PTR(crypto_givcipher_default(alg, type, mask));
}

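/*
 * Resolve @name and bind the resulting skcipher algorithm to a template
 * instance through @spawn.
 */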
int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
                         u32 type, u32 mask)
{
        struct crypto_alg *alg;
        int err;

        type = crypto_skcipher_type(type);
        mask = crypto_skcipher_mask(mask);

        alg = crypto_lookup_skcipher(name, type, mask);
        if (IS_ERR(alg))
                return PTR_ERR(alg);

        err = crypto_init_spawn(&spawn->base, alg, spawn->base.inst, mask);
        crypto_mod_put(alg);
        return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_skcipher);

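/*
 * Allocate an ablkcipher transform for @alg_name, retrying while the
 * lookup returns -EAGAIN (e.g. after a default IV generator instance has
 * just been registered).
 *
 * Illustrative caller sketch (hypothetical usage, not part of this file;
 * key and keylen are assumed to be provided by the caller):
 *
 *	struct crypto_ablkcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_ablkcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_ablkcipher(tfm);
 */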
struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
                                                  u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        type = crypto_skcipher_type(type);
        mask = crypto_skcipher_mask(mask);

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_lookup_skcipher(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return __crypto_ablkcipher_cast(tfm);

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ablkcipher);

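/* Pick the boot-time default IV generator: eseqiv on SMP, chainiv on UP. */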
static int __init skcipher_module_init(void)
{
        skcipher_default_geniv = num_possible_cpus() > 1 ?
                                 "eseqiv" : "chainiv";
        return 0;
}

static void skcipher_module_exit(void)
{
}

module_init(skcipher_module_init);
module_exit(skcipher_module_exit);