/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/config.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/list.h>
#include <linux/string.h>
#include <asm/page.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK            0x000000ff
#define CRYPTO_ALG_TYPE_CIPHER          0x00000001
#define CRYPTO_ALG_TYPE_DIGEST          0x00000002
#define CRYPTO_ALG_TYPE_COMPRESS        0x00000004

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_MODE_MASK            0x000000ff
#define CRYPTO_TFM_REQ_MASK             0x000fff00
#define CRYPTO_TFM_RES_MASK             0xfff00000

#define CRYPTO_TFM_MODE_ECB             0x00000001
#define CRYPTO_TFM_MODE_CBC             0x00000002
#define CRYPTO_TFM_MODE_CFB             0x00000004
#define CRYPTO_TFM_MODE_CTR             0x00000008

#define CRYPTO_TFM_REQ_WEAK_KEY         0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP        0x00000200
#define CRYPTO_TFM_RES_WEAK_KEY         0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN      0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED    0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN    0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS        0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_UNSPEC                   0
#define CRYPTO_MAX_ALG_NAME             64

#define CRYPTO_DIR_ENCRYPT              1
#define CRYPTO_DIR_DECRYPT              0

struct scatterlist;
struct crypto_tfm;

struct cipher_desc {
        struct crypto_tfm *tfm;
        void (*crfn)(void *ctx, u8 *dst, const u8 *src);
        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
                             const u8 *src, unsigned int nbytes);
        void *info;
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(void *ctx, const u8 *key,
                          unsigned int keylen, u32 *flags);
        void (*cia_encrypt)(void *ctx, u8 *dst, const u8 *src);
        void (*cia_decrypt)(void *ctx, u8 *dst, const u8 *src);

        unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
};

struct digest_alg {
        unsigned int dia_digestsize;
        void (*dia_init)(void *ctx);
        void (*dia_update)(void *ctx, const u8 *data, unsigned int len);
        void (*dia_final)(void *ctx, u8 *out);
        int (*dia_setkey)(void *ctx, const u8 *key,
                          unsigned int keylen, u32 *flags);
};

struct compress_alg {
        int (*coa_init)(void *ctx);
        void (*coa_exit)(void *ctx);
        int (*coa_compress)(void *ctx, const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(void *ctx, const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen);
};

#define cra_cipher      cra_u.cipher
#define cra_digest      cra_u.digest
#define cra_compress    cra_u.compress

struct crypto_alg {
        struct list_head cra_list;
        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        int cra_priority;

        const char cra_name[CRYPTO_MAX_ALG_NAME];
        const char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        union {
                struct cipher_alg cipher;
                struct digest_alg digest;
                struct compress_alg compress;
        } cra_u;

        struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
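
/*
 * Example (illustrative sketch only, not part of this header): a minimal
 * single-block cipher registered through crypto_register_alg().  All of the
 * example_* names are hypothetical and the "encryption" is a stand-in
 * memcpy(); a real module would supply working transforms and register from
 * its init routine, unregistering from its exit routine.
 */
struct example_cipher_ctx {
        u8 key[16];
};

static int example_cipher_setkey(void *ctx, const u8 *key,
                                 unsigned int keylen, u32 *flags)
{
        struct example_cipher_ctx *ectx = ctx;

        if (keylen != 16) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        memcpy(ectx->key, key, keylen);
        return 0;
}

static void example_cipher_crypt(void *ctx, u8 *dst, const u8 *src)
{
        memcpy(dst, src, 16);   /* a real cipher transforms the block here */
}

static struct crypto_alg example_cipher_alg = {
        .cra_name       = "example",
        .cra_flags      = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize  = 16,
        .cra_ctxsize    = sizeof(struct example_cipher_ctx),
        .cra_module     = THIS_MODULE,
        .cra_list       = LIST_HEAD_INIT(example_cipher_alg.cra_list),
        .cra_u          = { .cipher = {
                .cia_min_keysize        = 16,
                .cia_max_keysize        = 16,
                .cia_setkey             = example_cipher_setkey,
                .cia_encrypt            = example_cipher_crypt,
                .cia_decrypt            = example_cipher_crypt } }
};

/*
 * Registration/unregistration, typically from module_init()/module_exit():
 *
 *      crypto_register_alg(&example_cipher_alg);
 *      crypto_unregister_alg(&example_cipher_alg);
 */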

/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
int crypto_alg_available(const char *name, u32 flags);
#else
static inline int crypto_alg_available(const char *name, u32 flags)
{
        return 0;
}
#endif

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic. Managed via crypto_alloc_tfm() and
 * crypto_free_tfm(), as well as the various helpers below.
 */

struct cipher_tfm {
        void *cit_iv;
        unsigned int cit_ivsize;
        u32 cit_mode;
        int (*cit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
        int (*cit_encrypt)(struct crypto_tfm *tfm,
                           struct scatterlist *dst,
                           struct scatterlist *src,
                           unsigned int nbytes);
        int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes, u8 *iv);
        int (*cit_decrypt)(struct crypto_tfm *tfm,
                           struct scatterlist *dst,
                           struct scatterlist *src,
                           unsigned int nbytes);
        int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes, u8 *iv);
        void (*cit_xor_block)(u8 *dst, const u8 *src);
};

struct digest_tfm {
        void (*dit_init)(struct crypto_tfm *tfm);
        void (*dit_update)(struct crypto_tfm *tfm,
                           struct scatterlist *sg, unsigned int nsg);
        void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
        void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
                           unsigned int nsg, u8 *out);
        int (*dit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
#ifdef CONFIG_CRYPTO_HMAC
        void *dit_hmac_block;
#endif
};

struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen);
        int (*cot_decompress)(struct crypto_tfm *tfm,
                              const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen);
};

#define crt_cipher      crt_u.cipher
#define crt_digest      crt_u.digest
#define crt_compress    crt_u.compress

struct crypto_tfm {

        u32 crt_flags;

        union {
                struct cipher_tfm cipher;
                struct digest_tfm digest;
                struct compress_tfm compress;
        } crt_u;

        struct crypto_alg *__crt_alg;

        char __crt_ctx[] __attribute__ ((__aligned__));
};

/*
 * Transform user interface.
 */

/*
 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
 * If that fails and the kernel supports dynamically loadable modules, it
 * will then attempt to load a module of the same name or alias. A refcount
 * is grabbed on the algorithm which is then associated with the new transform.
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
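
/*
 * Example (illustrative sketch only): allocating and releasing a transform.
 * The "aes" name, the CBC mode flag and the error handling are just one
 * plausible usage; crypto_alloc_tfm() returns NULL on failure in this API,
 * and crypto_alg_available() can be used beforehand to test whether a name
 * resolves at all.
 */
static struct crypto_tfm *example_alloc_aes_cbc(void)
{
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
        if (tfm == NULL)
                return NULL;    /* not available and not loadable */

        /* ... use the cipher helpers below with this tfm ... */

        return tfm;             /* caller eventually calls crypto_free_tfm() */
}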

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
        return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->__crt_alg->cra_cipher.cia_min_keysize;
}

static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->__crt_alg->cra_cipher.cia_max_keysize;
}

static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_ivsize;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        return tfm->__crt_alg->cra_digest.dia_digestsize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}
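
/*
 * Sketch (not part of this header): the crt_* ops installed by the crypto
 * core call back into the cra_* algorithm hooks with the private area
 * returned by crypto_tfm_ctx().  The function below only illustrates that
 * pattern; the real wiring lives in the crypto core, not here.
 */
static inline void example_dispatch_digest_init(struct crypto_tfm *tfm)
{
        tfm->__crt_alg->cra_digest.dia_init(crypto_tfm_ctx(tfm));
}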

/*
 * API wrappers.
 */
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_init(tfm);
}

static inline void crypto_digest_update(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_update(tfm, sg, nsg);
}

static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_final(tfm, out);
}

static inline void crypto_digest_digest(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg, u8 *out)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}

static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        if (tfm->crt_digest.dit_setkey == NULL)
                return -ENOSYS;
        return tfm->crt_digest.dit_setkey(tfm, key, keylen);
}
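
/*
 * Example (illustrative sketch only): one-shot hashing of a linear buffer
 * using the digest wrappers above.  "sha1", the return codes and
 * sg_init_one() (from <linux/scatterlist.h>, assumed to exist in this tree)
 * are assumptions of this sketch.
 */
static int example_sha1_digest(u8 *data, unsigned int len, u8 *out)
{
        struct scatterlist sg;
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_tfm("sha1", 0);
        if (tfm == NULL)
                return -ENOMEM;

        sg_init_one(&sg, data, len);
        crypto_digest_digest(tfm, &sg, 1, out);  /* init + update + final */

        crypto_free_tfm(tfm);
        return 0;
}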

static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}

static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
        return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
        return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
                                        const u8 *src, unsigned int len)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        memcpy(tfm->crt_cipher.cit_iv, src, len);
}

static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
                                        u8 *dst, unsigned int len)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        memcpy(dst, tfm->crt_cipher.cit_iv, len);
}
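
/*
 * Example (illustrative sketch only): one in-place CBC encryption pass.
 * The tfm is assumed to have been allocated with CRYPTO_TFM_MODE_CBC,
 * len must be a multiple of crypto_tfm_alg_blocksize(tfm), and
 * sg_init_one() is assumed to be available in this tree.
 */
static int example_cbc_encrypt(struct crypto_tfm *tfm, u8 *buf,
                               unsigned int len, const u8 *key,
                               unsigned int keylen, u8 *iv)
{
        struct scatterlist sg;
        int err;

        err = crypto_cipher_setkey(tfm, key, keylen);
        if (err)
                return err;

        sg_init_one(&sg, buf, len);
        /* dst and src describe the same memory, so this encrypts in place */
        return crypto_cipher_encrypt_iv(tfm, &sg, &sg, len, iv);
}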

static inline int crypto_comp_compress(struct crypto_tfm *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int *dlen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
        return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
                                         const u8 *src, unsigned int slen,
                                         u8 *dst, unsigned int *dlen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
        return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
}
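
/*
 * Example (illustrative sketch only): compressing a buffer with "deflate".
 * On entry *dlen holds the capacity of dst; on success it is updated with
 * the number of bytes actually produced.
 */
static int example_deflate_compress(const u8 *src, unsigned int slen,
                                    u8 *dst, unsigned int *dlen)
{
        struct crypto_tfm *tfm;
        int err;

        tfm = crypto_alloc_tfm("deflate", 0);
        if (tfm == NULL)
                return -ENOMEM;

        err = crypto_comp_compress(tfm, src, slen, dst, dlen);
        crypto_free_tfm(tfm);
        return err;
}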

/*
 * HMAC support.
 */
#ifdef CONFIG_CRYPTO_HMAC
void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
void crypto_hmac_update(struct crypto_tfm *tfm,
                        struct scatterlist *sg, unsigned int nsg);
void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
                       unsigned int *keylen, u8 *out);
void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
                 struct scatterlist *sg, unsigned int nsg, u8 *out);
#endif  /* CONFIG_CRYPTO_HMAC */
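
#ifdef CONFIG_CRYPTO_HMAC
/*
 * Example (illustrative sketch only): a one-shot HMAC over a linear buffer.
 * "sha1", sg_init_one() and the caller-supplied buffers are assumptions of
 * this sketch.  Note that crypto_hmac() may rewrite *keylen (the key is
 * hashed down if it exceeds the digest block size).
 */
static int example_hmac_sha1(u8 *key, unsigned int keylen,
                             u8 *data, unsigned int len, u8 *out)
{
        struct scatterlist sg;
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_tfm("sha1", 0);
        if (tfm == NULL)
                return -ENOMEM;

        sg_init_one(&sg, data, len);
        crypto_hmac(tfm, key, &keylen, &sg, 1, out);

        crypto_free_tfm(tfm);
        return 0;
}
#endif  /* CONFIG_CRYPTO_HMAC (example) */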

#endif  /* _LINUX_CRYPTO_H */