/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct crypto_aead;
struct crypto_instance;
struct module;
struct rtattr;
struct seq_file;

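/*
 * Frontend descriptor for one kind of transform (cipher, hash, AEAD,
 * ...): tells the core how to size contexts (->ctxsize()/->extsize()),
 * initialise tfms (->init()/->init_tfm()), show the algorithm in
 * /proc/crypto (->show()) and report it over netlink (->report()).
 */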
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

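/*
 * A template constructs new algorithm instances around existing ones:
 * requesting "hmac(sha256)", for instance, instantiates the "hmac"
 * template on top of "sha256".  Templates implement either ->create()
 * or the older ->alloc()/->free() pair.
 */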
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

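/*
 * A spawn is an instance's reference to the algorithm it is built on.
 * It is set up with crypto_init_spawn()/crypto_grab_spawn() below and
 * turned into a usable tfm with crypto_spawn_tfm(); dropping it via
 * crypto_drop_spawn() releases the underlying algorithm again.
 */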
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

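/*
 * Illustrative queue usage, a common driver pattern rather than
 * anything mandated by this header (the max_qlen of 50 and the
 * variable names are arbitrary example values):
 *
 *	crypto_init_queue(&queue, 50);
 *	err = crypto_enqueue_request(&queue, &req->base);
 *	...
 *	async_req = crypto_dequeue_request(&queue);
 */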
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;
	unsigned int ivsize;

	int flags;
	unsigned int walk_blocksize;
	unsigned int cipher_blocksize;
	unsigned int alignmask;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
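
/*
 * Example (illustrative): advance a 16-byte CTR-mode counter block and
 * XOR two buffers in place:
 *
 *	crypto_inc(ctrblk, 16);    // ctrblk is treated as a big-endian counter
 *	crypto_xor(dst, src, 16);  // dst ^= src, byte by byte
 */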

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
				   struct blkcipher_walk *walk,
				   struct crypto_aead *tfm,
				   unsigned int blocksize);
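
/*
 * Sketch of the walk loop a typical blkcipher implementation runs;
 * process_segment() is a hypothetical stand-in for the cipher-specific
 * inner loop and returns the number of bytes it left unprocessed:
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while (walk.nbytes) {
 *		unsigned int left = process_segment(&walk);
 *		err = blkcipher_walk_done(desc, &walk, left);
 *	}
 */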

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

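/*
 * Round the tfm context up to the algorithm's alignment mask; drivers
 * that need their context aligned for DMA or vector instructions use
 * this instead of plain crypto_tfm_ctx().
 */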
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
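
/*
 * Typical dequeue sequence in a driver thread (illustrative): pull the
 * oldest backlogged request first and tell its submitter that it is
 * now genuinely in flight:
 *
 *	backlog = crypto_get_backlog(&queue);
 *	async_req = crypto_dequeue_request(&queue);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */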

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
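
/*
 * Illustrative template use: fold the caller's sync requirement into
 * the mask used to look up the underlying algorithm (algt here is a
 * hypothetical struct crypto_attr_type obtained via
 * crypto_get_attr_type()):
 *
 *	mask |= crypto_requires_sync(algt->type, algt->mask);
 */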

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
	return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
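
/*
 * Example (illustrative): verify an authentication tag without opening
 * a timing side channel:
 *
 *	if (crypto_memneq(computed_tag, received_tag, taglen))
 *		return -EBADMSG;
 */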

static inline void crypto_yield(u32 flags)
{
#if !defined(CONFIG_PREEMPT) || defined(CONFIG_PREEMPT_VOLUNTARY)
	if (flags & CRYPTO_TFM_REQ_MAY_SLEEP)
		cond_resched();
#endif
}

#endif	/* _CRYPTO_ALGAPI_H */