/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg,
				const struct crypto_type *frontend);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm,
			const struct crypto_type *frontend);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);

	char name[CRYPTO_MAX_ALG_NAME];
};
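
/*
 * Example (illustrative sketch, not part of this header): a template is
 * registered once under a name and then instantiated on demand, e.g. as
 * "example(cipher)".  The names example_alloc, example_free and
 * example_tmpl below are hypothetical.
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.alloc = example_alloc,
 *		.free = example_free,
 *		.module = THIS_MODULE,
 *	};
 *
 *	static int __init example_module_init(void)
 *	{
 *		return crypto_register_template(&example_tmpl);
 *	}
 */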

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;
extern const struct crypto_type crypto_hash_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
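
/*
 * Example (illustrative sketch): inside a hypothetical template
 * ->alloc(), the underlying algorithm is bound to the new instance
 * through a spawn, and released again with crypto_drop_spawn() when the
 * instance is freed.  All names with an "example_" prefix are
 * assumptions, not part of this API.
 *
 *	struct example_ctx {
 *		struct crypto_spawn spawn;
 *	};
 *
 *	static int example_bind(struct crypto_instance *inst,
 *				struct crypto_alg *alg)
 *	{
 *		struct example_ctx *ctx = crypto_instance_ctx(inst);
 *
 *		return crypto_init_spawn(&ctx->spawn, alg, inst,
 *					 CRYPTO_ALG_TYPE_MASK);
 *	}
 */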

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg(struct rtattr *rta, u32 type, u32 mask);
int crypto_attr_u32(struct rtattr *rta, u32 *num);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
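
/*
 * Example (illustrative sketch): an async driver typically sizes its
 * queue once at init time and feeds it from its request entry points.
 * crypto_enqueue_request() returns -EINPROGRESS when the request was
 * queued, or -EBUSY when it was backlogged or dropped.  The names below
 * are hypothetical, and 50 is just a plausible queue depth.
 *
 *	static struct crypto_queue example_queue;
 *
 *	static int example_setup(void)
 *	{
 *		crypto_init_queue(&example_queue, 50);
 *		return 0;
 *	}
 */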

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
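
/*
 * Example (illustrative sketch): crypto_inc() steps the buffer in place
 * as a big-endian counter and crypto_xor() computes dst ^= src, which
 * together form the core of a CTR-style mode.  The buffer names below
 * are hypothetical; both buffers must be u32-aligned as noted above.
 *
 *	crypto_xor(dst, keystream, bsize);
 *	crypto_inc(ctrblk, bsize);
 */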

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
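
/*
 * Example (illustrative sketch): the canonical shape of a block cipher
 * mode built on the walk helpers.  blkcipher_walk_virt() maps the
 * scatterlists set up by blkcipher_walk_init(), each loop iteration may
 * consume up to walk.nbytes bytes, and blkcipher_walk_done() is told
 * how many bytes were left unprocessed.  example_process() is a
 * hypothetical helper returning the number of bytes it handled.
 *
 *	static int example_encrypt(struct blkcipher_desc *desc,
 *				   struct scatterlist *dst,
 *				   struct scatterlist *src,
 *				   unsigned int nbytes)
 *	{
 *		struct blkcipher_walk walk;
 *		int err;
 *
 *		blkcipher_walk_init(&walk, dst, src, nbytes);
 *		err = blkcipher_walk_virt(desc, &walk);
 *
 *		while (walk.nbytes) {
 *			unsigned int n = example_process(desc, &walk);
 *			err = blkcipher_walk_done(desc, &walk,
 *						  walk.nbytes - n);
 *		}
 *
 *		return err;
 *	}
 */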

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	unsigned long addr = (unsigned long)crypto_tfm_ctx(tfm);
	unsigned long align = crypto_tfm_alg_alignmask(tfm);

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (void *)ALIGN(addr, align);
}
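
/*
 * Example (illustrative sketch): an algorithm that sets cra_alignmask
 * fetches its state through the aligned accessor rather than
 * crypto_tfm_ctx(), so the state honours the advertised alignment.
 * struct example_state is hypothetical.
 *
 *	struct example_state *st = crypto_tfm_ctx_aligned(tfm);
 */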

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
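
/*
 * Example (illustrative sketch): a dequeue loop that honours the
 * backlog.  A backlogged request must be notified with -EINPROGRESS
 * before the next request is processed, so its submitter knows it has
 * entered the queue proper.  example_queue and example_do_request() are
 * hypothetical.
 *
 *	backlog = crypto_get_backlog(&example_queue);
 *	async_req = crypto_dequeue_request(&example_queue);
 *	if (!async_req)
 *		return;
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	example_do_request(async_req);
 */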

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC (a nonzero value) if the type/mask pair
 * requires a synchronous algorithm, i.e. the caller masks on the
 * CRYPTO_ALG_ASYNC bit and asks for it to be clear.  Otherwise returns
 * zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
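
/*
 * Example (illustrative sketch): a template that wraps a possibly
 * asynchronous algorithm can propagate this requirement when looking up
 * its inner algorithm, e.g.
 *
 *	alg = crypto_get_attr_alg(tb, algt->type,
 *				  (algt->mask & CRYPTO_ALG_TYPE_MASK) |
 *				  crypto_requires_sync(algt->type,
 *						       algt->mask));
 *
 * where algt came from crypto_get_attr_type(tb).
 */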

#endif /* _CRYPTO_ALGAPI_H */