/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}
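
/*
 * Illustrative sketch (not part of the original header): a template's
 * ->create() callback typically stores a crypto_skcipher_spawn in its
 * instance context and binds it to the underlying algorithm named in the
 * template parameters; the reference is put again with
 * crypto_drop_skcipher() when the instance is torn down.  Here tb and mask
 * are assumed to come from the surrounding ->create() code.
 *
 *	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *
 *	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
 *				   crypto_attr_alg_name(tb[1]), 0, mask);
 */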

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}
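
/*
 * Illustrative sketch (struct names example_tfm_ctx and example_request_ctx
 * are hypothetical): an instance's ->init_tfm() usually turns its spawn
 * into a child transform and sizes the request context so that
 * skcipher_request_ctx() can hold both its own state and a sub-request for
 * the child.
 *
 *	static int example_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *		struct example_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *		struct crypto_skcipher *child;
 *
 *		child = crypto_spawn_skcipher(spawn);
 *		if (IS_ERR(child))
 *			return PTR_ERR(child);
 *
 *		ctx->child = child;
 *		crypto_skcipher_set_reqsize(tfm,
 *					    sizeof(struct example_request_ctx) +
 *					    crypto_skcipher_reqsize(child));
 *		return 0;
 *	}
 */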

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
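
/*
 * Illustrative sketch (example_algs is a hypothetical array of
 * struct skcipher_alg): drivers normally register and unregister all of
 * their algorithms in one call from module init/exit.
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_skciphers(example_algs,
 *						 ARRAY_SIZE(example_algs));
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_skciphers(example_algs,
 *					    ARRAY_SIZE(example_algs));
 *	}
 */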

int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);
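
/*
 * Illustrative sketch (not part of the original header): a synchronous
 * implementation typically maps the request with skcipher_walk_virt(),
 * processes walk.nbytes bytes from walk.src.virt.addr into
 * walk.dst.virt.addr on each step, and reports the number of bytes it did
 * not handle back through skcipher_walk_done().  example_process() is a
 * hypothetical per-step helper returning the number of unprocessed bytes.
 *
 *	static int example_encrypt(struct skcipher_request *req)
 *	{
 *		struct skcipher_walk walk;
 *		int err;
 *
 *		err = skcipher_walk_virt(&walk, req, false);
 *		while (walk.nbytes) {
 *			unsigned int left;
 *
 *			left = example_process(walk.dst.virt.addr,
 *					       walk.src.virt.addr,
 *					       walk.nbytes, walk.iv);
 *			err = skcipher_walk_done(&walk, left);
 *		}
 *		return err;
 *	}
 */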

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	return alg->walksize;
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply)
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);

	return spawn->alg;
}
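
/*
 * Illustrative sketch (example_encrypt/example_decrypt are hypothetical):
 * a simple single-block-cipher mode builds its whole instance with
 * skcipher_alloc_instance_simple(), overrides the callbacks it needs, and
 * registers the result; skcipher_cipher_simple() then gives the
 * ->encrypt/->decrypt code the underlying block cipher to drive.
 *
 *	static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */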

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */