// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

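/*
 * tfm context for the rfc3686 template: the child ctr(...) transform
 * plus the 4-byte nonce split off the end of the key.
 */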
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

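/*
 * Per-request context: the full 16-byte counter block and the
 * subrequest that is forwarded to the child transform.
 */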
struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};

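/*
 * Handle the final, possibly partial, block: encrypt one counter block
 * into an aligned temporary buffer, then XOR just the remaining
 * walk->nbytes of keystream into the destination.
 */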
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

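/*
 * Process full blocks when source and destination differ: the keystream
 * is encrypted directly into the destination and the source XORed in
 * afterwards, so no temporary buffer is needed.  Returns the number of
 * leftover bytes (less than one block).
 */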
static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

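/*
 * Process full blocks in place (src == dst): the keystream must go into
 * an aligned temporary buffer first, since encrypting into the data
 * would overwrite the plaintext before it could be XORed.
 */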
static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

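/*
 * CTR encryption and decryption are identical: walk the request,
 * handling full blocks first and any trailing partial block at the end.
 */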
static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

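/*
 * Instantiate "ctr(alg)" around a single block cipher.  The instance is
 * advertised as a stream cipher (blocksize 1), with chunksize set to the
 * underlying block size so the walk only yields a partial block at the
 * very end.
 */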
static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

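/*
 * RFC 3686 keys are the child cipher key with a 4-byte nonce appended:
 * split off the nonce and hand the remainder to the child skcipher.
 */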
static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

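/*
 * Assemble the 16-byte RFC 3686 counter block (4-byte nonce, 8-byte
 * per-request IV, 32-bit big-endian block counter starting at 1) and
 * forward the request to the child ctr(...) transform through the
 * subrequest in the aligned request context.
 */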
static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

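/*
 * Acquire the child transform and size the request context so a
 * crypto_rfc3686_req_ctx, aligned up to this tfm's alignmask at request
 * time (see PTR_ALIGN in crypto_rfc3686_crypt()), fits together with
 * the child's own request context.
 */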
static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

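/*
 * Instantiate "rfc3686(alg)".  The inner algorithm must look like a CTR
 * mode instance: a stream cipher (blocksize 1) taking a 16-byte IV.  The
 * advertised key sizes grow by the 4-byte nonce carried at the end of
 * the key.
 */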
static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	mask = crypto_requires_sync(algt->type, algt->mask) |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc3686_free(inst);
	}
	return err;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");