// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared glue code for 128bit block ciphers
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <linux/module.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>

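/*
 * ECB: walk the request and hand each chunk to the widest batch helper in
 * gctx->funcs that still fits, falling back to smaller batches for the tail.
 * The FPU is enabled per chunk according to gctx->fpu_blocks_limit.
 *
 * Callers describe their cipher with a common_glue_ctx roughly like the
 * sketch below (illustrative only; the cipher function names are made up,
 * the field names are the ones this file uses):
 *
 *	static const struct common_glue_ctx mycipher_enc = {
 *		.num_funcs = 2,
 *		.fpu_blocks_limit = 8,
 *		.funcs = { {
 *			.num_blocks = 8,
 *			.fn_u = { .ecb = mycipher_ecb_enc_8way }
 *		}, {
 *			.num_blocks = 1,
 *			.fn_u = { .ecb = mycipher_ecb_enc_one }
 *		} }
 *	};
 */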
int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
                        struct skcipher_request *req)
{
        void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        const unsigned int bsize = 128 / 8;
        struct skcipher_walk walk;
        bool fpu_enabled = false;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                unsigned int func_bytes;
                unsigned int i;

                fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
                                             &walk, fpu_enabled, nbytes);
                for (i = 0; i < gctx->num_funcs; i++) {
                        func_bytes = bsize * gctx->funcs[i].num_blocks;

                        if (nbytes < func_bytes)
                                continue;

                        /* Process multi-block batch */
                        do {
                                gctx->funcs[i].fn_u.ecb(ctx, dst, src);
                                src += func_bytes;
                                dst += func_bytes;
                                nbytes -= func_bytes;
                        } while (nbytes >= func_bytes);

                        if (nbytes < bsize)
                                break;
                }
                err = skcipher_walk_done(&walk, nbytes);
        }

        glue_fpu_end(fpu_enabled);
        return err;
}
EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);

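/*
 * CBC encryption is inherently serial (each block depends on the previous
 * ciphertext block), so this always uses the single-block function @fn and
 * carries the chaining value in walk.iv between chunks.
 */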
int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
                                struct skcipher_request *req)
{
        void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        const unsigned int bsize = 128 / 8;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                const u128 *src = (u128 *)walk.src.virt.addr;
                u128 *dst = (u128 *)walk.dst.virt.addr;
                u128 *iv = (u128 *)walk.iv;

                do {
                        u128_xor(dst, src, iv);
                        fn(ctx, (u8 *)dst, (u8 *)dst);
                        iv = dst;
                        src++;
                        dst++;
                        nbytes -= bsize;
                } while (nbytes >= bsize);

                *(u128 *)walk.iv = *iv;
                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_encrypt_req_128bit);

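/*
 * CBC decryption has no chaining dependency between output blocks, so it can
 * use the batch helpers.  Each chunk is processed from the last block towards
 * the first; the previous ciphertext blocks are XORed in afterwards, and
 * walk.iv is updated to the saved last ciphertext block for the next chunk.
 */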
int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
                                struct skcipher_request *req)
{
        void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        const unsigned int bsize = 128 / 8;
        struct skcipher_walk walk;
        bool fpu_enabled = false;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                const u128 *src = walk.src.virt.addr;
                u128 *dst = walk.dst.virt.addr;
                unsigned int func_bytes, num_blocks;
                unsigned int i;
                u128 last_iv;

                fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
                                             &walk, fpu_enabled, nbytes);
                /* Start of the last block. */
                src += nbytes / bsize - 1;
                dst += nbytes / bsize - 1;

                last_iv = *src;

                for (i = 0; i < gctx->num_funcs; i++) {
                        num_blocks = gctx->funcs[i].num_blocks;
                        func_bytes = bsize * num_blocks;

                        if (nbytes < func_bytes)
                                continue;

                        /* Process multi-block batch */
                        do {
                                src -= num_blocks - 1;
                                dst -= num_blocks - 1;

                                gctx->funcs[i].fn_u.cbc(ctx, dst, src);

                                nbytes -= func_bytes;
                                if (nbytes < bsize)
                                        goto done;

                                u128_xor(dst, dst, --src);
                                dst--;
                        } while (nbytes >= func_bytes);
                }
done:
                u128_xor(dst, dst, (u128 *)walk.iv);
                *(u128 *)walk.iv = last_iv;
                err = skcipher_walk_done(&walk, nbytes);
        }

        glue_fpu_end(fpu_enabled);
        return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);

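/*
 * CTR: the counter is converted from the big-endian IV to le128 for the batch
 * helpers and written back between chunks.  A final partial block is handled
 * by running the last entry in gctx->funcs on a stack copy of the tail and
 * copying only the remaining bytes to the destination.
 */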
int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
                        struct skcipher_request *req)
{
        void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        const unsigned int bsize = 128 / 8;
        struct skcipher_walk walk;
        bool fpu_enabled = false;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes) >= bsize) {
                const u128 *src = walk.src.virt.addr;
                u128 *dst = walk.dst.virt.addr;
                unsigned int func_bytes, num_blocks;
                unsigned int i;
                le128 ctrblk;

                fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
                                             &walk, fpu_enabled, nbytes);

                be128_to_le128(&ctrblk, (be128 *)walk.iv);

                for (i = 0; i < gctx->num_funcs; i++) {
                        num_blocks = gctx->funcs[i].num_blocks;
                        func_bytes = bsize * num_blocks;

                        if (nbytes < func_bytes)
                                continue;

                        /* Process multi-block batch */
                        do {
                                gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk);
                                src += num_blocks;
                                dst += num_blocks;
                                nbytes -= func_bytes;
                        } while (nbytes >= func_bytes);

                        if (nbytes < bsize)
                                break;
                }

                le128_to_be128((be128 *)walk.iv, &ctrblk);
                err = skcipher_walk_done(&walk, nbytes);
        }

        glue_fpu_end(fpu_enabled);

        if (nbytes) {
                le128 ctrblk;
                u128 tmp;

                be128_to_le128(&ctrblk, (be128 *)walk.iv);
                memcpy(&tmp, walk.src.virt.addr, nbytes);
                gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, &tmp, &tmp,
                                                          &ctrblk);
                memcpy(walk.dst.virt.addr, &tmp, nbytes);
                le128_to_be128((be128 *)walk.iv, &ctrblk);

                err = skcipher_walk_done(&walk, 0);
        }

        return err;
}
EXPORT_SYMBOL_GPL(glue_ctr_req_128bit);

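/*
 * Process as much of the current walk chunk as possible with the batched XTS
 * helpers; returns the number of bytes left unprocessed.
 */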
static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
                                          void *ctx,
                                          struct skcipher_walk *walk)
{
        const unsigned int bsize = 128 / 8;
        unsigned int nbytes = walk->nbytes;
        u128 *src = walk->src.virt.addr;
        u128 *dst = walk->dst.virt.addr;
        unsigned int num_blocks, func_bytes;
        unsigned int i;

        /* Process multi-block batch */
        for (i = 0; i < gctx->num_funcs; i++) {
                num_blocks = gctx->funcs[i].num_blocks;
                func_bytes = bsize * num_blocks;

                if (nbytes >= func_bytes) {
                        do {
                                gctx->funcs[i].fn_u.xts(ctx, dst, src,
                                                        walk->iv);

                                src += num_blocks;
                                dst += num_blocks;
                                nbytes -= func_bytes;
                        } while (nbytes >= func_bytes);

                        if (nbytes < bsize)
                                goto done;
                }
        }

done:
        return nbytes;
}

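/*
 * XTS: compute the first tweak T by running @tweak_fn on the IV in place,
 * then let the batched XTS helpers in @gctx encrypt/decrypt the request and
 * advance the tweak.  glue_fpu_begin() is passed at least one block of
 * length so @tweak_fn can run with the FPU if needed.
 */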
int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
                        struct skcipher_request *req,
                        common_glue_func_t tweak_fn, void *tweak_ctx,
                        void *crypt_ctx)
{
        const unsigned int bsize = 128 / 8;
        struct skcipher_walk walk;
        bool fpu_enabled = false;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);
        nbytes = walk.nbytes;
        if (!nbytes)
                return err;

        /* set minimum length to bsize, for tweak_fn */
        fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
                                     &walk, fpu_enabled,
                                     nbytes < bsize ? bsize : nbytes);

        /* calculate first value of T */
        tweak_fn(tweak_ctx, walk.iv, walk.iv);

        while (nbytes) {
                nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);

                err = skcipher_walk_done(&walk, nbytes);
                nbytes = walk.nbytes;
        }

        glue_fpu_end(fpu_enabled);

        return err;
}
EXPORT_SYMBOL_GPL(glue_xts_req_128bit);

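/*
 * Single-block XTS helper: apply the current tweak in @iv to one block
 * (dst = T xor fn(T xor src)) and advance @iv to the next tweak by
 * multiplying it by x in GF(2^128) (gf128mul_x_ble()).
 */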
void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, le128 *iv,
                               common_glue_func_t fn)
{
        le128 ivblk = *iv;

        /* generate next IV */
        gf128mul_x_ble(iv, &ivblk);

        /* CC <- T xor C */
        u128_xor(dst, src, (u128 *)&ivblk);

        /* PP <- D(Key2,CC) */
        fn(ctx, (u8 *)dst, (u8 *)dst);

        /* P <- T xor PP */
        u128_xor(dst, dst, (u128 *)&ivblk);
}
EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);

MODULE_LICENSE("GPL");