crypto/pcbc.c

/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

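/*
 * Background for the helpers below (standard PCBC definitions, restated
 * here for readability): with block cipher E/D under key K and
 * initialisation vector IV, PCBC chains blocks as
 *
 *      C[1] = E_K(P[1] ^ IV)
 *      C[i] = E_K(P[i] ^ P[i-1] ^ C[i-1])      for i > 1
 *
 *      P[1] = D_K(C[1]) ^ IV
 *      P[i] = D_K(C[i]) ^ P[i-1] ^ C[i-1]      for i > 1
 *
 * so the chaining value carried from block to block is P[i] ^ C[i].  The
 * segment/inplace helpers keep exactly that value in walk->iv.
 */
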
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_pcbc_ctx {
        struct crypto_cipher *child;
};

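/*
 * Propagate the key to the underlying single-block cipher, forwarding the
 * caller's request flags (CRYPTO_TFM_REQ_MASK) down to the child and copying
 * the child's result flags (CRYPTO_TFM_RES_MASK) back up to the skcipher.
 */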
static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
        struct crypto_cipher *child = ctx->child;
        int err;

        crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
                                       CRYPTO_TFM_REQ_MASK);
        err = crypto_cipher_setkey(child, key, keylen);
        crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
                                          CRYPTO_TFM_RES_MASK);
        return err;
}

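/*
 * Encrypt one contiguous run of blocks when source and destination buffers
 * are distinct.  walk->iv holds the chaining value P[i-1] ^ C[i-1] (the IV
 * for the first block); each iteration encrypts iv ^ P[i] into dst and then
 * rebuilds iv as C[i] ^ P[i].  Returns the number of trailing bytes smaller
 * than one block, which the walk code carries over.
 */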
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 *iv = walk->iv;

        do {
                crypto_xor(iv, src, bsize);
                crypto_cipher_encrypt_one(tfm, dst, iv);
                memcpy(iv, dst, bsize);
                crypto_xor(iv, src, bsize);

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        return nbytes;
}

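/*
 * Same as crypto_pcbc_encrypt_segment(), but for in-place operation
 * (src == dst).  The plaintext block is stashed in tmpbuf before it is
 * overwritten by the ciphertext, so that P[i] ^ C[i] can still be formed
 * as the next block's chaining value.
 */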
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *iv = walk->iv;
        u8 tmpbuf[bsize];

        do {
                memcpy(tmpbuf, src, bsize);
                crypto_xor(iv, src, bsize);
                crypto_cipher_encrypt_one(tfm, src, iv);
                memcpy(iv, tmpbuf, bsize);
                crypto_xor(iv, src, bsize);

                src += bsize;
        } while ((nbytes -= bsize) >= bsize);

        memcpy(walk->iv, iv, bsize);

        return nbytes;
}

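/*
 * Top-level encrypt handler: walk the request's scatterlists in
 * virtually-mapped chunks and hand each chunk to the in-place or
 * separate-buffer helper above, reporting any unprocessed tail back to
 * skcipher_walk_done().
 */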
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
                                                             child);
                else
                        nbytes = crypto_pcbc_encrypt_segment(req, &walk,
                                                             child);
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

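/*
 * Decrypt one contiguous run of blocks when source and destination buffers
 * are distinct: P[i] = D_K(C[i]) ^ iv, after which iv becomes C[i] ^ P[i]
 * for the next block.
 */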
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        u8 *iv = walk->iv;

        do {
                crypto_cipher_decrypt_one(tfm, dst, src);
                crypto_xor(dst, iv, bsize);
                memcpy(iv, src, bsize);
                crypto_xor(iv, dst, bsize);

                src += bsize;
                dst += bsize;
        } while ((nbytes -= bsize) >= bsize);

        memcpy(walk->iv, iv, bsize);

        return nbytes;
}

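/*
 * In-place variant of crypto_pcbc_decrypt_segment(): the ciphertext block is
 * saved in tmpbuf before being overwritten with the plaintext, so the
 * chaining value C[i] ^ P[i] can still be computed.
 */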
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
                                       struct skcipher_walk *walk,
                                       struct crypto_cipher *tfm)
{
        int bsize = crypto_cipher_blocksize(tfm);
        unsigned int nbytes = walk->nbytes;
        u8 *src = walk->src.virt.addr;
        u8 *iv = walk->iv;
        u8 tmpbuf[bsize] __attribute__ ((aligned(__alignof__(u32))));

        do {
                memcpy(tmpbuf, src, bsize);
                crypto_cipher_decrypt_one(tfm, src, src);
                crypto_xor(src, iv, bsize);
                memcpy(iv, tmpbuf, bsize);
                crypto_xor(iv, src, bsize);

                src += bsize;
        } while ((nbytes -= bsize) >= bsize);

        memcpy(walk->iv, iv, bsize);

        return nbytes;
}

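/*
 * Top-level decrypt handler; mirror image of crypto_pcbc_encrypt().
 */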
static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_cipher *child = ctx->child;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while ((nbytes = walk.nbytes)) {
                if (walk.src.virt.addr == walk.dst.virt.addr)
                        nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
                                                             child);
                else
                        nbytes = crypto_pcbc_decrypt_segment(req, &walk,
                                                             child);
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

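/*
 * Instantiate the underlying single-block cipher (e.g. "aes") from the spawn
 * recorded in the template instance when an skcipher tfm is created, and
 * release it again in the exit hook below.
 */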
static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
        struct skcipher_instance *inst = skcipher_alg_instance(tfm);
        struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct crypto_cipher *cipher;

        cipher = crypto_spawn_cipher(spawn);
        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
        struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

        crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
        crypto_drop_skcipher(skcipher_instance_ctx(inst));
        kfree(inst);
}

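/*
 * Template constructor: given "pcbc(<cipher>)", look up the named
 * single-block cipher, wrap it in an skcipher instance that uses the
 * handlers above, widen the alignmask so the data can be XORed as u32s,
 * and register the result.
 */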
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct skcipher_instance *inst;
        struct crypto_attr_type *algt;
        struct crypto_spawn *spawn;
        struct crypto_alg *alg;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
            ~CRYPTO_ALG_INTERNAL)
                return -EINVAL;

        inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
                                      (algt->type & CRYPTO_ALG_INTERNAL),
                                  CRYPTO_ALG_TYPE_MASK |
                                  (algt->mask & CRYPTO_ALG_INTERNAL));
        err = PTR_ERR(alg);
        if (IS_ERR(alg))
                goto err_free_inst;

        spawn = skcipher_instance_ctx(inst);
        err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
                                CRYPTO_ALG_TYPE_MASK);
        crypto_mod_put(alg);
        if (err)
                goto err_free_inst;

        err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
        if (err)
                goto err_drop_spawn;

        inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
        inst->alg.base.cra_priority = alg->cra_priority;
        inst->alg.base.cra_blocksize = alg->cra_blocksize;
        inst->alg.base.cra_alignmask = alg->cra_alignmask;

        /* We access the data as u32s when xoring. */
        inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

        inst->alg.ivsize = alg->cra_blocksize;
        inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
        inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

        inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

        inst->alg.init = crypto_pcbc_init_tfm;
        inst->alg.exit = crypto_pcbc_exit_tfm;

        inst->alg.setkey = crypto_pcbc_setkey;
        inst->alg.encrypt = crypto_pcbc_encrypt;
        inst->alg.decrypt = crypto_pcbc_decrypt;

        inst->free = crypto_pcbc_free;

        err = skcipher_register_instance(tmpl, inst);
        if (err)
                goto err_drop_spawn;

out:
        return err;

err_drop_spawn:
        crypto_drop_spawn(spawn);
err_free_inst:
        kfree(inst);
        goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
        .name = "pcbc",
        .create = crypto_pcbc_create,
        .module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
        return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
        crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");
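
/*
 * Usage sketch (not part of the original file): a minimal example of how a
 * kernel caller might exercise this template through the skcipher API by
 * requesting "pcbc(aes)".  The function name, key, IV and buffer contents
 * are illustrative placeholders; the sketch assumes <linux/scatterlist.h>
 * and <crypto/skcipher.h> are available, and it is guarded out so it is not
 * built as part of this module.
 */
#if 0
static int pcbc_aes_example(void)
{
        struct crypto_skcipher *tfm;
        struct skcipher_request *req;
        struct scatterlist sg;
        u8 key[16] = { 0 };             /* placeholder AES-128 key */
        u8 iv[16] = { 0 };              /* placeholder IV, one AES block */
        u8 *buf;
        int err;

        /* Ask the crypto API for a synchronous "pcbc(aes)" instance. */
        tfm = crypto_alloc_skcipher("pcbc(aes)", 0, CRYPTO_ALG_ASYNC);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        err = crypto_skcipher_setkey(tfm, key, sizeof(key));
        if (err)
                goto out_free_tfm;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        buf = kzalloc(32, GFP_KERNEL);  /* two blocks, encrypted in place */
        if (!req || !buf) {
                err = -ENOMEM;
                goto out_free;
        }

        sg_init_one(&sg, buf, 32);
        skcipher_request_set_callback(req, 0, NULL, NULL);
        skcipher_request_set_crypt(req, &sg, &sg, 32, iv);

        /* Runs synchronously because CRYPTO_ALG_ASYNC was masked out. */
        err = crypto_skcipher_encrypt(req);

out_free:
        kfree(buf);
        skcipher_request_free(req);
out_free_tfm:
        crypto_free_skcipher(tfm);
        return err;
}
#endif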