/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
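
/*
 * PCBC chains both the plaintext and the ciphertext of the previous block
 * into the current one:
 *
 *	encrypt: C[n] = E(K, P[n] ^ P[n-1] ^ C[n-1])
 *	decrypt: P[n] = D(K, C[n]) ^ P[n-1] ^ C[n-1]
 *
 * with the IV standing in for P[0] ^ C[0].  The helpers below keep the
 * running P[n] ^ C[n] chaining value in walk->iv.
 */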
struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};
static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		memcpy(iv, dst, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}
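
/*
 * In-place variant: source and destination alias, so the plaintext block
 * has to be saved in tmpbuf before crypto_cipher_encrypt_one() overwrites
 * it, otherwise the P[n] ^ C[n] chaining value could not be formed.
 */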
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}
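
/*
 * The request handlers walk the scatterlists with skcipher_walk_virt() and
 * hand each virtually mapped chunk to one of the helpers above; the value
 * a helper returns is the sub-block tail it could not consume, which
 * skcipher_walk_done() records as unprocessed before advancing the walk.
 */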
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		memcpy(iv, src, bsize);
		crypto_xor(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize] __attribute__ ((aligned(__alignof__(u32))));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}
static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}
static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}
static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	/* We access the data as u32s when xoring. */
	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}
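
/*
 * Usage sketch (illustrative, not part of this file): other kernel code
 * instantiates the template by name, with any single-block cipher as the
 * inner algorithm.  The "aes" choice and error handling here are only an
 * example.
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("pcbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_skcipher(tfm);
 */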
static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};
static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}
static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}
module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");