/**
 * AES CBC routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>

#include "aesp8-ppc.h"

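/*
 * Per-tfm context: the expanded encryption and decryption round keys
 * for the VSX assembly routines, plus a software skcipher fallback
 * used whenever the vector unit cannot be touched (e.g. from
 * interrupt context).
 */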
struct p8_aes_cbc_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
	struct aes_key dec_key;
};

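/*
 * Allocate the fallback at tfm init time. With type 0 and mask
 * CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK, the crypto API picks a
 * synchronous "cbc(aes)" implementation that does not itself need a
 * fallback, so this driver can never select itself.
 */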
static int p8_aes_cbc_init(struct crypto_tfm *tfm)
{
	const char *alg = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *fallback;
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	fallback = crypto_alloc_skcipher(alg, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	/*
	 * Propagate this tfm's flags to the fallback. tfm is a plain
	 * struct crypto_tfm here, so read the flags through the
	 * crypto_tfm accessor rather than casting it to a
	 * crypto_skcipher, which it is not.
	 */
	crypto_skcipher_set_flags(fallback, crypto_tfm_get_flags(tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_cbc_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_skcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

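/*
 * Expand the user key into round-key schedules for both directions,
 * and program the fallback cipher with the same key.
 */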
static int p8_aes_cbc_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_cbc_ctx *ctx = crypto_tfm_ctx(tfm);

	/*
	 * The key expansion runs VSX instructions, so the vector unit
	 * must be claimed with preemption and page faults disabled.
	 */
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	ret |= aes_p8_set_decrypt_key(key, keylen * 8, &ctx->dec_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	/*
	 * The assembly routines return nonzero on failure rather than
	 * proper -errno values, so summing raw return codes could
	 * produce a misleading result; report any failure as -EINVAL.
	 */
	return ret ? -EINVAL : 0;
}

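/*
 * CBC-encrypt a scatterlist. blkcipher_walk_virt() may sleep and the
 * vector unit must not be claimed from atomic context, so when called
 * in interrupt context (the sleep-in-atomic case this file guards
 * against) the request is handed to the software fallback instead.
 */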
static int p8_aes_cbc_encrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);
		while ((nbytes = walk.nbytes)) {
			/* Hold the vector unit only around the crypto. */
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_cbc_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   nbytes & AES_BLOCK_MASK,
					   &ctx->enc_key, walk.iv, 1);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			/* Hand any partial trailing block back to the walk. */
			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
	}

	return ret;
}

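/*
 * CBC-decrypt a scatterlist. Mirrors p8_aes_cbc_encrypt(): the same
 * assembly entry point is called with enc == 0 and the decryption
 * round keys.
 */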
static int p8_aes_cbc_decrypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	struct blkcipher_walk walk;
	struct p8_aes_cbc_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));

	if (in_interrupt()) {
		SKCIPHER_REQUEST_ON_STACK(req, ctx->fallback);
		skcipher_request_set_tfm(req, ctx->fallback);
		skcipher_request_set_callback(req, desc->flags, NULL, NULL);
		skcipher_request_set_crypt(req, src, dst, nbytes, desc->info);
		ret = crypto_skcipher_decrypt(req);
		skcipher_request_zero(req);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt(desc, &walk);
		while ((nbytes = walk.nbytes)) {
			preempt_disable();
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_cbc_encrypt(walk.src.virt.addr,
					   walk.dst.virt.addr,
					   nbytes & AES_BLOCK_MASK,
					   &ctx->dec_key, walk.iv, 0);
			disable_kernel_vsx();
			pagefault_enable();
			preempt_enable();

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
	}

	return ret;
}

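/*
 * cra_priority 2000 makes this implementation preferred over the
 * generic software "cbc(aes)" whenever the module is loaded, and
 * CRYPTO_ALG_NEED_FALLBACK advertises that it requires a fallback
 * cipher to work in all contexts.
 */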
struct crypto_alg p8_aes_cbc_alg = {
	.cra_name = "cbc(aes)",
	.cra_driver_name = "p8_aes_cbc",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = AES_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct p8_aes_cbc_ctx),
	.cra_init = p8_aes_cbc_init,
	.cra_exit = p8_aes_cbc_exit,
	.cra_blkcipher = {
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = p8_aes_cbc_setkey,
		.encrypt = p8_aes_cbc_encrypt,
		.decrypt = p8_aes_cbc_decrypt,
	},
};
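
/*
 * This struct is not registered here: the vmx module's init code
 * (drivers/crypto/vmx/vmx.c) passes it to crypto_register_alg()
 * alongside the other Power 8 algorithm definitions.
 */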