]> git.proxmox.com Git - mirror_ubuntu-bionic-kernel.git/blame - arch/s390/crypto/aes_s390.c
[CRYPTO] padlock: Added block cipher versions of CBC/ECB
[mirror_ubuntu-bionic-kernel.git] / arch / s390 / crypto / aes_s390.c
CommitLineData
bf754ae8
JG
1/*
2 * Cryptographic API.
3 *
4 * s390 implementation of the AES Cipher Algorithm.
5 *
6 * s390 Version:
7 * Copyright (C) 2005 IBM Deutschland GmbH, IBM Corporation
8 * Author(s): Jan Glauber (jang@de.ibm.com)
9 *
10 * Derived from "crypto/aes.c"
11 *
12 * This program is free software; you can redistribute it and/or modify it
13 * under the terms of the GNU General Public License as published by the Free
14 * Software Foundation; either version 2 of the License, or (at your option)
15 * any later version.
16 *
17 */
18
19#include <linux/module.h>
20#include <linux/init.h>
21#include <linux/crypto.h>
22#include "crypt_s390.h"
23
24#define AES_MIN_KEY_SIZE 16
25#define AES_MAX_KEY_SIZE 32
26
27/* data block size for all key lengths */
28#define AES_BLOCK_SIZE 16
29
30int has_aes_128 = 0;
31int has_aes_192 = 0;
32int has_aes_256 = 0;
33
34struct s390_aes_ctx {
35 u8 iv[AES_BLOCK_SIZE];
36 u8 key[AES_MAX_KEY_SIZE];
37 int key_len;
38};
39
6c2bb98b 40static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
560c06ae 41 unsigned int key_len)
bf754ae8 42{
6c2bb98b 43 struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
560c06ae 44 u32 *flags = &tfm->crt_flags;
bf754ae8
JG
45
46 switch (key_len) {
47 case 16:
48 if (!has_aes_128)
49 goto fail;
50 break;
51 case 24:
52 if (!has_aes_192)
53 goto fail;
54
55 break;
56 case 32:
57 if (!has_aes_256)
58 goto fail;
59 break;
60 default:
61 /* invalid key length */
62 goto fail;
63 break;
64 }
65
66 sctx->key_len = key_len;
67 memcpy(sctx->key, in_key, key_len);
68 return 0;
69fail:
70 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
71 return -EINVAL;
72}
73
6c2bb98b 74static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
bf754ae8 75{
6c2bb98b 76 const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
bf754ae8
JG
77
78 switch (sctx->key_len) {
79 case 16:
80 crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
81 AES_BLOCK_SIZE);
82 break;
83 case 24:
84 crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
85 AES_BLOCK_SIZE);
86 break;
87 case 32:
88 crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
89 AES_BLOCK_SIZE);
90 break;
91 }
92}
93
6c2bb98b 94static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
bf754ae8 95{
6c2bb98b 96 const struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
bf754ae8
JG
97
98 switch (sctx->key_len) {
99 case 16:
100 crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
101 AES_BLOCK_SIZE);
102 break;
103 case 24:
104 crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
105 AES_BLOCK_SIZE);
106 break;
107 case 32:
108 crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
109 AES_BLOCK_SIZE);
110 break;
111 }
112}
113
114static unsigned int aes_encrypt_ecb(const struct cipher_desc *desc, u8 *out,
115 const u8 *in, unsigned int nbytes)
116{
117 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
fda5e142
JG
118 int ret;
119
120 /* only use complete blocks */
121 nbytes &= ~(AES_BLOCK_SIZE - 1);
bf754ae8
JG
122
123 switch (sctx->key_len) {
124 case 16:
fda5e142
JG
125 ret = crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in, nbytes);
126 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
127 break;
128 case 24:
fda5e142
JG
129 ret = crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in, nbytes);
130 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
131 break;
132 case 32:
fda5e142
JG
133 ret = crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in, nbytes);
134 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
135 break;
136 }
fda5e142 137 return nbytes;
bf754ae8
JG
138}
139
140static unsigned int aes_decrypt_ecb(const struct cipher_desc *desc, u8 *out,
141 const u8 *in, unsigned int nbytes)
142{
143 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
fda5e142
JG
144 int ret;
145
146 /* only use complete blocks */
147 nbytes &= ~(AES_BLOCK_SIZE - 1);
bf754ae8
JG
148
149 switch (sctx->key_len) {
150 case 16:
fda5e142
JG
151 ret = crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in, nbytes);
152 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
153 break;
154 case 24:
fda5e142
JG
155 ret = crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in, nbytes);
156 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
157 break;
158 case 32:
fda5e142
JG
159 ret = crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in, nbytes);
160 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
161 break;
162 }
fda5e142 163 return nbytes;
bf754ae8
JG
164}
165
166static unsigned int aes_encrypt_cbc(const struct cipher_desc *desc, u8 *out,
167 const u8 *in, unsigned int nbytes)
168{
169 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
fda5e142
JG
170 int ret;
171
172 /* only use complete blocks */
173 nbytes &= ~(AES_BLOCK_SIZE - 1);
bf754ae8
JG
174
175 memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
176 switch (sctx->key_len) {
177 case 16:
fda5e142
JG
178 ret = crypt_s390_kmc(KMC_AES_128_ENCRYPT, &sctx->iv, out, in, nbytes);
179 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
180 break;
181 case 24:
fda5e142
JG
182 ret = crypt_s390_kmc(KMC_AES_192_ENCRYPT, &sctx->iv, out, in, nbytes);
183 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
184 break;
185 case 32:
fda5e142
JG
186 ret = crypt_s390_kmc(KMC_AES_256_ENCRYPT, &sctx->iv, out, in, nbytes);
187 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
188 break;
189 }
190 memcpy(desc->info, &sctx->iv, AES_BLOCK_SIZE);
191
fda5e142 192 return nbytes;
bf754ae8
JG
193}
194
195static unsigned int aes_decrypt_cbc(const struct cipher_desc *desc, u8 *out,
196 const u8 *in, unsigned int nbytes)
197{
198 struct s390_aes_ctx *sctx = crypto_tfm_ctx(desc->tfm);
fda5e142
JG
199 int ret;
200
201 /* only use complete blocks */
202 nbytes &= ~(AES_BLOCK_SIZE - 1);
bf754ae8
JG
203
204 memcpy(&sctx->iv, desc->info, AES_BLOCK_SIZE);
205 switch (sctx->key_len) {
206 case 16:
fda5e142
JG
207 ret = crypt_s390_kmc(KMC_AES_128_DECRYPT, &sctx->iv, out, in, nbytes);
208 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
209 break;
210 case 24:
fda5e142
JG
211 ret = crypt_s390_kmc(KMC_AES_192_DECRYPT, &sctx->iv, out, in, nbytes);
212 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
213 break;
214 case 32:
fda5e142
JG
215 ret = crypt_s390_kmc(KMC_AES_256_DECRYPT, &sctx->iv, out, in, nbytes);
216 BUG_ON((ret < 0) || (ret != nbytes));
bf754ae8
JG
217 break;
218 }
fda5e142 219 return nbytes;
bf754ae8
JG
220}
221
222
/*
 * Algorithm registration.  Besides the single-block cia_encrypt/
 * cia_decrypt entry points, the ECB/CBC fast paths are hooked in via
 * the cia_*_ecb/cbc callbacks so the crypto layer can use the
 * multi-block CPACF instructions instead of looping per block.
 */
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(aes_alg.cra_list),
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
			.cia_encrypt_ecb	= aes_encrypt_ecb,
			.cia_decrypt_ecb	= aes_decrypt_ecb,
			.cia_encrypt_cbc	= aes_encrypt_cbc,
			.cia_decrypt_cbc	= aes_decrypt_cbc,
		}
	}
};
246
247static int __init aes_init(void)
248{
249 int ret;
250
251 if (crypt_s390_func_available(KM_AES_128_ENCRYPT))
252 has_aes_128 = 1;
253 if (crypt_s390_func_available(KM_AES_192_ENCRYPT))
254 has_aes_192 = 1;
255 if (crypt_s390_func_available(KM_AES_256_ENCRYPT))
256 has_aes_256 = 1;
257
258 if (!has_aes_128 && !has_aes_192 && !has_aes_256)
259 return -ENOSYS;
260
261 ret = crypto_register_alg(&aes_alg);
262 if (ret != 0)
263 printk(KERN_INFO "crypt_s390: aes_s390 couldn't be loaded.\n");
264 return ret;
265}
266
/* Module exit: remove the algorithm from the crypto API. */
static void __exit aes_fini(void)
{
	crypto_unregister_alg(&aes_fini == NULL ? aes_alg : aes_alg);
}
271
module_init(aes_init);
module_exit(aes_fini);

/* allow auto-loading when the generic "aes" algorithm is requested */
MODULE_ALIAS("aes");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");
279