/*
 * aes-ce-cipher.c - core AES cipher using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2013 - 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <crypto/aes.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

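/*
 * Encrypt a single AES block.  The asm below runs three rounds per loop
 * iteration, cycling the round keys through v1-v3 and interleaving the
 * key loads with the aese/aesmc pairs to hide the load latency.  The
 * branches at the top pick the loop entry point appropriate for 10, 12
 * or 14 rounds, and the final round (which has no MixColumns step) is
 * handled after the loop: a bare aese followed by an eor with the last
 * round key.
 */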
static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aes_block *out = (struct aes_block *)dst;
	struct aes_block const *in = (struct aes_block *)src;
	void *dummy0;
	int dummy1;

	kernel_neon_begin_partial(4);

	__asm__("	ld1	{v0.16b}, %[in]			;"
		"	ld1	{v1.16b}, [%[key]], #16		;"
		"	cmp	%w[rounds], #10			;"
		"	bmi	0f				;"
		"	bne	3f				;"
		"	mov	v3.16b, v1.16b			;"
		"	b	2f				;"
		"0:	mov	v2.16b, v1.16b			;"
		"	ld1	{v3.16b}, [%[key]], #16		;"
		"1:	aese	v0.16b, v2.16b			;"
		"	aesmc	v0.16b, v0.16b			;"
		"2:	ld1	{v1.16b}, [%[key]], #16		;"
		"	aese	v0.16b, v3.16b			;"
		"	aesmc	v0.16b, v0.16b			;"
		"3:	ld1	{v2.16b}, [%[key]], #16		;"
		"	subs	%w[rounds], %w[rounds], #3	;"
		"	aese	v0.16b, v1.16b			;"
		"	aesmc	v0.16b, v0.16b			;"
		"	ld1	{v3.16b}, [%[key]], #16		;"
		"	bpl	1b				;"
		"	aese	v0.16b, v2.16b			;"
		"	eor	v0.16b, v0.16b, v3.16b		;"
		"	st1	{v0.16b}, %[out]		;"

	:	[out]		"=Q"(*out),
		[key]		"=r"(dummy0),
		[rounds]	"=r"(dummy1)
	:	[in]		"Q"(*in),
			"1"(ctx->key_enc),
			"2"(num_rounds(ctx) - 2)
	:	"cc");

	kernel_neon_end();
}

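/*
 * Decrypt a single AES block: same structure as the encrypt path, but
 * using the aesd/aesimc instructions and the Equivalent Inverse Cipher
 * round keys in ctx->key_dec.
 */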
static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aes_block *out = (struct aes_block *)dst;
	struct aes_block const *in = (struct aes_block *)src;
	void *dummy0;
	int dummy1;

	kernel_neon_begin_partial(4);

	__asm__("	ld1	{v0.16b}, %[in]			;"
		"	ld1	{v1.16b}, [%[key]], #16		;"
		"	cmp	%w[rounds], #10			;"
		"	bmi	0f				;"
		"	bne	3f				;"
		"	mov	v3.16b, v1.16b			;"
		"	b	2f				;"
		"0:	mov	v2.16b, v1.16b			;"
		"	ld1	{v3.16b}, [%[key]], #16		;"
		"1:	aesd	v0.16b, v2.16b			;"
		"	aesimc	v0.16b, v0.16b			;"
		"2:	ld1	{v1.16b}, [%[key]], #16		;"
		"	aesd	v0.16b, v3.16b			;"
		"	aesimc	v0.16b, v0.16b			;"
		"3:	ld1	{v2.16b}, [%[key]], #16		;"
		"	subs	%w[rounds], %w[rounds], #3	;"
		"	aesd	v0.16b, v1.16b			;"
		"	aesimc	v0.16b, v0.16b			;"
		"	ld1	{v3.16b}, [%[key]], #16		;"
		"	bpl	1b				;"
		"	aesd	v0.16b, v2.16b			;"
		"	eor	v0.16b, v0.16b, v3.16b		;"
		"	st1	{v0.16b}, %[out]		;"

	:	[out]		"=Q"(*out),
		[key]		"=r"(dummy0),
		[rounds]	"=r"(dummy1)
	:	[in]		"Q"(*in),
			"1"(ctx->key_dec),
			"2"(num_rounds(ctx) - 2)
	:	"cc");

	kernel_neon_end();
}

/*
 * aes_sub() - use the aese instruction to perform the AES sbox substitution
 * on each byte in 'input'
 *
 * aese with an all-zero round key reduces to SubBytes + ShiftRows.
 * Since the input word is duplicated into all four lanes, ShiftRows
 * only permutes identical bytes, so lane 0 of the result is simply
 * SubBytes applied to each byte of the input word.
 */
static u32 aes_sub(u32 input)
{
	u32 ret;

	__asm__("dup	v1.4s, %w[in]		;"
		"movi	v0.16b, #0		;"
		"aese	v0.16b, v1.16b		;"
		"umov	%w[out], v0.4s[0]	;"

	:	[out]	"=r"(ret)
	:	[in]	"r"(input)
	:		"v0","v1");

	return ret;
}

int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
		     unsigned int key_len)
{
	/*
	 * The AES key schedule round constants: successive powers of x
	 * in GF(2^8) reduced by x^8 + x^4 + x^3 + x + 1, which is why
	 * 0x80 is followed by 0x1b.
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key_enc, in_key, key_len);
	ctx->key_length = key_len;

	kernel_neon_begin_partial(2);
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

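		/*
		 * The first word of each new round key is
		 * SubWord(RotWord(prev)) ^ rcon.  aes_sub() substitutes
		 * the bytes of a CPU-native u32, so the direction of the
		 * rotation and the byte that receives the round constant
		 * depend on endianness.
		 */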
#ifndef CONFIG_CPU_BIG_ENDIAN
		rko[0] = ror32(aes_sub(rki[kwords - 1]), 8) ^ rcon[i] ^ rki[0];
#else
		rko[0] = rol32(aes_sub(rki[kwords - 1]), 8) ^ (rcon[i] << 24) ^
			 rki[0];
#endif
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		__asm__("ld1	{v0.16b}, %[in]		;"
			"aesimc	v1.16b, v0.16b		;"
			"st1	{v1.16b}, %[out]	;"

		:	[out]	"=Q"(key_dec[i])
		:	[in]	"Q"(key_enc[j])
		:		"v0","v1");
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}
EXPORT_SYMBOL(ce_aes_expandkey);

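/*
 * crypto API setkey entry point: flag an invalid key length on the tfm
 * so callers can tell it apart from other failures.
 */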
int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		  unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}
EXPORT_SYMBOL(ce_aes_setkey);

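/*
 * Priority 250 outranks the generic C implementation (priority 100), so
 * the crypto API prefers this cipher whenever the module is loaded.
 */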
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-ce",
	.cra_priority		= 250,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_cipher = {
		.cia_min_keysize	= AES_MIN_KEY_SIZE,
		.cia_max_keysize	= AES_MAX_KEY_SIZE,
		.cia_setkey		= ce_aes_setkey,
		.cia_encrypt		= aes_cipher_encrypt,
		.cia_decrypt		= aes_cipher_decrypt
	}
};

static int __init aes_mod_init(void)
{
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_alg(&aes_alg);
}

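/*
 * Register only on CPUs that advertise the ARMv8 AES instructions; the
 * cpu feature match also lets udev auto-load the module on such CPUs.
 */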
module_cpu_feature_match(AES, aes_mod_init);
module_exit(aes_mod_exit);