/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

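/*
 * The core transform is implemented in assembly (sha2-ce-core.S). It hashes
 * 'blocks' full blocks from 'src' into 'state', optionally preceded by one
 * buffered block passed via 'head'. The 'bytes' argument is only nonzero on
 * the finup() fast path below, where it presumably tells the assembly code
 * to perform the final padding itself.
 */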
asmlinkage int sha2_ce_transform(int blocks, u8 const *src, u32 *state,
                                 u8 *head, long bytes);

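/*
 * SHA-224 and SHA-256 share the same block size, state layout and transform;
 * they differ only in their initial constants and in how much of the final
 * state is emitted as the digest.
 */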
static int sha224_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = {
                        SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
                        SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
                }
        };
        return 0;
}

static int sha256_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        *sctx = (struct sha256_state){
                .state = {
                        SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
                        SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
                }
        };
        return 0;
}

static int sha2_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

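        /*
         * Run the transform only once at least one full block is
         * available; any remainder is buffered in sctx->buf until the
         * next update() or final().
         */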
        if ((partial + len) >= SHA256_BLOCK_SIZE) {
                int blocks;

                if (partial) {
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;
                }

                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

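                /*
                 * kernel_neon_begin_partial(n) preserves only the first
                 * n NEON registers around the in-kernel NEON usage; 28
                 * is assumed to cover every register the assembly
                 * transform clobbers.
                 */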
                kernel_neon_begin_partial(28);
                sha2_ce_transform(blocks, data, sctx->state,
                                  partial ? sctx->buf : NULL, 0);
                kernel_neon_end();

                data += blocks * SHA256_BLOCK_SIZE;
                partial = 0;
        }
        if (len)
                memcpy(sctx->buf + partial, data, len);
        return 0;
}

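/*
 * Standard SHA-2 finalization: append the 0x80 terminator, zero-pad until
 * 8 bytes remain in the final block, then store the message length in bits
 * as a 64-bit big-endian value.
 */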
static void sha2_final(struct shash_desc *desc)
{
        static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 bits = cpu_to_be64(sctx->count << 3);
        u32 padlen = SHA256_BLOCK_SIZE
                     - ((sctx->count + sizeof(bits)) % SHA256_BLOCK_SIZE);

        sha2_update(desc, padding, padlen);
        sha2_update(desc, (const u8 *)&bits, sizeof(bits));
}

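/*
 * The final() and finup() variants below copy the state words out in
 * big-endian order (truncated to seven words for SHA-224) and then wipe
 * the descriptor context so no intermediate hash state is left behind.
 */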
static int sha224_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_final(desc);

        for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha256_final(struct shash_desc *desc, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_final(desc);

        for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static void sha2_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        int blocks;

        if (sctx->count || !len || (len % SHA256_BLOCK_SIZE)) {
                sha2_update(desc, data, len);
                sha2_final(desc);
                return;
        }

        /*
         * Use a fast path if the input is a multiple of 64 bytes. In
         * this case, there is no need to copy data around, and we can
         * perform the entire digest calculation in a single invocation
         * of sha2_ce_transform().
         */
        blocks = len / SHA256_BLOCK_SIZE;

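        /*
         * Passing the byte count as the last argument is taken to signal
         * that the core transform should generate and hash the final
         * padding block itself, so sha2_final() is not needed here.
         */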
        kernel_neon_begin_partial(28);
        sha2_ce_transform(blocks, data, sctx->state, NULL, len);
        kernel_neon_end();
}

static int sha224_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_finup(desc, data, len);

        for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

static int sha256_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *dst = (__be32 *)out;
        int i;

        sha2_finup(desc, data, len);

        for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
                put_unaligned_be32(sctx->state[i], dst++);

        *sctx = (struct sha256_state){};
        return 0;
}

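/*
 * export()/import() simply copy the raw struct sha256_state, which lets
 * the crypto layer save and later resume a partially computed hash.
 */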
static int sha2_export(struct shash_desc *desc, void *out)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct sha256_state *dst = out;

        *dst = *sctx;
        return 0;
}

static int sha2_import(struct shash_desc *desc, const void *in)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        struct sha256_state const *src = in;

        *sctx = *src;
        return 0;
}

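/*
 * Both algorithms are registered with a priority of 200 so that they rank
 * above the generic C implementations when the crypto API resolves "sha224"
 * or "sha256" by name.
 */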
static struct shash_alg algs[] = { {
        .init                   = sha224_init,
        .update                 = sha2_update,
        .final                  = sha224_final,
        .finup                  = sha224_finup,
        .export                 = sha2_export,
        .import                 = sha2_import,
        .descsize               = sizeof(struct sha256_state),
        .digestsize             = SHA224_DIGEST_SIZE,
        .statesize              = sizeof(struct sha256_state),
        .base                   = {
                .cra_name               = "sha224",
                .cra_driver_name        = "sha224-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
}, {
        .init                   = sha256_init,
        .update                 = sha2_update,
        .final                  = sha256_final,
        .finup                  = sha256_finup,
        .export                 = sha2_export,
        .import                 = sha2_import,
        .descsize               = sizeof(struct sha256_state),
        .digestsize             = SHA256_DIGEST_SIZE,
        .statesize              = sizeof(struct sha256_state),
        .base                   = {
                .cra_name               = "sha256",
                .cra_driver_name        = "sha256-ce",
                .cra_priority           = 200,
                .cra_flags              = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        }
} };

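/*
 * For reference, a minimal sketch of how a kernel caller might use the
 * "sha256" shash registered by this module; the names below are
 * illustrative, not part of this file, and error handling is omitted
 * (crypto_alloc_shash() returns ERR_PTR() on failure):
 *
 *      struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *      SHASH_DESC_ON_STACK(desc, tfm);
 *      u8 digest[SHA256_DIGEST_SIZE];
 *
 *      desc->tfm = tfm;
 *      crypto_shash_digest(desc, data, len, digest);
 *      crypto_free_shash(tfm);
 */
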
static int __init sha2_ce_mod_init(void)
{
        return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

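/*
 * module_cpu_feature_match() ensures this driver is only loaded and
 * registered on CPUs that advertise the SHA-2 instructions in their hwcaps.
 */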
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);