/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * sha256_base.h - core logic for SHA-256 implementations
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include <asm/unaligned.h>

/*
 * Block function supplied by the driver: consumes @blocks complete
 * SHA256_BLOCK_SIZE-byte blocks from @src and updates the state in @sst.
 */
typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
                               int blocks);

/* Initialise the state with the SHA-224 initial hash values. */
static inline int sha224_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        sctx->state[0] = SHA224_H0;
        sctx->state[1] = SHA224_H1;
        sctx->state[2] = SHA224_H2;
        sctx->state[3] = SHA224_H3;
        sctx->state[4] = SHA224_H4;
        sctx->state[5] = SHA224_H5;
        sctx->state[6] = SHA224_H6;
        sctx->state[7] = SHA224_H7;
        sctx->count = 0;

        return 0;
}

/* Initialise the state with the SHA-256 initial hash values. */
static inline int sha256_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        sctx->state[0] = SHA256_H0;
        sctx->state[1] = SHA256_H1;
        sctx->state[2] = SHA256_H2;
        sctx->state[3] = SHA256_H3;
        sctx->state[4] = SHA256_H4;
        sctx->state[5] = SHA256_H5;
        sctx->state[6] = SHA256_H6;
        sctx->state[7] = SHA256_H7;
        sctx->count = 0;

        return 0;
}

/*
 * Buffer the input and hand complete SHA256_BLOCK_SIZE-byte blocks to
 * @block_fn; any remainder is kept in sctx->buf for the next call.
 */
static inline int sha256_base_do_update(struct shash_desc *desc,
                                        const u8 *data,
                                        unsigned int len,
                                        sha256_block_fn *block_fn)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
                int blocks;

                if (partial) {
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;

                        block_fn(sctx, sctx->buf, 1);
                }

                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

                if (blocks) {
                        block_fn(sctx, data, blocks);
                        data += blocks * SHA256_BLOCK_SIZE;
                }
                partial = 0;
        }
        if (len)
                memcpy(sctx->buf + partial, data, len);

        return 0;
}

/*
 * Apply the final padding: a 0x80 byte, zeroes, and the big-endian 64-bit
 * bit count in the last 8 bytes of the final block.  An extra block is
 * processed if the length field does not fit in the current one.
 */
static inline int sha256_base_do_finalize(struct shash_desc *desc,
                                          sha256_block_fn *block_fn)
{
        const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 *bits = (__be64 *)(sctx->buf + bit_offset);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->buf[partial++] = 0x80;
        if (partial > bit_offset) {
                memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
                partial = 0;

                block_fn(sctx, sctx->buf, 1);
        }

        memset(sctx->buf + partial, 0x0, bit_offset - partial);
        *bits = cpu_to_be64(sctx->count << 3);
        block_fn(sctx, sctx->buf, 1);

        return 0;
}

/*
 * Emit the digest as big-endian words (28 bytes for SHA-224, 32 bytes for
 * SHA-256, depending on the tfm's digest size) and wipe the state.
 */
static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
{
        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *digest = (__be32 *)out;
        int i;

        for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32))
                put_unaligned_be32(sctx->state[i], digest++);

        *sctx = (struct sha256_state){};
        return 0;
}
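
/*
 * Usage sketch (illustrative only, not part of the upstream header): a
 * driver supplies a block function with the sha256_block_fn signature and
 * wires the helpers above into its shash callbacks roughly as shown below.
 * The my_* names are hypothetical placeholders for the driver's own code.
 *
 *      static void my_sha256_block(struct sha256_state *sst, u8 const *src,
 *                                  int blocks)
 *      {
 *              // compress 'blocks' 64-byte blocks from 'src' into sst->state
 *      }
 *
 *      static int my_sha256_update(struct shash_desc *desc, const u8 *data,
 *                                  unsigned int len)
 *      {
 *              return sha256_base_do_update(desc, data, len, my_sha256_block);
 *      }
 *
 *      static int my_sha256_final(struct shash_desc *desc, u8 *out)
 *      {
 *              sha256_base_do_finalize(desc, my_sha256_block);
 *              return sha256_base_finish(desc, out);
 *      }
 *
 * These callbacks would then be registered through a struct shash_alg whose
 * descsize covers sizeof(struct sha256_state), with sha256_base_init (or
 * sha224_base_init) as the .init handler.
 */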