fs/cifsd/crypto_ctx.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2019 Samsung Electronics Co., Ltd.
 */

#include <linux/kernel.h>
#include <linux/string.h>
#include <linux/err.h>
#include <linux/slab.h>
#include <linux/wait.h>
#include <linux/sched.h>
#include <linux/version.h>

#include "glob.h"
#include "crypto_ctx.h"
#include "buffer_pool.h"

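/*
 * Pool of pre-allocated crypto contexts shared by all worker threads.
 * @avail_ctx counts live contexts, @idle_ctx holds the ones not
 * currently in use, and @ctx_wait lets callers sleep until a context
 * is returned to the pool.
 */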
struct crypto_ctx_list {
	spinlock_t		ctx_lock;
	int			avail_ctx;
	struct list_head	idle_ctx;
	wait_queue_head_t	ctx_wait;
};

static struct crypto_ctx_list ctx_list;

static inline void free_aead(struct crypto_aead *aead)
{
	if (aead)
		crypto_free_aead(aead);
}

static void free_shash(struct shash_desc *shash)
{
	if (shash) {
		crypto_free_shash(shash->tfm);
		kfree(shash);
	}
}

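/*
 * Allocate an AEAD transform for SMB3 encryption: AES-128-GCM or
 * AES-128-CCM.  Returns NULL rather than an ERR_PTR on failure, so
 * callers only need a NULL check.
 */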
static struct crypto_aead *alloc_aead(int id)
{
	struct crypto_aead *tfm = NULL;

	switch (id) {
	case CRYPTO_AEAD_AES128_GCM:
		tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
		break;
	case CRYPTO_AEAD_AES128_CCM:
		tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
		break;
	default:
		ksmbd_err("Unsupported aead algorithm (id : %d)\n", id);
		return NULL;
	}

	if (IS_ERR(tfm)) {
		ksmbd_err("Failed to alloc encrypt aead : %ld\n", PTR_ERR(tfm));
		return NULL;
	}

	return tfm;
}

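/*
 * Allocate a synchronous hash transform for the given algorithm id
 * together with its shash_desc in a single buffer.  The descriptor
 * size depends on the transform, so the tfm must be allocated first.
 */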
static struct shash_desc *alloc_shash_desc(int id)
{
	struct crypto_shash *tfm = NULL;
	struct shash_desc *shash;

	switch (id) {
	case CRYPTO_SHASH_HMACMD5:
		tfm = crypto_alloc_shash("hmac(md5)", 0, 0);
		break;
	case CRYPTO_SHASH_HMACSHA256:
		tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
		break;
	case CRYPTO_SHASH_CMACAES:
		tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
		break;
	case CRYPTO_SHASH_SHA256:
		tfm = crypto_alloc_shash("sha256", 0, 0);
		break;
	case CRYPTO_SHASH_SHA512:
		tfm = crypto_alloc_shash("sha512", 0, 0);
		break;
	case CRYPTO_SHASH_MD4:
		tfm = crypto_alloc_shash("md4", 0, 0);
		break;
	case CRYPTO_SHASH_MD5:
		tfm = crypto_alloc_shash("md5", 0, 0);
		break;
	}

	if (IS_ERR(tfm))
		return NULL;

	shash = kzalloc(sizeof(*shash) + crypto_shash_descsize(tfm),
			GFP_KERNEL);
	if (!shash)
		crypto_free_shash(tfm);
	else
		shash->tfm = tfm;
	return shash;
}

static struct ksmbd_crypto_ctx *ctx_alloc(void)
{
	return kzalloc(sizeof(struct ksmbd_crypto_ctx), GFP_KERNEL);
}

static void ctx_free(struct ksmbd_crypto_ctx *ctx)
{
	int i;

	for (i = 0; i < CRYPTO_SHASH_MAX; i++)
		free_shash(ctx->desc[i]);
	for (i = 0; i < CRYPTO_AEAD_MAX; i++)
		free_aead(ctx->ccmaes[i]);
	kfree(ctx);
}

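/*
 * Take a crypto context from the pool.  Fast path: pop an idle
 * context.  If none is idle and avail_ctx already exceeds
 * num_online_cpus(), sleep until a context is released.  Otherwise
 * grow the pool by one; if that allocation fails, undo the
 * accounting and wait for a release instead.  Never returns NULL.
 */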
static struct ksmbd_crypto_ctx *ksmbd_find_crypto_ctx(void)
{
	struct ksmbd_crypto_ctx *ctx;

	while (1) {
		spin_lock(&ctx_list.ctx_lock);
		if (!list_empty(&ctx_list.idle_ctx)) {
			ctx = list_entry(ctx_list.idle_ctx.next,
					 struct ksmbd_crypto_ctx,
					 list);
			list_del(&ctx->list);
			spin_unlock(&ctx_list.ctx_lock);
			return ctx;
		}

		if (ctx_list.avail_ctx > num_online_cpus()) {
			spin_unlock(&ctx_list.ctx_lock);
			wait_event(ctx_list.ctx_wait,
				   !list_empty(&ctx_list.idle_ctx));
			continue;
		}

		ctx_list.avail_ctx++;
		spin_unlock(&ctx_list.ctx_lock);

		ctx = ctx_alloc();
		if (!ctx) {
			spin_lock(&ctx_list.ctx_lock);
			ctx_list.avail_ctx--;
			spin_unlock(&ctx_list.ctx_lock);
			wait_event(ctx_list.ctx_wait,
				   !list_empty(&ctx_list.idle_ctx));
			continue;
		}
		break;
	}
	return ctx;
}

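/*
 * Return a context to the pool.  While the pool is within its cap the
 * context goes back on the idle list and one waiter is woken;
 * otherwise the pool shrinks and the context is freed outright.
 */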
void ksmbd_release_crypto_ctx(struct ksmbd_crypto_ctx *ctx)
{
	if (!ctx)
		return;

	spin_lock(&ctx_list.ctx_lock);
	if (ctx_list.avail_ctx <= num_online_cpus()) {
		list_add(&ctx->list, &ctx_list.idle_ctx);
		spin_unlock(&ctx_list.ctx_lock);
		wake_up(&ctx_list.ctx_wait);
		return;
	}

	ctx_list.avail_ctx--;
	spin_unlock(&ctx_list.ctx_lock);
	ctx_free(ctx);
}

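/*
 * Grab a pooled context and make sure it carries a shash descriptor
 * for algorithm @id, allocating one lazily on first use.  Descriptors
 * stay attached to the context, so later users get them for free.
 */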
static struct ksmbd_crypto_ctx *____crypto_shash_ctx_find(int id)
{
	struct ksmbd_crypto_ctx *ctx;

	if (id >= CRYPTO_SHASH_MAX)
		return NULL;

	ctx = ksmbd_find_crypto_ctx();
	if (ctx->desc[id])
		return ctx;

	ctx->desc[id] = alloc_shash_desc(id);
	if (ctx->desc[id])
		return ctx;
	ksmbd_release_crypto_ctx(ctx);
	return NULL;
}

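/*
 * Public lookup helpers, one per algorithm.  A minimal caller sketch
 * (buf, len and hash are the caller's data; return codes of the
 * shash calls are omitted here for brevity):
 *
 *	struct ksmbd_crypto_ctx *ctx;
 *
 *	ctx = ksmbd_crypto_ctx_find_sha256();
 *	if (!ctx)
 *		return -ENOMEM;
 *	crypto_shash_init(ctx->desc[CRYPTO_SHASH_SHA256]);
 *	crypto_shash_update(ctx->desc[CRYPTO_SHASH_SHA256], buf, len);
 *	crypto_shash_final(ctx->desc[CRYPTO_SHASH_SHA256], hash);
 *	ksmbd_release_crypto_ctx(ctx);
 */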
struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacmd5(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACMD5);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_hmacsha256(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_HMACSHA256);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_cmacaes(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_CMACAES);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha256(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA256);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_sha512(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_SHA512);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_md4(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_MD4);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_md5(void)
{
	return ____crypto_shash_ctx_find(CRYPTO_SHASH_MD5);
}

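/*
 * AEAD counterpart of ____crypto_shash_ctx_find(): lazily attach an
 * AEAD transform for @id to a pooled context.
 */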
static struct ksmbd_crypto_ctx *____crypto_aead_ctx_find(int id)
{
	struct ksmbd_crypto_ctx *ctx;

	if (id >= CRYPTO_AEAD_MAX)
		return NULL;

	ctx = ksmbd_find_crypto_ctx();
	if (ctx->ccmaes[id])
		return ctx;

	ctx->ccmaes[id] = alloc_aead(id);
	if (ctx->ccmaes[id])
		return ctx;
	ksmbd_release_crypto_ctx(ctx);
	return NULL;
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_gcm(void)
{
	return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES128_GCM);
}

struct ksmbd_crypto_ctx *ksmbd_crypto_ctx_find_ccm(void)
{
	return ____crypto_aead_ctx_find(CRYPTO_AEAD_AES128_CCM);
}

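/*
 * Free every idle context.  Runs without taking ctx_lock, so it
 * assumes all users are gone and every context has been released
 * back to the pool before teardown.
 */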
void ksmbd_crypto_destroy(void)
{
	struct ksmbd_crypto_ctx *ctx;

	while (!list_empty(&ctx_list.idle_ctx)) {
		ctx = list_entry(ctx_list.idle_ctx.next,
				 struct ksmbd_crypto_ctx,
				 list);
		list_del(&ctx->list);
		ctx_free(ctx);
	}
}

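/*
 * Initialize the pool and seed it with a single empty context so the
 * first ksmbd_find_crypto_ctx() caller never has to allocate.
 */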
int ksmbd_crypto_create(void)
{
	struct ksmbd_crypto_ctx *ctx;

	spin_lock_init(&ctx_list.ctx_lock);
	INIT_LIST_HEAD(&ctx_list.idle_ctx);
	init_waitqueue_head(&ctx_list.ctx_wait);
	ctx_list.avail_ctx = 1;

	ctx = ctx_alloc();
	if (!ctx)
		return -ENOMEM;
	list_add(&ctx->list, &ctx_list.idle_ctx);
	return 0;
}