]> git.proxmox.com Git - mirror_ubuntu-focal-kernel.git/blame - include/crypto/internal/aead.h
Merge tag 'fsverity-for-linus' of git://git.kernel.org/pub/scm/fs/fscrypt/fscrypt
[mirror_ubuntu-focal-kernel.git] / include / crypto / internal / aead.h
CommitLineData
2874c5fd 1/* SPDX-License-Identifier: GPL-2.0-or-later */
5b6d2d7f
HX
2/*
3 * AEAD: Authenticated Encryption with Associated Data
4 *
b0d955ba 5 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
5b6d2d7f
HX
6 */
7
8#ifndef _CRYPTO_INTERNAL_AEAD_H
9#define _CRYPTO_INTERNAL_AEAD_H
10
11#include <crypto/aead.h>
12#include <crypto/algapi.h>
f5d8660a 13#include <linux/stddef.h>
5b6d2d7f
HX
14#include <linux/types.h>
15
16struct rtattr;
17
63293c61 18struct aead_instance {
ba75e15f 19 void (*free)(struct aead_instance *inst);
f5d8660a
HX
20 union {
21 struct {
22 char head[offsetof(struct aead_alg, base)];
23 struct crypto_instance base;
24 } s;
25 struct aead_alg alg;
26 };
63293c61
HX
27};
28
5b6d2d7f
HX
29struct crypto_aead_spawn {
30 struct crypto_spawn base;
31};
32
2c11a3f9
HX
33struct aead_queue {
34 struct crypto_queue base;
35};
36
5d1d65f8
HX
37static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
38{
39 return crypto_tfm_ctx(&tfm->base);
40}
41
63293c61
HX
42static inline struct crypto_instance *aead_crypto_instance(
43 struct aead_instance *inst)
44{
45 return container_of(&inst->alg.base, struct crypto_instance, alg);
46}
47
48static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
49{
50 return container_of(&inst->alg, struct aead_instance, alg.base);
51}
52
5c98d620
HX
53static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
54{
b0d955ba 55 return aead_instance(crypto_tfm_alg_instance(&aead->base));
5c98d620
HX
56}
57
/* Per-instance context area of an AEAD template instance. */
static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	struct crypto_instance *base = aead_crypto_instance(inst);

	return crypto_instance_ctx(base);
}
62
5d1d65f8
HX
63static inline void *aead_request_ctx(struct aead_request *req)
64{
65 return req->__ctx;
66}
67
68static inline void aead_request_complete(struct aead_request *req, int err)
69{
70 req->base.complete(&req->base, err);
71}
72
73static inline u32 aead_request_flags(struct aead_request *req)
74{
75 return req->base.flags;
76}
77
53a0bd71
TS
78static inline struct aead_request *aead_request_cast(
79 struct crypto_async_request *req)
80{
81 return container_of(req, struct aead_request, base);
82}
83
5b6d2d7f
HX
84static inline void crypto_set_aead_spawn(
85 struct crypto_aead_spawn *spawn, struct crypto_instance *inst)
86{
87 crypto_set_spawn(&spawn->base, inst);
88}
89
d29ce988
HX
90int crypto_grab_aead(struct crypto_aead_spawn *spawn, const char *name,
91 u32 type, u32 mask);
92
5b6d2d7f
HX
93static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
94{
95 crypto_drop_spawn(&spawn->base);
96}
97
63293c61
HX
98static inline struct aead_alg *crypto_spawn_aead_alg(
99 struct crypto_aead_spawn *spawn)
100{
101 return container_of(spawn->base.alg, struct aead_alg, base);
102}
103
5b6d2d7f
HX
104static inline struct crypto_aead *crypto_spawn_aead(
105 struct crypto_aead_spawn *spawn)
106{
5d1d65f8 107 return crypto_spawn_tfm2(&spawn->base);
5b6d2d7f
HX
108}
109
21b70134
HX
110static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
111 unsigned int reqsize)
112{
b0d955ba 113 aead->reqsize = reqsize;
21b70134
HX
114}
115
30e4c010
HX
116static inline unsigned int crypto_aead_alg_maxauthsize(struct aead_alg *alg)
117{
b0d955ba 118 return alg->maxauthsize;
30e4c010
HX
119}
120
/* Maximum authentication tag size for the given AEAD transform. */
static inline unsigned int crypto_aead_maxauthsize(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);

	return crypto_aead_alg_maxauthsize(alg);
}
125
2c11a3f9
HX
126static inline void aead_init_queue(struct aead_queue *queue,
127 unsigned int max_qlen)
128{
129 crypto_init_queue(&queue->base, max_qlen);
130}
131
132static inline int aead_enqueue_request(struct aead_queue *queue,
133 struct aead_request *request)
134{
135 return crypto_enqueue_request(&queue->base, &request->base);
136}
137
138static inline struct aead_request *aead_dequeue_request(
139 struct aead_queue *queue)
140{
141 struct crypto_async_request *req;
142
143 req = crypto_dequeue_request(&queue->base);
144
145 return req ? container_of(req, struct aead_request, base) : NULL;
146}
147
148static inline struct aead_request *aead_get_backlog(struct aead_queue *queue)
149{
150 struct crypto_async_request *req;
151
152 req = crypto_get_backlog(&queue->base);
153
154 return req ? container_of(req, struct aead_request, base) : NULL;
155}
156
7a530aa9
HX
157static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
158{
159 return alg->chunksize;
160}
161
/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * Ciphers such as CCM report a block size of one, but incremental
 * updates must nevertheless be supplied in multiples of the underlying
 * block size, since the IV lacks sub-block granularity.  This API calls
 * that granularity the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);

	return crypto_aead_alg_chunksize(alg);
}
177
/* Registration helpers for AEAD algorithms and template instances. */
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);
5b6d2d7f
HX
185#endif /* _CRYPTO_INTERNAL_AEAD_H */
186