/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

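/*
 * The anonymous union below relies on struct aead_alg placing its
 * struct crypto_alg member ("base") last.  The "head" padding makes
 * s.base overlay alg.base exactly, so the same memory can be viewed
 * either as a bare crypto_instance or as a full aead_alg.
 */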
struct aead_instance {
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};

struct crypto_aead_spawn {
	struct crypto_spawn base;
};

struct aead_queue {
	struct crypto_queue base;
};

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_aead_ctx_dma(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}

static inline struct crypto_instance *aead_crypto_instance(
	struct aead_instance *inst)
{
	return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
	return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
	return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void *aead_request_ctx_dma(struct aead_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(aead_request_ctx(req), align);
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

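/*
 * A minimal sketch of how a driver's queue worker might combine the cast
 * and completion helpers above; the function names are hypothetical and
 * the actual processing is driver-specific:
 *
 *	static void hypothetical_do_one(struct crypto_async_request *areq)
 *	{
 *		struct aead_request *req = aead_request_cast(areq);
 *		int err;
 *
 *		err = hypothetical_process(req);	// driver-specific work
 *		aead_request_complete(req, err);
 *	}
 */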
int crypto_grab_aead(struct crypto_aead_spawn *spawn,
		     struct crypto_instance *inst,
		     const char *name, u32 type, u32 mask);

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct aead_alg *crypto_spawn_aead_alg(
	struct crypto_aead_spawn *spawn)
{
	return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
	struct crypto_aead_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

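/*
 * Sketch of the usual template pattern (all names hypothetical, error
 * handling simplified): a template's ->create() embeds a
 * crypto_aead_spawn in its instance context, grabs the underlying AEAD
 * by name, and inspects it via crypto_spawn_aead_alg():
 *
 *	static int hypothetical_create(struct crypto_template *tmpl,
 *				       struct rtattr **tb)
 *	{
 *		struct crypto_aead_spawn *spawn;
 *		struct aead_instance *inst;
 *		struct aead_alg *alg;
 *		int err;
 *
 *		inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
 *		if (!inst)
 *			return -ENOMEM;
 *		spawn = aead_instance_ctx(inst);
 *
 *		err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
 *				       crypto_attr_alg_name(tb[1]), 0, 0);
 *		if (err)
 *			goto err_free;
 *
 *		alg = crypto_spawn_aead_alg(spawn);
 *		// ... fill in inst->alg based on alg, set inst->free ...
 *
 *		err = aead_register_instance(tmpl, inst);
 *		if (err)
 *			goto err_drop;
 *		return 0;
 *
 *	err_drop:
 *		crypto_drop_aead(spawn);
 *	err_free:
 *		kfree(inst);
 *		return err;
 *	}
 */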
static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}

static inline void crypto_aead_set_reqsize_dma(struct crypto_aead *aead,
					       unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	aead->reqsize = reqsize;
}

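/*
 * A minimal sketch (all names hypothetical) of a driver that DMAs into
 * its request context, pairing crypto_aead_set_reqsize_dma() with
 * aead_request_ctx_dma() so the context pointer it gets back is
 * suitably aligned:
 *
 *	static int hypothetical_init_tfm(struct crypto_aead *aead)
 *	{
 *		crypto_aead_set_reqsize_dma(aead,
 *					    sizeof(struct hypothetical_req_ctx));
 *		return 0;
 *	}
 *
 *	static int hypothetical_encrypt(struct aead_request *req)
 *	{
 *		struct hypothetical_req_ctx *rctx = aead_request_ctx_dma(req);
 *
 *		// rctx is now aligned for the device's DMA descriptors
 *		...
 *	}
 */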
static inline void aead_init_queue(struct aead_queue *queue,
				   unsigned int max_qlen)
{
	crypto_init_queue(&queue->base, max_qlen);
}

static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM. However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity. This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}

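/*
 * For example, a counter-based mode may advertise a block size of 1 yet
 * have a chunk size equal to the underlying cipher's block size (e.g.
 * 16 bytes when built on AES-CTR), so incremental updates must come in
 * 16-byte multiples except for the final chunk.
 */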
int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);

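/*
 * Registration sketch for a standalone (non-template) AEAD driver; the
 * callbacks, context type, and driver name below are hypothetical
 * placeholders:
 *
 *	static struct aead_alg hypothetical_alg = {
 *		.setkey		= hypothetical_setkey,
 *		.setauthsize	= hypothetical_setauthsize,
 *		.encrypt	= hypothetical_encrypt,
 *		.decrypt	= hypothetical_decrypt,
 *		.ivsize		= 12,
 *		.maxauthsize	= 16,
 *		.base = {
 *			.cra_name	 = "gcm(aes)",
 *			.cra_driver_name = "gcm-aes-hypothetical",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = 1,
 *			.cra_ctxsize	 = sizeof(struct hypothetical_ctx),
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	// module init/exit would then call:
 *	//	crypto_register_aead(&hypothetical_alg);
 *	//	crypto_unregister_aead(&hypothetical_alg);
 */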
#endif	/* _CRYPTO_INTERNAL_AEAD_H */