/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE	160
#define MAX_ALGAPI_ALIGNMASK	127
#define MAX_CIPHER_BLOCKSIZE	16
#define MAX_CIPHER_ALIGNMASK	15

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this
 * macro expands twice on the same line. Instead, use a separate base name
 * for each alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
	__MODULE_INFO(alias, alias_userspace, name);	\
	__MODULE_INFO(alias, alias_crypto, "crypto-" name)
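/*
 * Example (illustrative, not part of this header): a module implementing
 * "sha256" declares both the plain and the prefixed alias with:
 *
 *	MODULE_ALIAS_CRYPTO("sha256");
 *
 * allowing it to be loaded as "sha256" (userspace requests) or as
 * "crypto-sha256" (kernel-internal, prefix-restricted autoloading).
 */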

struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};
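/*
 * Example (illustrative sketch; the "example" names and the type constant
 * are hypothetical): each algorithm frontend defines one crypto_type
 * describing its tfm layout and lookup masks, along the lines of:
 *
 *	static const struct crypto_type crypto_example_type = {
 *		.extsize = crypto_alg_extsize,
 *		.init_tfm = crypto_example_init_tfm,
 *		.maskclear = ~CRYPTO_ALG_TYPE_MASK,
 *		.maskset = CRYPTO_ALG_TYPE_MASK,
 *		.type = CRYPTO_ALG_TYPE_EXAMPLE,
 *		.tfmsize = offsetof(struct crypto_example, base),
 *	};
 */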

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	struct work_struct free_work;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
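/*
 * Example (illustrative; the "example" names are hypothetical): a template
 * is normally defined statically and registered from module init:
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.create = example_create,
 *		.module = THIS_MODULE,
 *	};
 *
 * Instances are then created on demand, e.g. when a user asks for
 * "example(aes)", by invoking ->create() with the parsed parameters in tb.
 */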

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);
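/*
 * Example (illustrative; example_alg is a hypothetical driver-defined
 * struct crypto_alg): registration is typically paired with module
 * init/exit:
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */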

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);
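/*
 * Example (illustrative sketch; cipher_name and mask come from the
 * surrounding ->create() code): a spawn placed in the instance context pins
 * the underlying algorithm, and must be released again on failure:
 *
 *	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *	int err;
 *
 *	err = crypto_grab_spawn(spawn, inst, cipher_name, 0, mask);
 *	if (err)
 *		return err;
 *	...
 *	crypto_drop_spawn(spawn);	// on a later error path
 */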

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}
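/*
 * Example (illustrative; the queue depth and request names are arbitrary):
 * a driver feeds requests into the queue from its request handler and
 * drains it from a worker:
 *
 *	crypto_init_queue(&queue, 32);
 *	...
 *	ret = crypto_enqueue_request(&queue, &req->base);
 *	// -EINPROGRESS: queued; -EBUSY: queued on the backlog (request had
 *	// CRYPTO_TFM_REQ_MAY_BACKLOG set); -ENOSPC: queue full, rejected
 *	...
 *	req = crypto_dequeue_request(&queue);	// in the worker
 */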

void crypto_inc(u8 *a, unsigned int size);
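/*
 * Example: crypto_inc() treats the buffer as a big-endian integer and adds
 * one, e.g. to advance a CTR-mode counter block:
 *
 *	u8 ctrblk[16];
 *	...
 *	crypto_inc(ctrblk, sizeof(ctrblk));
 */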

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}
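/*
 * Example (illustrative; struct example_ctx is hypothetical): a driver whose
 * context must be DMA-aligned pads the declared context size and fetches
 * the aligned pointer at runtime:
 *
 *	.cra_ctxsize = sizeof(struct example_ctx) + CRYPTO_DMA_PADDING,
 *	...
 *	struct example_ctx *ctx = crypto_tfm_ctx_dma(tfm);
 */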

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
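/*
 * Example (illustrative): the usual dequeue pattern notifies a backlogged
 * request's originator before processing the next request:
 *
 *	backlog = crypto_get_backlog(&queue);
 *	req = crypto_dequeue_request(&queue);
 *	if (backlog)
 *		crypto_request_complete(backlog, -EINPROGRESS);
 */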

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}
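/*
 * Example (illustrative sketch; spawn, inst, tb and alg come from the
 * surrounding ->create() code): obtain the inherited mask, pass it to
 * crypto_grab_*(), then propagate the inner algorithm's inherited flags:
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
 *				0, mask);
 *	...
 *	inst->alg.cra_flags = alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS;
 */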

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}
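/*
 * Example (illustrative; req->base assumes a frontend request type such as
 * skcipher_request): an asynchronous driver completes a request from its
 * IRQ handler or workqueue once the operation has finished:
 *
 *	crypto_request_complete(&req->base, 0);		// success
 *	crypto_request_complete(&req->base, -EINVAL);	// failure
 */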

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}
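/*
 * Example: dispatching on the algorithm type of a generic tfm:
 *
 *	if (crypto_tfm_alg_type(tfm) == CRYPTO_ALG_TYPE_AEAD)
 *		...
 */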

#endif /* _CRYPTO_ALGAPI_H */