v6.8
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		127
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. the avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)

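/*
 * Illustrative sketch, not part of this header: how a module typically
 * declares its aliases with MODULE_ALIAS_CRYPTO so that requests for either
 * the plain name or the "crypto-" prefixed name can autoload it (the generic
 * AES module does essentially this).
 */
MODULE_ALIAS_CRYPTO("aes");
MODULE_ALIAS_CRYPTO("aes-generic");
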
struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);
#ifdef CONFIG_CRYPTO_STATS
	int (*report_stat)(struct sk_buff *skb, struct crypto_alg *alg);
#endif

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	struct work_struct free_work;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration. */
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

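/*
 * Illustrative sketch, not part of this header: a driver that provides several
 * algorithms usually registers them as an array in one call on module init and
 * unregisters them on exit.  The my_driver_* names and field values are
 * hypothetical placeholders; assumes <linux/module.h> for THIS_MODULE and
 * module_init/module_exit.
 */
static struct crypto_alg my_driver_algs[] = {
	{
		.cra_name		= "myalg",
		.cra_driver_name	= "myalg-mydriver",
		.cra_priority		= 300,
		.cra_blocksize		= 16,
		.cra_module		= THIS_MODULE,
		/* ... remaining type-specific fields omitted ... */
	},
};

static int __init my_driver_init(void)
{
	return crypto_register_algs(my_driver_algs, ARRAY_SIZE(my_driver_algs));
}

static void __exit my_driver_exit(void)
{
	crypto_unregister_algs(my_driver_algs, ARRAY_SIZE(my_driver_algs));
}
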
void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

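/*
 * Illustrative sketch, not part of this header: registering a template.  The
 * ->create() callback builds an instance from the rtattr arguments when a name
 * such as "mytmpl(aes)" is first requested.  The my_tmpl* names are
 * hypothetical; real templates (cbc, hmac, ...) live under crypto/.
 */
static int my_tmpl_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	/* Parse tb[], allocate an instance, then crypto_register_instance(). */
	return -EOPNOTSUPP;	/* placeholder body */
}

static struct crypto_template my_tmpl = {
	.name	= "mytmpl",
	.create	= my_tmpl_create,
	.module	= THIS_MODULE,
};

static int __init my_tmpl_init(void)
{
	return crypto_register_template(&my_tmpl);
}

static void __exit my_tmpl_exit(void)
{
	crypto_unregister_template(&my_tmpl);
}
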
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

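/*
 * Illustrative sketch, not part of this header: the usual first steps of a
 * template ->create() callback.  It checks the requested type/mask, pulls the
 * underlying algorithm name out of the rtattr list (tb[0] is the type
 * attribute, tb[1] the first algorithm name) and derives the instance name
 * "mytmpl(<alg>)".  Error handling is abbreviated, the names are hypothetical,
 * and <linux/err.h> is assumed for IS_ERR()/PTR_ERR().
 */
static int my_tmpl_parse(struct crypto_template *tmpl, struct rtattr **tb,
			 struct crypto_instance *inst, struct crypto_alg *alg)
{
	const char *cipher_name;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);
	/* cipher_name would next be handed to crypto_grab_spawn(). */

	/* Sets inst->alg.cra_name to "mytmpl(<alg name>)", and the driver name too. */
	return crypto_inst_setname(inst, tmpl->name, alg);
}
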
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

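/*
 * Illustrative sketch, not part of this header: a hardware driver commonly
 * feeds requests through a crypto_queue under its own lock.  Requests beyond
 * max_qlen are only kept as backlog when the caller set
 * CRYPTO_TFM_REQ_MAY_BACKLOG, in which case -EBUSY is returned; otherwise the
 * request is rejected.  The my_dev type, the lock and the queue depth are
 * hypothetical; <linux/spinlock.h> is assumed.
 */
struct my_dev {
	spinlock_t lock;
	struct crypto_queue queue;
};

static void my_dev_init_queue(struct my_dev *dev)
{
	spin_lock_init(&dev->lock);
	crypto_init_queue(&dev->queue, 50);	/* assumed queue depth */
}

static int my_dev_queue_req(struct my_dev *dev,
			    struct crypto_async_request *req)
{
	int ret;

	spin_lock_bh(&dev->lock);
	ret = crypto_enqueue_request(&dev->queue, req);
	spin_unlock_bh(&dev->lock);

	return ret;	/* -EINPROGRESS on success, -EBUSY if backlogged */
}
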
void crypto_inc(u8 *a, unsigned int size);

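/*
 * Illustrative sketch, not part of this header: crypto_inc() treats the buffer
 * as a big-endian counter of the given size and adds one, which is how
 * CTR-style modes bump their IV between blocks.  The 16-byte counter and the
 * helper name below are just an example.
 */
static inline void my_bump_ctr_iv(u8 iv[16])
{
	crypto_inc(iv, 16);	/* iv is interpreted as a 128-bit big-endian counter */
}
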
static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}

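/*
 * Illustrative sketch, not part of this header: a driver whose transform
 * context is read by hardware via DMA typically reserves crypto_dma_padding()
 * (or CRYPTO_DMA_PADDING) of extra headroom in cra_ctxsize and then uses
 * crypto_tfm_ctx_dma() so the pointer it hands to the device is aligned to
 * crypto_dma_align().  The my_tfm_ctx layout is hypothetical.
 */
struct my_tfm_ctx {
	u8 key[32];	/* example field the device fetches via DMA */
};

static struct my_tfm_ctx *my_get_dma_ctx(struct crypto_tfm *tfm)
{
	/* cra_ctxsize would be sizeof(struct my_tfm_ctx) + crypto_dma_padding() */
	return crypto_tfm_ctx_dma(tfm);
}
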
static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

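/*
 * Illustrative sketch, not part of this header: a template instance usually
 * keeps its spawns in the instance context.  ->create() grabs the underlying
 * algorithm by name, and the spawn is dropped again when the instance is
 * freed.  The names are hypothetical and error handling is abbreviated.
 */
struct my_instance_ctx {
	struct crypto_spawn spawn;
};

static int my_grab_underlying(struct crypto_instance *inst,
			      const char *cipher_name, u32 mask)
{
	struct my_instance_ctx *ictx = crypto_instance_ctx(inst);

	/* type is typically 0 here; mask usually comes from crypto_algt_inherited_mask(). */
	return crypto_grab_spawn(&ictx->spawn, inst, cipher_name, 0, mask);
}
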
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}

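/*
 * Illustrative sketch, not part of this header: how a template ->create()
 * might derive the spawn mask by hand from crypto_get_attr_type() and
 * crypto_algt_inherited_mask(), as the comment above describes.  The helper
 * name is hypothetical and <linux/err.h> is assumed for IS_ERR()/PTR_ERR().
 */
static inline u32 my_tmpl_inherited_mask(struct rtattr **tb, int *err)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt)) {
		*err = PTR_ERR(algt);
		return 0;
	}

	*err = 0;
	return crypto_algt_inherited_mask(algt);
}
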
int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

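/*
 * Illustrative sketch, not part of this header: a listener on the crypto
 * notifier chain.  The callback receives one of the CRYPTO_MSG_* events above;
 * CRYPTO_MSG_ALG_LOADED, for instance, indicates that a newly registered
 * algorithm has become usable.  The my_crypto_* names are hypothetical and
 * <linux/notifier.h> is assumed.
 */
static int my_crypto_event(struct notifier_block *nb, unsigned long event,
			   void *data)
{
	if (event == CRYPTO_MSG_ALG_LOADED)
		return NOTIFY_OK;	/* react to the newly available algorithm */

	return NOTIFY_DONE;
}

static struct notifier_block my_crypto_nb = {
	.notifier_call = my_crypto_event,
};

/* crypto_register_notifier(&my_crypto_nb) attaches it; unregister on exit. */
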
static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}

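/*
 * Illustrative sketch, not part of this header: the classic dequeue loop of a
 * driver thread.  Before taking the next request it peeks at the backlog and,
 * if an entry was promoted into the active queue, notifies its owner with
 * -EINPROGRESS via crypto_request_complete().  Uses the hypothetical my_dev
 * type from the queue sketch above; locking details are abbreviated.
 */
static void my_dev_pump(struct my_dev *dev)
{
	struct crypto_async_request *req, *backlog;

	spin_lock_bh(&dev->lock);
	backlog = crypto_get_backlog(&dev->queue);
	req = crypto_dequeue_request(&dev->queue);
	spin_unlock_bh(&dev->lock);

	if (!req)
		return;

	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	/* ... process req, then crypto_request_complete(req, err) when done ... */
}
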
static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

#endif	/* _CRYPTO_ALGAPI_H */