v3.1
 
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk	in;
	unsigned int		nbytes;
	struct scatter_walk	out;
	unsigned int		total;
	struct list_head	buffers;
	u8			*iv_buffer;
	u8			*iv;
	int			flags;
	unsigned int		blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}

#endif	/* _CRYPTO_ALGAPI_H */
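In this v3.1 interface, a template supplies ->alloc()/->free() callbacks and is attached with crypto_register_template(); the instance it builds is bound to its underlying algorithm through a crypto_spawn kept in the instance context. The sketch below shows that flow for illustration only: the "example" template name and handlers are hypothetical, and a real template (compare crypto/cbc.c in the same tree) also fills in the instance's cra_* fields and operations before returning it.

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

/* Hypothetical template: wraps a single-block cipher named by tb[1]. */
static struct crypto_instance *example_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	/* Resolve the underlying cipher requested as "example(<cipher>)". */
	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/*
	 * Allocates the instance, initialises the spawn in its context and
	 * builds the "example(...)" cra_name/cra_driver_name pair.
	 */
	inst = crypto_alloc_instance("example", alg);

	/* ... on success, fill in inst->alg.cra_* and the cipher ops here ... */

	crypto_mod_put(alg);
	return inst;
}

static void example_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template example_tmpl = {
	.name = "example",
	.alloc = example_alloc,
	.free = example_free,
	.module = THIS_MODULE,
};

static int __init example_module_init(void)
{
	return crypto_register_template(&example_tmpl);
}

static void __exit example_module_exit(void)
{
	crypto_unregister_template(&example_tmpl);
}

module_init(example_module_init);
module_exit(example_module_exit);
MODULE_LICENSE("GPL");
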
v6.13.7
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <crypto/utils.h>
#include <linux/align.h>
#include <linux/cache.h>
#include <linux/crypto.h>
#include <linux/types.h>
#include <linux/workqueue.h>

/*
 * Maximum values for blocksize and alignmask, used to allocate
 * static buffers that are big enough for any combination of
 * algs and architectures. Ciphers have a lower maximum size.
 */
#define MAX_ALGAPI_BLOCKSIZE		160
#define MAX_ALGAPI_ALIGNMASK		127
#define MAX_CIPHER_BLOCKSIZE		16
#define MAX_CIPHER_ALIGNMASK		15

#ifdef ARCH_DMA_MINALIGN
#define CRYPTO_DMA_ALIGN ARCH_DMA_MINALIGN
#else
#define CRYPTO_DMA_ALIGN CRYPTO_MINALIGN
#endif

#define CRYPTO_DMA_PADDING ((CRYPTO_DMA_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so retains those aliases as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)

struct crypto_aead;
struct crypto_instance;
struct module;
struct notifier_block;
struct rtattr;
struct scatterlist;
struct seq_file;
struct sk_buff;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	void (*free)(struct crypto_instance *inst);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;

	union {
		/* Node in list of instances after registration. */
		struct hlist_node list;
		/* List of attached spawns before registration. */
		struct crypto_spawn *spawns;
	};

	struct work_struct free_work;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	union {
		/* Back pointer to instance after registration.*/
		struct crypto_instance *inst;
		/* Spawn list pointer prior to registration. */
		struct crypto_spawn *next;
	};
	const struct crypto_type *frontend;
	u32 mask;
	bool dead;
	bool registered;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
int crypto_register_templates(struct crypto_template *tmpls, int count);
void crypto_unregister_template(struct crypto_template *tmpl);
void crypto_unregister_templates(struct crypto_template *tmpls, int count);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
void crypto_unregister_instance(struct crypto_instance *inst);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask);
void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret);
const char *crypto_attr_alg_name(struct rtattr *rta);
int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
static inline unsigned int crypto_queue_len(struct crypto_queue *queue)
{
	return queue->qlen;
}

void crypto_inc(u8 *a, unsigned int size);

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline void *crypto_tfm_ctx_align(struct crypto_tfm *tfm,
					 unsigned int align)
{
	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_tfm_ctx(tfm), align);
}

static inline unsigned int crypto_dma_align(void)
{
	return CRYPTO_DMA_ALIGN;
}

static inline unsigned int crypto_dma_padding(void)
{
	return (crypto_dma_align() - 1) & ~(crypto_tfm_ctx_alignment() - 1);
}

static inline void *crypto_tfm_ctx_dma(struct crypto_tfm *tfm)
{
	return crypto_tfm_ctx_align(tfm, crypto_dma_align());
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off)
{
	return (algt->type ^ off) & algt->mask & off;
}

/*
 * When an algorithm uses another algorithm (e.g., if it's an instance of a
 * template), these are the flags that should always be set on the "outer"
 * algorithm if any "inner" algorithm has them set.
 */
#define CRYPTO_ALG_INHERITED_FLAGS	\
	(CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |	\
	 CRYPTO_ALG_ALLOCATES_MEMORY)

/*
 * Given the type and mask that specify the flags restrictions on a template
 * instance being created, return the mask that should be passed to
 * crypto_grab_*() (along with type=0) to honor any request the user made to
 * have any of the CRYPTO_ALG_INHERITED_FLAGS clear.
 */
static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt)
{
	return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS);
}

int crypto_register_notifier(struct notifier_block *nb);
int crypto_unregister_notifier(struct notifier_block *nb);

/* Crypto notification events. */
enum {
	CRYPTO_MSG_ALG_REQUEST,
	CRYPTO_MSG_ALG_REGISTER,
	CRYPTO_MSG_ALG_LOADED,
};

static inline void crypto_request_complete(struct crypto_async_request *req,
					   int err)
{
	req->complete(req->data, err);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

#endif	/* _CRYPTO_ALGAPI_H */
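Relative to v3.1, the ->alloc()/->free() template callbacks, crypto_init_spawn() and the per-type spawn helpers are gone from this header: an instance is now built inside a single ->create() callback that binds its underlying algorithm with crypto_grab_spawn(), names itself with crypto_inst_setname() and finishes with crypto_register_instance(). The sketch below shows that flow using only the declarations above; it is hypothetical: the "example" name and context layout are illustrative, the mandatory inst->alg setup is elided, and real templates normally use the type-specific wrappers (crypto_grab_cipher(), skcipher_alloc_instance_simple(), ...) layered on top of this interface.

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

/* Hypothetical ->create() handler for a template named "example". */
static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct crypto_instance *inst;
	struct crypto_spawn *spawn;
	u32 mask;
	int err;

	/* Parse the requested type/mask and honor inherited-flag requests. */
	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);
	mask = crypto_algt_inherited_mask(algt);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = crypto_instance_ctx(inst);

	/* Bind the underlying algorithm named by the first template argument. */
	err = crypto_grab_spawn(spawn, inst, crypto_attr_alg_name(tb[1]),
				0, mask);
	if (err)
		goto err_free_inst;

	/* Derive the "example(<alg>)" cra_name/cra_driver_name pair. */
	err = crypto_inst_setname(inst, tmpl->name, spawn->alg);
	if (err)
		goto err_drop_spawn;

	/* ... fill in inst->alg sizes, flags and operations here ... */

	err = crypto_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;
	return 0;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	return err;
}

static struct crypto_template example_tmpl = {
	.name = "example",
	.create = example_create,
	.module = THIS_MODULE,
};

/* Allow autoloading of the module via the prefixed "crypto-example" alias. */
MODULE_ALIAS_CRYPTO("example");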