include/linux/crypto.h (v5.14.15)
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/completion.h>

/*
 * Autoloaded crypto modules should only use a prefixed name to avoid allowing
 * arbitrary modules to be loaded. Loading from userspace may still need the
 * unprefixed names, so those aliases are retained as well.
 * This uses __MODULE_INFO directly instead of MODULE_ALIAS because pre-4.3
 * gcc (e.g. avr32 toolchain) uses __LINE__ for uniqueness, and this macro
 * expands twice on the same line. Instead, use a separate base name for the
 * alias.
 */
#define MODULE_ALIAS_CRYPTO(name)	\
		__MODULE_INFO(alias, alias_userspace, name);	\
		__MODULE_INFO(alias, alias_crypto, "crypto-" name)

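/*
 * Example (illustrative, not part of this header): a module implementing
 * SHA-256 would declare both aliases with a single line:
 *
 *	MODULE_ALIAS_CRYPTO("sha256");
 *
 * The crypto core autoloads via the prefixed "crypto-sha256" alias, so an
 * arbitrary module name cannot be loaded through it, while the bare
 * "sha256" alias keeps an explicit modprobe from userspace working.
 */
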
/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_SKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_KPP		0x00000008
#define CRYPTO_ALG_TYPE_ACOMPRESS	0x0000000a
#define CRYPTO_ALG_TYPE_SCOMPRESS	0x0000000b
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_AKCIPHER	0x0000000d
#define CRYPTO_ALG_TYPE_HASH		0x0000000e
#define CRYPTO_ALG_TYPE_SHASH		0x0000000e
#define CRYPTO_ALG_TYPE_AHASH		0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_ACOMPRESS_MASK	0x0000000e

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set if the algorithm (or an algorithm which it uses) requires another
 * algorithm of the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */

#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via an instruction set or similar means.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Mark a cipher as a service implementation only usable by another
 * cipher and never by a normal user of the kernel crypto API.
 */
#define CRYPTO_ALG_INTERNAL		0x00002000

/*
 * Set if the algorithm has a ->setkey() method but can be used without
 * calling it first, i.e. there is a default key.
 */
#define CRYPTO_ALG_OPTIONAL_KEY		0x00004000

/*
 * Don't trigger module loading.
 */
#define CRYPTO_NOLOAD			0x00008000

/*
 * The algorithm may allocate memory during request processing, i.e. during
 * encryption, decryption, or hashing.  Users can request an algorithm with this
 * flag unset if they can't handle memory allocation failures.
 *
 * This flag is currently only implemented for algorithms of type "skcipher",
 * "aead", "ahash", "shash", and "cipher".  Algorithms of other types might not
 * have this flag set even if they allocate memory.
 *
 * In some edge cases, algorithms can allocate memory regardless of this flag.
 * To avoid these cases, users must obey the following usage constraints:
 *    skcipher:
 *	- The IV buffer and all scatterlist elements must be aligned to the
 *	  algorithm's alignmask.
 *	- If the data were to be divided into chunks of size
 *	  crypto_skcipher_walksize() (with any remainder going at the end), no
 *	  chunk can cross a page boundary or a scatterlist element boundary.
 *    aead:
 *	- The IV buffer and all scatterlist elements must be aligned to the
 *	  algorithm's alignmask.
 *	- The first scatterlist element must contain all the associated data,
 *	  and its pages must be !PageHighMem.
 *	- If the plaintext/ciphertext were to be divided into chunks of size
 *	  crypto_aead_walksize() (with the remainder going at the end), no chunk
 *	  can cross a page boundary or a scatterlist element boundary.
 *    ahash:
 *	- The result buffer must be aligned to the algorithm's alignmask.
 *	- crypto_ahash_finup() must not be used unless the algorithm implements
 *	  ->finup() natively.
 */
#define CRYPTO_ALG_ALLOCATES_MEMORY	0x00010000

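/*
 * Example (illustrative sketch, not part of this header): a caller that
 * cannot tolerate allocation failures can pass CRYPTO_ALG_ALLOCATES_MEMORY
 * in the mask, so that only implementations with the flag clear are
 * eligible.  crypto_alloc_skcipher() and struct crypto_skcipher are
 * assumed from <crypto/skcipher.h>.
 */
static inline struct crypto_skcipher *example_alloc_nonallocating_xts(void)
{
	/* type = 0, mask bit set: require CRYPTO_ALG_ALLOCATES_MEMORY unset */
	return crypto_alloc_skcipher("xts(aes)", 0,
				     CRYPTO_ALG_ALLOCATES_MEMORY);
}
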
/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_NEED_KEY		0x00000001

#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_REQ_FORBID_WEAK_KEYS	0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		128

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  On architectures that support non-cache coherent
 * DMA, such as ARM or arm64, it also takes into account the minimal alignment
 * that is required to ensure that the context struct member does not share any
 * cachelines with the rest of the struct. This is needed to ensure that cache
 * maintenance for non-coherent DMA (cache invalidation in particular) does not
 * affect data that may be accessed by the CPU concurrently.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_async_request;
struct crypto_tfm;
struct crypto_type;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

/**
 * DOC: Block Cipher Context Data Structures
 *
 * These data structures define the operating context for each block cipher
 * type.
 */

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

/**
 * DOC: Block Cipher Algorithm Definitions
 *
 * These data structures define modular crypto algorithm implementations,
 * managed via crypto_register_alg() and crypto_unregister_alg().
 */

/**
 * struct cipher_alg - single-block symmetric ciphers definition
 * @cia_min_keysize: Minimum key size supported by the transformation. This is
 *		     the smallest key length supported by this transformation
 *		     algorithm. This must be set to one of the pre-defined
 *		     values as this is not hardware specific. Possible values
 *		     for this field can be found via git grep "_MIN_KEY_SIZE"
 *		     include/crypto/
 * @cia_max_keysize: Maximum key size supported by the transformation. This is
 *		    the largest key length supported by this transformation
 *		    algorithm. This must be set to one of the pre-defined values
 *		    as this is not hardware specific. Possible values for this
 *		    field can be found via git grep "_MAX_KEY_SIZE"
 *		    include/crypto/
 * @cia_setkey: Set key for the transformation. This function is used to either
 *	        program a supplied key into the hardware or store the key in the
 *	        transformation context for programming it later. Note that this
 *	        function does modify the transformation context. This function
 *	        can be called multiple times during the existence of the
 *	        transformation object, so one must make sure the key is properly
 *	        reprogrammed into the hardware. This function is also
 *	        responsible for checking the key length for validity.
 * @cia_encrypt: Encrypt a single block. This function is used to encrypt a
 *		 single block of data, which must be @cra_blocksize big. This
 *		 always operates on a full @cra_blocksize and it is not possible
 *		 to encrypt a block of smaller size. The supplied buffers must
 *		 therefore also be at least of @cra_blocksize size. Both the
 *		 input and output buffers are always aligned to @cra_alignmask.
 *		 In case either of the input or output buffer supplied by user
 *		 of the crypto API is not aligned to @cra_alignmask, the crypto
 *		 API will re-align the buffers. The re-alignment means that a
 *		 new buffer will be allocated, the data will be copied into the
 *		 new buffer, then the processing will happen on the new buffer,
 *		 then the data will be copied back into the original buffer and
 *		 finally the new buffer will be freed. In case a software
 *		 fallback was put in place in the @cra_init call, this function
 *		 might need to use the fallback if the algorithm doesn't support
 *		 all of the key sizes. In case the key was stored in
 *		 transformation context, the key might need to be re-programmed
 *		 into the hardware in this function. This function shall not
 *		 modify the transformation context, as this function may be
 *		 called in parallel with the same transformation object.
 * @cia_decrypt: Decrypt a single block. This is a reverse counterpart to
 *		 @cia_encrypt, and the conditions are exactly the same.
 *
 * All fields are mandatory and must be filled.
 */
struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
	                  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

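/*
 * Example (illustrative sketch, not part of this header): how a
 * single-block cipher might fill in struct cipher_alg through the cra_u
 * union of struct crypto_alg (defined further below).  Every name
 * prefixed "example_" is a hypothetical placeholder, not a real kernel
 * symbol; THIS_MODULE is assumed from <linux/module.h>.
 */
struct example_cipher_ctx {
	u8 key[32];
	unsigned int keylen;
};

static int example_setkey(struct crypto_tfm *tfm, const u8 *key,
			  unsigned int keylen);
static void example_encrypt_block(struct crypto_tfm *tfm, u8 *dst,
				  const u8 *src);
static void example_decrypt_block(struct crypto_tfm *tfm, u8 *dst,
				  const u8 *src);

static struct crypto_alg example_cipher_alg = {
	.cra_name		= "example",
	.cra_driver_name	= "example-generic",
	.cra_priority		= 100,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= 16,	/* one 128-bit block */
	.cra_ctxsize		= sizeof(struct example_cipher_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u.cipher		= {
		.cia_min_keysize	= 16,
		.cia_max_keysize	= 32,
		.cia_setkey		= example_setkey,
		.cia_encrypt		= example_encrypt_block,
		.cia_decrypt		= example_decrypt_block,
	},
};
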
/**
 * struct compress_alg - compression/decompression algorithm
 * @coa_compress: Compress a buffer of specified length, storing the resulting
 *		  data in the specified buffer. Return the length of the
 *		  compressed data in dlen.
 * @coa_decompress: Decompress the source buffer, storing the uncompressed
 *		    data in the specified buffer. The length of the data is
 *		    returned in dlen.
 *
 * All fields are mandatory.
 */
struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

#ifdef CONFIG_CRYPTO_STATS
/*
 * struct crypto_istat_aead - statistics for AEAD algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for AEAD requests
 */
struct crypto_istat_aead {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_akcipher - statistics for akcipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @verify_cnt:		number of verify operations
 * @sign_cnt:		number of sign requests
 * @err_cnt:		number of errors for akcipher requests
 */
struct crypto_istat_akcipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t verify_cnt;
	atomic64_t sign_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_cipher - statistics for cipher algorithm
 * @encrypt_cnt:	number of encrypt requests
 * @encrypt_tlen:	total data size handled by encrypt requests
 * @decrypt_cnt:	number of decrypt requests
 * @decrypt_tlen:	total data size handled by decrypt requests
 * @err_cnt:		number of errors for cipher requests
 */
struct crypto_istat_cipher {
	atomic64_t encrypt_cnt;
	atomic64_t encrypt_tlen;
	atomic64_t decrypt_cnt;
	atomic64_t decrypt_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_compress - statistics for compress algorithm
 * @compress_cnt:	number of compress requests
 * @compress_tlen:	total data size handled by compress requests
 * @decompress_cnt:	number of decompress requests
 * @decompress_tlen:	total data size handled by decompress requests
 * @err_cnt:		number of errors for compress requests
 */
struct crypto_istat_compress {
	atomic64_t compress_cnt;
	atomic64_t compress_tlen;
	atomic64_t decompress_cnt;
	atomic64_t decompress_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_hash - statistics for hash algorithm
 * @hash_cnt:		number of hash requests
 * @hash_tlen:		total data size hashed
 * @err_cnt:		number of errors for hash requests
 */
struct crypto_istat_hash {
	atomic64_t hash_cnt;
	atomic64_t hash_tlen;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_kpp - statistics for KPP algorithm
 * @setsecret_cnt:		number of setsecret operations
 * @generate_public_key_cnt:	number of generate_public_key operations
 * @compute_shared_secret_cnt:	number of compute_shared_secret operations
 * @err_cnt:			number of errors for KPP requests
 */
struct crypto_istat_kpp {
	atomic64_t setsecret_cnt;
	atomic64_t generate_public_key_cnt;
	atomic64_t compute_shared_secret_cnt;
	atomic64_t err_cnt;
};

/*
 * struct crypto_istat_rng: statistics for RNG algorithm
 * @generate_cnt:	number of RNG generate requests
 * @generate_tlen:	total size of data generated by the RNG
 * @seed_cnt:		number of times the RNG was seeded
 * @err_cnt:		number of errors for RNG requests
 */
struct crypto_istat_rng {
	atomic64_t generate_cnt;
	atomic64_t generate_tlen;
	atomic64_t seed_cnt;
	atomic64_t err_cnt;
};
#endif /* CONFIG_CRYPTO_STATS */

#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress

/**
 * struct crypto_alg - definition of a cryptographic cipher algorithm
 * @cra_flags: Flags describing this transformation. See include/linux/crypto.h
 *	       CRYPTO_ALG_* flags for the flags which go in here. Those are
 *	       used for fine-tuning the description of the transformation
 *	       algorithm.
 * @cra_blocksize: Minimum block size of this transformation. The size in bytes
 *		   of the smallest possible unit which can be transformed with
 *		   this algorithm. The users must respect this value.
 *		   In case of HASH transformation, it is possible for a smaller
 *		   block than @cra_blocksize to be passed to the crypto API for
 *		   transformation; in case of any other transformation type, an
 *		   error will be returned upon any attempt to transform smaller
 *		   than @cra_blocksize chunks.
 * @cra_ctxsize: Size of the operational context of the transformation. This
 *		 value informs the kernel crypto API about the memory size
 *		 needed to be allocated for the transformation context.
 * @cra_alignmask: Alignment mask for the input and output data buffer. The data
 *		   buffer containing the input data for the algorithm must be
 *		   aligned to this alignment mask. The data buffer for the
 *		   output data must be aligned to this alignment mask. Note that
 *		   the Crypto API will do the re-alignment in software, but
 *		   only under special conditions and there is a performance hit.
 *		   The re-alignment happens at these occasions for different
 *		   @cra_u types: cipher -- For both input data and output data
 *		   buffer; ahash -- For output hash destination buf; shash --
 *		   For output hash destination buf.
 *		   This is needed on hardware which is flawed by design and
 *		   cannot pick data from arbitrary addresses.
 * @cra_priority: Priority of this transformation implementation. In case
 *		  multiple transformations with same @cra_name are available to
 *		  the Crypto API, the kernel will use the one with highest
 *		  @cra_priority.
 * @cra_name: Generic name (usable by multiple implementations) of the
 *	      transformation algorithm. This is the name of the transformation
 *	      itself. This field is used by the kernel when looking up the
 *	      providers of a particular transformation.
 * @cra_driver_name: Unique name of the transformation provider. This is the
 *		     name of the provider of the transformation. This can be any
 *		     arbitrary value, but in the usual case, this contains the
 *		     name of the chip or provider and the name of the
 *		     transformation algorithm.
 * @cra_type: Type of the cryptographic transformation. This is a pointer to
 *	      struct crypto_type, which implements callbacks common for all
 *	      transformation types. There are multiple options, such as
 *	      &crypto_skcipher_type, &crypto_ahash_type, &crypto_rng_type.
 *	      This field might be empty. In that case, there are no common
 *	      callbacks. This is the case for: cipher, compress, shash.
 * @cra_u: Callbacks implementing the transformation. This is a union of
 *	   multiple structures. Depending on the type of transformation selected
 *	   by @cra_type and @cra_flags above, the associated structure must be
 *	   filled with callbacks. This field might be empty. This is the case
 *	   for ahash, shash.
 * @cra_init: Initialize the cryptographic transformation object. This function
 *	      is used to initialize the cryptographic transformation object.
 *	      This function is called only once at the instantiation time, right
 *	      after the transformation context was allocated. In case the
 *	      cryptographic hardware has some special requirements which need to
 *	      be handled by software, this function shall check for the precise
 *	      requirement of the transformation and put any software fallbacks
 *	      in place.
 * @cra_exit: Deinitialize the cryptographic transformation object. This is a
 *	      counterpart to @cra_init, used to remove various changes set in
 *	      @cra_init.
 * @cra_u.cipher: Union member which contains a single-block symmetric cipher
 *		  definition. See @struct @cipher_alg.
 * @cra_u.compress: Union member which contains a (de)compression algorithm.
 *		    See @struct @compress_alg.
 * @cra_module: Owner of this transformation implementation. Set to THIS_MODULE
 * @cra_list: internally used
 * @cra_users: internally used
 * @cra_refcnt: internally used
 * @cra_destroy: internally used
 *
 * @stats: union of all possible crypto_istat_xxx structures
 * @stats.aead:		statistics for AEAD algorithm
 * @stats.akcipher:	statistics for akcipher algorithm
 * @stats.cipher:	statistics for cipher algorithm
 * @stats.compress:	statistics for compress algorithm
 * @stats.hash:		statistics for hash algorithm
 * @stats.rng:		statistics for rng algorithm
 * @stats.kpp:		statistics for KPP algorithm
 *
 * The struct crypto_alg describes a generic Crypto API algorithm and is common
 * for all of the transformations. Any variable not documented here shall not
 * be used by a cipher implementation as it is internal to the Crypto API.
 */
struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	refcount_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct cipher_alg cipher;
		struct compress_alg compress;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;

#ifdef CONFIG_CRYPTO_STATS
	union {
		struct crypto_istat_aead aead;
		struct crypto_istat_akcipher akcipher;
		struct crypto_istat_cipher cipher;
		struct crypto_istat_compress compress;
		struct crypto_istat_hash hash;
		struct crypto_istat_rng rng;
		struct crypto_istat_kpp kpp;
	} stats;
#endif /* CONFIG_CRYPTO_STATS */

} CRYPTO_MINALIGN_ATTR;

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg);
void crypto_stats_get(struct crypto_alg *alg);
void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg);
void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg);
void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg);
void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret);
void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret);
void crypto_stats_rng_seed(struct crypto_alg *alg, int ret);
void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret);
void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
#else
static inline void crypto_stats_init(struct crypto_alg *alg)
{}
static inline void crypto_stats_get(struct crypto_alg *alg)
{}
static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_ahash_update(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_ahash_final(unsigned int nbytes, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{}
static inline void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen, int ret)
{}
static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
{}
#endif
/*
 * A helper struct for waiting for completion of async crypto ops
 */
struct crypto_wait {
	struct completion completion;
	int err;
};

/*
 * Macro for declaring a crypto op async wait object on stack
 */
#define DECLARE_CRYPTO_WAIT(_wait) \
	struct crypto_wait _wait = { \
		COMPLETION_INITIALIZER_ONSTACK((_wait).completion), 0 }

/*
 * Async ops completion helper functions
 */
void crypto_req_done(struct crypto_async_request *req, int err);

static inline int crypto_wait_req(int err, struct crypto_wait *wait)
{
	switch (err) {
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&wait->completion);
		reinit_completion(&wait->completion);
		err = wait->err;
		break;
	}

	return err;
}

static inline void crypto_init_wait(struct crypto_wait *wait)
{
	init_completion(&wait->completion);
}

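/*
 * Example (illustrative sketch, not part of this header): driving an async
 * operation synchronously with the helpers above.  The skcipher request
 * API (skcipher_request_set_callback(), crypto_skcipher_encrypt()) is
 * assumed from <crypto/skcipher.h>; "req" is a fully set-up request.
 */
static inline int example_encrypt_and_wait(struct skcipher_request *req)
{
	DECLARE_CRYPTO_WAIT(wait);

	/* crypto_req_done() completes the wait and records the status */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	/* translates -EINPROGRESS/-EBUSY into the final request status */
	return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
}
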
/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
void crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
void crypto_unregister_algs(struct crypto_alg *algs, int count);

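/*
 * Example (illustrative sketch): a module typically registers its
 * struct crypto_alg on init and unregisters it on exit.  This reuses the
 * hypothetical example_cipher_alg sketched after struct cipher_alg above.
 */
static int __init example_module_init(void)
{
	return crypto_register_alg(&example_cipher_alg);
}

static void __exit example_module_exit(void)
{
	crypto_unregister_alg(&example_cipher_alg);
}
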
/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

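/*
 * Example (illustrative): checking availability before committing to an
 * algorithm; a zero type/mask matches any implementation of the name.
 */
static inline bool example_have_gcm_aes(void)
{
	return crypto_has_alg("gcm(aes)", 0, 0);
}
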
/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct crypto_tfm {

	u32 crt_flags;

	int node;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_comp {
	struct crypto_tfm base;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

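/*
 * Example (illustrative sketch): an algorithm's cra_init() commonly uses
 * crypto_tfm_ctx() to reach its per-transform context, whose size was
 * declared through cra_ctxsize.  struct example_priv is hypothetical;
 * memset() is assumed from <linux/string.h>.
 */
struct example_priv {
	u32 round_keys[60];
	unsigned int nrounds;
};

static inline int example_cra_init(struct crypto_tfm *tfm)
{
	struct example_priv *ctx = crypto_tfm_ctx(tfm);

	memset(ctx, 0, sizeof(*ctx));	/* context memory is cra_ctxsize bytes */
	return 0;
}
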
static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}

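/*
 * Example (illustrative sketch): allocating a transform by name with
 * crypto_alloc_base() and querying it with the helpers above.  Most
 * callers use the type-specific allocators instead (crypto_alloc_comp()
 * below, crypto_alloc_skcipher(), ...).  IS_ERR()/PTR_ERR() are assumed
 * from <linux/err.h>, pr_info() from <linux/printk.h>.
 */
static inline int example_probe_alg(const char *name)
{
	struct crypto_tfm *tfm = crypto_alloc_base(name, 0, 0);

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);	/* e.g. -ENOENT if no provider */
	pr_info("%s is provided by %s (priority %d)\n",
		crypto_tfm_alg_name(tfm), crypto_tfm_alg_driver_name(tfm),
		crypto_tfm_alg_priority(tfm));
	crypto_free_tfm(tfm);
	return 0;
}
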
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

int crypto_comp_compress(struct crypto_comp *tfm,
			 const u8 *src, unsigned int slen,
			 u8 *dst, unsigned int *dlen);

int crypto_comp_decompress(struct crypto_comp *tfm,
			   const u8 *src, unsigned int slen,
			   u8 *dst, unsigned int *dlen);

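/*
 * Example (illustrative sketch): one-shot compression of a buffer with a
 * named algorithm such as "deflate" or "lz4".  On entry *dlen holds the
 * capacity of dst; on success it holds the compressed length.
 * IS_ERR()/PTR_ERR() are assumed from <linux/err.h>.
 */
static inline int example_compress_buf(const char *alg, const u8 *src,
				       unsigned int slen, u8 *dst,
				       unsigned int *dlen)
{
	struct crypto_comp *tfm = crypto_alloc_comp(alg, 0, 0);
	int err;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	err = crypto_comp_compress(tfm, src, slen, dst, dlen);
	crypto_free_comp(tfm);
	return err;
}
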
#endif	/* _LINUX_CRYPTO_H */

include/linux/crypto.h (v3.5.6)

/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <linux/atomic.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/bug.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK		0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER		0x00000001
#define CRYPTO_ALG_TYPE_COMPRESS	0x00000002
#define CRYPTO_ALG_TYPE_AEAD		0x00000003
#define CRYPTO_ALG_TYPE_BLKCIPHER	0x00000004
#define CRYPTO_ALG_TYPE_ABLKCIPHER	0x00000005
#define CRYPTO_ALG_TYPE_GIVCIPHER	0x00000006
#define CRYPTO_ALG_TYPE_DIGEST		0x00000008
#define CRYPTO_ALG_TYPE_HASH		0x00000008
#define CRYPTO_ALG_TYPE_SHASH		0x00000009
#define CRYPTO_ALG_TYPE_AHASH		0x0000000a
#define CRYPTO_ALG_TYPE_RNG		0x0000000c
#define CRYPTO_ALG_TYPE_PCOMPRESS	0x0000000f

#define CRYPTO_ALG_TYPE_HASH_MASK	0x0000000e
#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000c
#define CRYPTO_ALG_TYPE_BLKCIPHER_MASK	0x0000000c

#define CRYPTO_ALG_LARVAL		0x00000010
#define CRYPTO_ALG_DEAD			0x00000020
#define CRYPTO_ALG_DYING		0x00000040
#define CRYPTO_ALG_ASYNC		0x00000080

/*
 * Set this bit if and only if the algorithm requires another algorithm of
 * the same type to handle corner cases.
 */
#define CRYPTO_ALG_NEED_FALLBACK	0x00000100

/*
 * This bit is set for symmetric key ciphers that have already been wrapped
 * with a generic IV generator to prevent them from being wrapped again.
 */
#define CRYPTO_ALG_GENIV		0x00000200

/*
 * Set if the algorithm has passed automated run-time testing.  Note that
 * if there is no run-time testing for a given algorithm it is considered
 * to have passed.
 */

#define CRYPTO_ALG_TESTED		0x00000400

/*
 * Set if the algorithm is an instance that is built from templates.
 */
#define CRYPTO_ALG_INSTANCE		0x00000800

/* Set this bit if the algorithm provided is hardware accelerated but
 * not available to userspace via an instruction set or similar means.
 */
#define CRYPTO_ALG_KERN_DRIVER_ONLY	0x00001000

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_REQ_MASK		0x000fff00
#define CRYPTO_TFM_RES_MASK		0xfff00000

#define CRYPTO_TFM_REQ_WEAK_KEY		0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP	0x00000200
#define CRYPTO_TFM_REQ_MAY_BACKLOG	0x00000400
#define CRYPTO_TFM_RES_WEAK_KEY		0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN	0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED	0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN	0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS	0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_MAX_ALG_NAME		64

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN

#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))

struct scatterlist;
struct crypto_ablkcipher;
struct crypto_async_request;
struct crypto_aead;
struct crypto_blkcipher;
struct crypto_hash;
struct crypto_rng;
struct crypto_tfm;
struct crypto_type;
struct aead_givcrypt_request;
struct skcipher_givcrypt_request;

typedef void (*crypto_completion_t)(struct crypto_async_request *req, int err);

struct crypto_async_request {
	struct list_head list;
	crypto_completion_t complete;
	void *data;
	struct crypto_tfm *tfm;

	u32 flags;
};

struct ablkcipher_request {
	struct crypto_async_request base;

	unsigned int nbytes;

	void *info;

	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

/**
 *	struct aead_request - AEAD request
 *	@base: Common attributes for async crypto requests
 *	@assoclen: Length in bytes of associated data for authentication
 *	@cryptlen: Length of data to be encrypted or decrypted
 *	@iv: Initialisation vector
 *	@assoc: Associated data
 *	@src: Source data
 *	@dst: Destination data
 *	@__ctx: Start of private context data
 */
struct aead_request {
	struct crypto_async_request base;

	unsigned int assoclen;
	unsigned int cryptlen;

	u8 *iv;

	struct scatterlist *assoc;
	struct scatterlist *src;
	struct scatterlist *dst;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct blkcipher_desc {
	struct crypto_blkcipher *tfm;
	void *info;
	u32 flags;
};

struct cipher_desc {
	struct crypto_tfm *tfm;
	void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
			     const u8 *src, unsigned int nbytes);
	void *info;
};

struct hash_desc {
	struct crypto_hash *tfm;
	u32 flags;
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct ablkcipher_alg {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct aead_alg {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
	              unsigned int keylen);
	int (*setauthsize)(struct crypto_aead *tfm, unsigned int authsize);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	const char *geniv;

	unsigned int ivsize;
	unsigned int maxauthsize;
};

struct blkcipher_alg {
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes);

	const char *geniv;

	unsigned int min_keysize;
	unsigned int max_keysize;
	unsigned int ivsize;
};

struct cipher_alg {
	unsigned int cia_min_keysize;
	unsigned int cia_max_keysize;
	int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
	                  unsigned int keylen);
	void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct compress_alg {
	int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
			    unsigned int slen, u8 *dst, unsigned int *dlen);
	int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
			      unsigned int slen, u8 *dst, unsigned int *dlen);
};

struct rng_alg {
	int (*rng_make_random)(struct crypto_rng *tfm, u8 *rdata,
			       unsigned int dlen);
	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);

	unsigned int seedsize;
};

#define cra_ablkcipher	cra_u.ablkcipher
#define cra_aead	cra_u.aead
#define cra_blkcipher	cra_u.blkcipher
#define cra_cipher	cra_u.cipher
#define cra_compress	cra_u.compress
#define cra_rng		cra_u.rng

struct crypto_alg {
	struct list_head cra_list;
	struct list_head cra_users;

	u32 cra_flags;
	unsigned int cra_blocksize;
	unsigned int cra_ctxsize;
	unsigned int cra_alignmask;

	int cra_priority;
	atomic_t cra_refcnt;

	char cra_name[CRYPTO_MAX_ALG_NAME];
	char cra_driver_name[CRYPTO_MAX_ALG_NAME];

	const struct crypto_type *cra_type;

	union {
		struct ablkcipher_alg ablkcipher;
		struct aead_alg aead;
		struct blkcipher_alg blkcipher;
		struct cipher_alg cipher;
		struct compress_alg compress;
		struct rng_alg rng;
	} cra_u;

	int (*cra_init)(struct crypto_tfm *tfm);
	void (*cra_exit)(struct crypto_tfm *tfm);
	void (*cra_destroy)(struct crypto_alg *alg);

	struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
int crypto_register_algs(struct crypto_alg *algs, int count);
int crypto_unregister_algs(struct crypto_alg *algs, int count);

/*
 * Algorithm query interface.
 */
int crypto_has_alg(const char *name, u32 type, u32 mask);

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_*() and
 * crypto_free_*(), as well as the various helpers below.
 */

struct ablkcipher_tfm {
	int (*setkey)(struct crypto_ablkcipher *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct ablkcipher_request *req);
	int (*decrypt)(struct ablkcipher_request *req);
	int (*givencrypt)(struct skcipher_givcrypt_request *req);
	int (*givdecrypt)(struct skcipher_givcrypt_request *req);

	struct crypto_ablkcipher *base;

	unsigned int ivsize;
	unsigned int reqsize;
};

struct aead_tfm {
	int (*setkey)(struct crypto_aead *tfm, const u8 *key,
	              unsigned int keylen);
	int (*encrypt)(struct aead_request *req);
	int (*decrypt)(struct aead_request *req);
	int (*givencrypt)(struct aead_givcrypt_request *req);
	int (*givdecrypt)(struct aead_givcrypt_request *req);

	struct crypto_aead *base;

	unsigned int ivsize;
	unsigned int authsize;
	unsigned int reqsize;
};

struct blkcipher_tfm {
	void *iv;
	int (*setkey)(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int keylen);
	int (*encrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
	int (*decrypt)(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes);
};

struct cipher_tfm {
	int (*cit_setkey)(struct crypto_tfm *tfm,
	                  const u8 *key, unsigned int keylen);
	void (*cit_encrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
	void (*cit_decrypt_one)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
};

struct hash_tfm {
	int (*init)(struct hash_desc *desc);
	int (*update)(struct hash_desc *desc,
		      struct scatterlist *sg, unsigned int nsg);
	int (*final)(struct hash_desc *desc, u8 *out);
	int (*digest)(struct hash_desc *desc, struct scatterlist *sg,
		      unsigned int nsg, u8 *out);
	int (*setkey)(struct crypto_hash *tfm, const u8 *key,
		      unsigned int keylen);
	unsigned int digestsize;
};

struct compress_tfm {
	int (*cot_compress)(struct crypto_tfm *tfm,
	                    const u8 *src, unsigned int slen,
	                    u8 *dst, unsigned int *dlen);
	int (*cot_decompress)(struct crypto_tfm *tfm,
	                      const u8 *src, unsigned int slen,
	                      u8 *dst, unsigned int *dlen);
};

struct rng_tfm {
	int (*rng_gen_random)(struct crypto_rng *tfm, u8 *rdata,
			      unsigned int dlen);
	int (*rng_reset)(struct crypto_rng *tfm, u8 *seed, unsigned int slen);
};

#define crt_ablkcipher	crt_u.ablkcipher
#define crt_aead	crt_u.aead
#define crt_blkcipher	crt_u.blkcipher
#define crt_cipher	crt_u.cipher
#define crt_hash	crt_u.hash
#define crt_compress	crt_u.compress
#define crt_rng		crt_u.rng

struct crypto_tfm {

	u32 crt_flags;

	union {
		struct ablkcipher_tfm ablkcipher;
		struct aead_tfm aead;
		struct blkcipher_tfm blkcipher;
		struct cipher_tfm cipher;
		struct hash_tfm hash;
		struct compress_tfm compress;
		struct rng_tfm rng;
	} crt_u;

	void (*exit)(struct crypto_tfm *tfm);

	struct crypto_alg *__crt_alg;

	void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_ablkcipher {
	struct crypto_tfm base;
};

struct crypto_aead {
	struct crypto_tfm base;
};

struct crypto_blkcipher {
	struct crypto_tfm base;
};

struct crypto_cipher {
	struct crypto_tfm base;
};

struct crypto_comp {
	struct crypto_tfm base;
};

struct crypto_hash {
	struct crypto_tfm base;
};

struct crypto_rng {
	struct crypto_tfm base;
};

enum {
	CRYPTOA_UNSPEC,
	CRYPTOA_ALG,
	CRYPTOA_TYPE,
	CRYPTOA_U32,
	__CRYPTOA_MAX,
};

#define CRYPTOA_MAX (__CRYPTOA_MAX - 1)

/* Maximum number of (rtattr) parameters for each template. */
#define CRYPTO_MAX_ATTRS 32

struct crypto_attr_alg {
	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_attr_type {
	u32 type;
	u32 mask;
};

struct crypto_attr_u32 {
	u32 num;
};

/*
 * Transform user interface.
 */

struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask);
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm);

static inline void crypto_free_tfm(struct crypto_tfm *tfm)
{
	return crypto_destroy_tfm(tfm, tfm);
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask);

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_driver_name(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_driver_name;
}

static inline int crypto_tfm_alg_priority(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_priority;
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
	return tfm->__crt_alg->cra_alignmask;
}

static inline u32 crypto_tfm_get_flags(struct crypto_tfm *tfm)
{
	return tfm->crt_flags;
}

static inline void crypto_tfm_set_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags |= flags;
}

static inline void crypto_tfm_clear_flags(struct crypto_tfm *tfm, u32 flags)
{
	tfm->crt_flags &= ~flags;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
	return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;
	return __alignof__(tfm->__crt_ctx);
}

/*
 * API wrappers.
 */
static inline struct crypto_ablkcipher *__crypto_ablkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_ablkcipher *)tfm;
}

static inline u32 crypto_skcipher_type(u32 type)
{
	type &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	return type;
}

static inline u32 crypto_skcipher_mask(u32 mask)
{
	mask &= ~(CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_GENIV);
	mask |= CRYPTO_ALG_TYPE_BLKCIPHER_MASK;
	return mask;
}

struct crypto_ablkcipher *crypto_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);

static inline struct crypto_tfm *crypto_ablkcipher_tfm(
	struct crypto_ablkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_ablkcipher(struct crypto_ablkcipher *tfm)
{
	crypto_free_tfm(crypto_ablkcipher_tfm(tfm));
}

static inline int crypto_has_ablkcipher(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_has_alg(alg_name, crypto_skcipher_type(type),
			      crypto_skcipher_mask(mask));
}

static inline struct ablkcipher_tfm *crypto_ablkcipher_crt(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->crt_ablkcipher;
}

static inline unsigned int crypto_ablkcipher_ivsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->ivsize;
}

static inline unsigned int crypto_ablkcipher_blocksize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(tfm));
}

static inline unsigned int crypto_ablkcipher_alignmask(
	struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_ablkcipher_tfm(tfm));
}

static inline u32 crypto_ablkcipher_get_flags(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_ablkcipher_tfm(tfm));
}

static inline void crypto_ablkcipher_set_flags(struct crypto_ablkcipher *tfm,
					       u32 flags)
{
	crypto_tfm_set_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline void crypto_ablkcipher_clear_flags(struct crypto_ablkcipher *tfm,
						 u32 flags)
{
	crypto_tfm_clear_flags(crypto_ablkcipher_tfm(tfm), flags);
}

static inline int crypto_ablkcipher_setkey(struct crypto_ablkcipher *tfm,
					   const u8 *key, unsigned int keylen)
{
	struct ablkcipher_tfm *crt = crypto_ablkcipher_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

static inline struct crypto_ablkcipher *crypto_ablkcipher_reqtfm(
	struct ablkcipher_request *req)
{
	return __crypto_ablkcipher_cast(req->base.tfm);
}

static inline int crypto_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->encrypt(req);
}

static inline int crypto_ablkcipher_decrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_tfm *crt =
		crypto_ablkcipher_crt(crypto_ablkcipher_reqtfm(req));
	return crt->decrypt(req);
}

static inline unsigned int crypto_ablkcipher_reqsize(
	struct crypto_ablkcipher *tfm)
{
	return crypto_ablkcipher_crt(tfm)->reqsize;
}

static inline void ablkcipher_request_set_tfm(
	struct ablkcipher_request *req, struct crypto_ablkcipher *tfm)
{
	req->base.tfm = crypto_ablkcipher_tfm(crypto_ablkcipher_crt(tfm)->base);
}

static inline struct ablkcipher_request *ablkcipher_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct ablkcipher_request, base);
}

static inline struct ablkcipher_request *ablkcipher_request_alloc(
	struct crypto_ablkcipher *tfm, gfp_t gfp)
{
	struct ablkcipher_request *req;

	req = kmalloc(sizeof(struct ablkcipher_request) +
		      crypto_ablkcipher_reqsize(tfm), gfp);

	if (likely(req))
		ablkcipher_request_set_tfm(req, tfm);

	return req;
}

static inline void ablkcipher_request_free(struct ablkcipher_request *req)
{
	kzfree(req);
}

static inline void ablkcipher_request_set_callback(
	struct ablkcipher_request *req,
	u32 flags, crypto_completion_t complete, void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void ablkcipher_request_set_crypt(
	struct ablkcipher_request *req,
	struct scatterlist *src, struct scatterlist *dst,
	unsigned int nbytes, void *iv)
{
	req->src = src;
	req->dst = dst;
	req->nbytes = nbytes;
	req->info = iv;
}

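/*
 * Example (illustrative sketch, v3.5-era API, not part of this header):
 * the usual flow for an async block cipher request.  example_done() is a
 * hypothetical crypto_completion_t; on async completion the request would
 * be freed there, which this trimmed sketch omits.
 */
static inline int example_ablkcipher_encrypt(struct crypto_ablkcipher *tfm,
					     struct scatterlist *src,
					     struct scatterlist *dst,
					     unsigned int nbytes, void *iv)
{
	struct ablkcipher_request *req;
	int err;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					example_done, NULL);
	ablkcipher_request_set_crypt(req, src, dst, nbytes, iv);

	err = crypto_ablkcipher_encrypt(req);	/* may return -EINPROGRESS */
	if (err != -EINPROGRESS && err != -EBUSY)
		ablkcipher_request_free(req);
	return err;
}
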
/*
 * AEAD: authenticated encryption with associated data.
 */
static inline struct crypto_aead *__crypto_aead_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_aead *)tfm;
}

struct crypto_aead *crypto_alloc_aead(const char *alg_name, u32 type, u32 mask);

static inline struct crypto_tfm *crypto_aead_tfm(struct crypto_aead *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_aead(struct crypto_aead *tfm)
{
	crypto_free_tfm(crypto_aead_tfm(tfm));
}

static inline struct aead_tfm *crypto_aead_crt(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->crt_aead;
}

static inline unsigned int crypto_aead_ivsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->ivsize;
}

/* Size in bytes of the authentication tag generated and verified. */
static inline unsigned int crypto_aead_authsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->authsize;
}

static inline unsigned int crypto_aead_blocksize(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_aead_tfm(tfm));
}

static inline unsigned int crypto_aead_alignmask(struct crypto_aead *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_aead_tfm(tfm));
}

static inline u32 crypto_aead_get_flags(struct crypto_aead *tfm)
{
	return crypto_tfm_get_flags(crypto_aead_tfm(tfm));
}

static inline void crypto_aead_set_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_aead_tfm(tfm), flags);
}

static inline void crypto_aead_clear_flags(struct crypto_aead *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_aead_tfm(tfm), flags);
}

static inline int crypto_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct aead_tfm *crt = crypto_aead_crt(tfm);

	return crt->setkey(crt->base, key, keylen);
}

int crypto_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize);

static inline struct crypto_aead *crypto_aead_reqtfm(struct aead_request *req)
{
	return __crypto_aead_cast(req->base.tfm);
}

static inline int crypto_aead_encrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->encrypt(req);
}

static inline int crypto_aead_decrypt(struct aead_request *req)
{
	return crypto_aead_crt(crypto_aead_reqtfm(req))->decrypt(req);
}

static inline unsigned int crypto_aead_reqsize(struct crypto_aead *tfm)
{
	return crypto_aead_crt(tfm)->reqsize;
}

static inline void aead_request_set_tfm(struct aead_request *req,
					struct crypto_aead *tfm)
{
	req->base.tfm = crypto_aead_tfm(crypto_aead_crt(tfm)->base);
}

static inline struct aead_request *aead_request_alloc(struct crypto_aead *tfm,
						      gfp_t gfp)
{
	struct aead_request *req;

	req = kmalloc(sizeof(*req) + crypto_aead_reqsize(tfm), gfp);

	if (likely(req))
		aead_request_set_tfm(req, tfm);

	return req;
}

static inline void aead_request_free(struct aead_request *req)
{
	kzfree(req);
}

static inline void aead_request_set_callback(struct aead_request *req,
					     u32 flags,
					     crypto_completion_t complete,
					     void *data)
{
	req->base.complete = complete;
	req->base.data = data;
	req->base.flags = flags;
}

static inline void aead_request_set_crypt(struct aead_request *req,
					  struct scatterlist *src,
					  struct scatterlist *dst,
					  unsigned int cryptlen, u8 *iv)
{
	req->src = src;
	req->dst = dst;
	req->cryptlen = cryptlen;
	req->iv = iv;
}

/* The associated data is authenticated but not encrypted. */
static inline void aead_request_set_assoc(struct aead_request *req,
					  struct scatterlist *assoc,
					  unsigned int assoclen)
{
	req->assoc = assoc;
	req->assoclen = assoclen;
}

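/*
 * Usage sketch for the AEAD interface above (illustrative only; "gcm(aes)"
 * and the sg_src, sg_dst, sg_assoc, iv, key, my_complete and my_ctx
 * identifiers are assumptions).  With this legacy interface the associated
 * data travels in its own scatterlist, and on encryption the destination
 * must provide room for cryptlen plus crypto_aead_authsize() tag bytes.
 *
 *	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, 16);	(16-byte tag for GCM)
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  my_complete, my_ctx);
 *	aead_request_set_crypt(req, sg_src, sg_dst, cryptlen, iv);
 *	aead_request_set_assoc(req, sg_assoc, assoclen);
 *
 *	err = crypto_aead_encrypt(req);
 *	aead_request_free(req);
 *	crypto_free_aead(tfm);
 */
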
static inline struct crypto_blkcipher *__crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	return (struct crypto_blkcipher *)tfm;
}

static inline struct crypto_blkcipher *crypto_blkcipher_cast(
	struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_BLKCIPHER);
	return __crypto_blkcipher_cast(tfm);
}

static inline struct crypto_blkcipher *crypto_alloc_blkcipher(
	const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_blkcipher_tfm(
	struct crypto_blkcipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_blkcipher(struct crypto_blkcipher *tfm)
{
	crypto_free_tfm(crypto_blkcipher_tfm(tfm));
}

static inline int crypto_has_blkcipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_BLKCIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_blkcipher_name(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_name(crypto_blkcipher_tfm(tfm));
}

static inline struct blkcipher_tfm *crypto_blkcipher_crt(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->crt_blkcipher;
}

static inline struct blkcipher_alg *crypto_blkcipher_alg(
	struct crypto_blkcipher *tfm)
{
	return &crypto_blkcipher_tfm(tfm)->__crt_alg->cra_blkcipher;
}

static inline unsigned int crypto_blkcipher_ivsize(struct crypto_blkcipher *tfm)
{
	return crypto_blkcipher_alg(tfm)->ivsize;
}

static inline unsigned int crypto_blkcipher_blocksize(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_blkcipher_tfm(tfm));
}

static inline unsigned int crypto_blkcipher_alignmask(
	struct crypto_blkcipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_blkcipher_tfm(tfm));
}

static inline u32 crypto_blkcipher_get_flags(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_get_flags(crypto_blkcipher_tfm(tfm));
}

static inline void crypto_blkcipher_set_flags(struct crypto_blkcipher *tfm,
					      u32 flags)
{
	crypto_tfm_set_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline void crypto_blkcipher_clear_flags(struct crypto_blkcipher *tfm,
						u32 flags)
{
	crypto_tfm_clear_flags(crypto_blkcipher_tfm(tfm), flags);
}

static inline int crypto_blkcipher_setkey(struct crypto_blkcipher *tfm,
					  const u8 *key, unsigned int keylen)
{
	return crypto_blkcipher_crt(tfm)->setkey(crypto_blkcipher_tfm(tfm),
						 key, keylen);
}

static inline int crypto_blkcipher_encrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_encrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->encrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt(struct blkcipher_desc *desc,
					   struct scatterlist *dst,
					   struct scatterlist *src,
					   unsigned int nbytes)
{
	desc->info = crypto_blkcipher_crt(desc->tfm)->iv;
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline int crypto_blkcipher_decrypt_iv(struct blkcipher_desc *desc,
					      struct scatterlist *dst,
					      struct scatterlist *src,
					      unsigned int nbytes)
{
	return crypto_blkcipher_crt(desc->tfm)->decrypt(desc, dst, src, nbytes);
}

static inline void crypto_blkcipher_set_iv(struct crypto_blkcipher *tfm,
					   const u8 *src, unsigned int len)
{
	memcpy(crypto_blkcipher_crt(tfm)->iv, src, len);
}

static inline void crypto_blkcipher_get_iv(struct crypto_blkcipher *tfm,
					   u8 *dst, unsigned int len)
{
	memcpy(dst, crypto_blkcipher_crt(tfm)->iv, len);
}

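/*
 * Usage sketch for the synchronous blkcipher interface above (illustrative
 * only; "cbc(aes)" and the sg_src, sg_dst, key and iv identifiers are
 * assumptions).  Because crypto_alloc_blkcipher() ORs the type bits into
 * the mask, passing CRYPTO_ALG_ASYNC in the mask with a zero type requires
 * the ASYNC flag to be clear, i.e. it selects a synchronous implementation.
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *
 *	tfm = crypto_alloc_blkcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	crypto_blkcipher_setkey(tfm, key, keylen);
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	err = crypto_blkcipher_encrypt(&desc, sg_dst, sg_src, nbytes);
 *
 *	crypto_free_blkcipher(tfm);
 */
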
static inline struct crypto_cipher *__crypto_cipher_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_cipher *)tfm;
}

static inline struct crypto_cipher *crypto_cipher_cast(struct crypto_tfm *tfm)
{
	BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
	return __crypto_cipher_cast(tfm);
}

static inline struct crypto_cipher *crypto_alloc_cipher(const char *alg_name,
							u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_cipher_tfm(struct crypto_cipher *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_cipher(struct crypto_cipher *tfm)
{
	crypto_free_tfm(crypto_cipher_tfm(tfm));
}

static inline int crypto_has_cipher(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_CIPHER;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct cipher_tfm *crypto_cipher_crt(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->crt_cipher;
}

static inline unsigned int crypto_cipher_blocksize(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_cipher_tfm(tfm));
}

static inline unsigned int crypto_cipher_alignmask(struct crypto_cipher *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_cipher_tfm(tfm));
}

static inline u32 crypto_cipher_get_flags(struct crypto_cipher *tfm)
{
	return crypto_tfm_get_flags(crypto_cipher_tfm(tfm));
}

static inline void crypto_cipher_set_flags(struct crypto_cipher *tfm,
					   u32 flags)
{
	crypto_tfm_set_flags(crypto_cipher_tfm(tfm), flags);
}

static inline void crypto_cipher_clear_flags(struct crypto_cipher *tfm,
					     u32 flags)
{
	crypto_tfm_clear_flags(crypto_cipher_tfm(tfm), flags);
}

static inline int crypto_cipher_setkey(struct crypto_cipher *tfm,
				       const u8 *key, unsigned int keylen)
{
	return crypto_cipher_crt(tfm)->cit_setkey(crypto_cipher_tfm(tfm),
						  key, keylen);
}

static inline void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_encrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

static inline void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
					     u8 *dst, const u8 *src)
{
	crypto_cipher_crt(tfm)->cit_decrypt_one(crypto_cipher_tfm(tfm),
						dst, src);
}

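/*
 * Usage sketch for the single block cipher interface above (illustrative
 * only; "aes" is an example name).  crypto_cipher_encrypt_one() processes
 * exactly one block of crypto_cipher_blocksize() bytes and applies no
 * chaining mode or IV.
 *
 *	struct crypto_cipher *tfm;
 *
 *	tfm = crypto_alloc_cipher("aes", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	crypto_cipher_setkey(tfm, key, keylen);
 *	crypto_cipher_encrypt_one(tfm, dst, src);
 *	crypto_free_cipher(tfm);
 */
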
/*
 * Synchronous message digest (hash) interface.
 */
static inline struct crypto_hash *__crypto_hash_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_hash *)tfm;
}

static inline struct crypto_hash *crypto_hash_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_HASH) &
	       CRYPTO_ALG_TYPE_HASH_MASK);
	return __crypto_hash_cast(tfm);
}

static inline struct crypto_hash *crypto_alloc_hash(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_hash_tfm(struct crypto_hash *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_hash(struct crypto_hash *tfm)
{
	crypto_free_tfm(crypto_hash_tfm(tfm));
}

static inline int crypto_has_hash(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	mask &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_HASH;
	mask |= CRYPTO_ALG_TYPE_HASH_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline struct hash_tfm *crypto_hash_crt(struct crypto_hash *tfm)
{
	return &crypto_hash_tfm(tfm)->crt_hash;
}

static inline unsigned int crypto_hash_blocksize(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_blocksize(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_alignmask(struct crypto_hash *tfm)
{
	return crypto_tfm_alg_alignmask(crypto_hash_tfm(tfm));
}

static inline unsigned int crypto_hash_digestsize(struct crypto_hash *tfm)
{
	return crypto_hash_crt(tfm)->digestsize;
}

static inline u32 crypto_hash_get_flags(struct crypto_hash *tfm)
{
	return crypto_tfm_get_flags(crypto_hash_tfm(tfm));
}

static inline void crypto_hash_set_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_set_flags(crypto_hash_tfm(tfm), flags);
}

static inline void crypto_hash_clear_flags(struct crypto_hash *tfm, u32 flags)
{
	crypto_tfm_clear_flags(crypto_hash_tfm(tfm), flags);
}

static inline int crypto_hash_init(struct hash_desc *desc)
{
	return crypto_hash_crt(desc->tfm)->init(desc);
}

static inline int crypto_hash_update(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes)
{
	return crypto_hash_crt(desc->tfm)->update(desc, sg, nbytes);
}

static inline int crypto_hash_final(struct hash_desc *desc, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->final(desc, out);
}

static inline int crypto_hash_digest(struct hash_desc *desc,
				     struct scatterlist *sg,
				     unsigned int nbytes, u8 *out)
{
	return crypto_hash_crt(desc->tfm)->digest(desc, sg, nbytes, out);
}

/* Only meaningful for keyed digests, e.g. "hmac(sha1)" instances. */
static inline int crypto_hash_setkey(struct crypto_hash *hash,
				     const u8 *key, unsigned int keylen)
{
	return crypto_hash_crt(hash)->setkey(hash, key, keylen);
}

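/*
 * Usage sketch for the hash interface above (illustrative only; "sha1" is
 * an example name, and sg, nbytes, key and keylen are assumptions).
 * crypto_hash_digest() is shorthand for init + update + final; keyed
 * digests such as HMAC additionally need crypto_hash_setkey() first.
 *
 *	struct crypto_hash *tfm;
 *	struct hash_desc desc;
 *	u8 out[20];	(20 bytes is the SHA-1 digest size)
 *
 *	tfm = crypto_alloc_hash("sha1", 0, CRYPTO_ALG_ASYNC);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	err = crypto_hash_digest(&desc, sg, nbytes, out);
 *
 *	crypto_free_hash(tfm);
 */
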
static inline struct crypto_comp *__crypto_comp_cast(struct crypto_tfm *tfm)
{
	return (struct crypto_comp *)tfm;
}

static inline struct crypto_comp *crypto_comp_cast(struct crypto_tfm *tfm)
{
	BUG_ON((crypto_tfm_alg_type(tfm) ^ CRYPTO_ALG_TYPE_COMPRESS) &
	       CRYPTO_ALG_TYPE_MASK);
	return __crypto_comp_cast(tfm);
}

static inline struct crypto_comp *crypto_alloc_comp(const char *alg_name,
						    u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return __crypto_comp_cast(crypto_alloc_base(alg_name, type, mask));
}

static inline struct crypto_tfm *crypto_comp_tfm(struct crypto_comp *tfm)
{
	return &tfm->base;
}

static inline void crypto_free_comp(struct crypto_comp *tfm)
{
	crypto_free_tfm(crypto_comp_tfm(tfm));
}

static inline int crypto_has_comp(const char *alg_name, u32 type, u32 mask)
{
	type &= ~CRYPTO_ALG_TYPE_MASK;
	type |= CRYPTO_ALG_TYPE_COMPRESS;
	mask |= CRYPTO_ALG_TYPE_MASK;

	return crypto_has_alg(alg_name, type, mask);
}

static inline const char *crypto_comp_name(struct crypto_comp *tfm)
{
	return crypto_tfm_alg_name(crypto_comp_tfm(tfm));
}

static inline struct compress_tfm *crypto_comp_crt(struct crypto_comp *tfm)
{
	return &crypto_comp_tfm(tfm)->crt_compress;
}

static inline int crypto_comp_compress(struct crypto_comp *tfm,
				       const u8 *src, unsigned int slen,
				       u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_compress(crypto_comp_tfm(tfm),
						  src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_comp *tfm,
					 const u8 *src, unsigned int slen,
					 u8 *dst, unsigned int *dlen)
{
	return crypto_comp_crt(tfm)->cot_decompress(crypto_comp_tfm(tfm),
						    src, slen, dst, dlen);
}

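/*
 * Usage sketch for the compression interface above (illustrative only;
 * "deflate" is an example name, and src, slen and dst_buf are assumptions).
 * The dlen argument is in/out: it must hold the destination buffer size on
 * entry and is updated to the number of bytes actually produced.
 *
 *	struct crypto_comp *tfm;
 *	unsigned int dlen = sizeof(dst_buf);
 *
 *	tfm = crypto_alloc_comp("deflate", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_comp_compress(tfm, src, slen, dst_buf, &dlen);
 *	(crypto_comp_decompress() reverses the transform with the same
 *	calling convention.)
 *
 *	crypto_free_comp(tfm);
 */
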
#endif	/* _LINUX_CRYPTO_H */