v5.4
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/list.h>
#include <linux/types.h>

struct aead_request;
struct rtattr;

struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct skcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			void *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	struct list_head buffers;

	u8 *page;
	u8 *buffer;
	u8 *oiv;
	void *iv;

	unsigned int ivsize;

	int flags;
	unsigned int blocksize;
	unsigned int stride;
	unsigned int alignmask;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline void crypto_set_skcipher_spawn(
	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
			 u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg, base);
}

static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

int skcipher_walk_done(struct skcipher_walk *walk, int err);
int skcipher_walk_virt(struct skcipher_walk *walk,
		       struct skcipher_request *req,
		       bool atomic);
void skcipher_walk_atomise(struct skcipher_walk *walk);
int skcipher_walk_async(struct skcipher_walk *walk,
			struct skcipher_request *req);
int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
			       struct aead_request *req, bool atomic);
void skcipher_walk_complete(struct skcipher_walk *walk, int err);

static inline void skcipher_walk_abort(struct skcipher_walk *walk)
{
	skcipher_walk_done(walk, -ECANCELED);
}

static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
					       int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
{
	return req->base.flags;
}

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

static inline unsigned int crypto_skcipher_alg_min_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.min_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.min_keysize;

	return alg->min_keysize;
}

static inline unsigned int crypto_skcipher_alg_max_keysize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blkcipher.max_keysize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_ablkcipher.max_keysize;

	return alg->max_keysize;
}

static inline unsigned int crypto_skcipher_alg_chunksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->chunksize;
}

static inline unsigned int crypto_skcipher_alg_walksize(
	struct skcipher_alg *alg)
{
	if ((alg->base.cra_flags & CRYPTO_ALG_TYPE_MASK) ==
	    CRYPTO_ALG_TYPE_BLKCIPHER)
		return alg->base.cra_blocksize;

	if (alg->base.cra_ablkcipher.encrypt)
		return alg->base.cra_blocksize;

	return alg->walksize;
}

/**
 * crypto_skcipher_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CTR.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_skcipher_chunksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_chunksize(crypto_skcipher_alg(tfm));
}

/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply)
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}
struct skcipher_instance *
skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
			       struct crypto_alg **cipher_alg_ret);

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
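To see how the declarations above are typically consumed, here is a minimal sketch of a hypothetical software skcipher driver: it registers a struct skcipher_alg and walks a request with skcipher_walk_virt()/skcipher_walk_done(). Every example_* identifier and the "ecb(example)" algorithm name are made up for illustration, and the per-chunk "transform" is just a copy, so treat this as an outline of the calling pattern rather than real driver code.

#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>

struct example_ctx {
	u8 key[32];
	unsigned int keylen;
};

static int example_setkey(struct crypto_skcipher *tfm, const u8 *key,
			  unsigned int keylen)
{
	struct example_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (keylen > sizeof(ctx->key))
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;
	return 0;
}

static int example_crypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	/* Map the request's scatterlists for CPU (virtual address) access. */
	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		unsigned int n = walk.nbytes;

		/* Process whole strides except on the final chunk. */
		if (n < walk.total)
			n = round_down(n, walk.stride);

		/*
		 * Placeholder transform: copy src to dst.  A real driver
		 * would encrypt or decrypt n bytes here, using its key
		 * material and walk.iv.
		 */
		memcpy(walk.dst.virt.addr, walk.src.virt.addr, n);

		/* Report how many bytes of this chunk remain unprocessed. */
		err = skcipher_walk_done(&walk, walk.nbytes - n);
	}

	return err;
}

static struct skcipher_alg example_alg = {
	.base.cra_name		= "ecb(example)",
	.base.cra_driver_name	= "ecb-example",
	.base.cra_priority	= 100,
	.base.cra_blocksize	= 16,
	.base.cra_ctxsize	= sizeof(struct example_ctx),
	.base.cra_module	= THIS_MODULE,
	.min_keysize		= 16,
	.max_keysize		= 32,
	.setkey			= example_setkey,
	.encrypt		= example_crypt,
	.decrypt		= example_crypt,
};

static int __init example_mod_init(void)
{
	return crypto_register_skcipher(&example_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_skcipher(&example_alg);
}

module_init(example_mod_init);
module_exit(example_mod_exit);
MODULE_LICENSE("GPL");

Looping on walk.nbytes rather than on the request length is what lets the same code handle scatterlists that are fragmented or span highmem pages.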
v3.15
 
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/skcipher.h>
#include <linux/types.h>

struct rtattr;

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

extern const struct crypto_type crypto_givcipher_type;

static inline void crypto_set_skcipher_spawn(
	struct crypto_skcipher_spawn *spawn, struct crypto_instance *inst)
{
	crypto_set_spawn(&spawn->base, inst);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn, const char *name,
			 u32 type, u32 mask);

struct crypto_alg *crypto_lookup_skcipher(const char *name, u32 type, u32 mask);

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct crypto_alg *crypto_skcipher_spawn_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return spawn->base.alg;
}

static inline struct crypto_ablkcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return __crypto_ablkcipher_cast(
		crypto_spawn_tfm(&spawn->base, crypto_skcipher_type(0),
				 crypto_skcipher_mask(0)));
}

int skcipher_null_givencrypt(struct skcipher_givcrypt_request *req);
int skcipher_null_givdecrypt(struct skcipher_givcrypt_request *req);
const char *crypto_default_geniv(const struct crypto_alg *alg);

struct crypto_instance *skcipher_geniv_alloc(struct crypto_template *tmpl,
					     struct rtattr **tb, u32 type,
					     u32 mask);
void skcipher_geniv_free(struct crypto_instance *inst);
int skcipher_geniv_init(struct crypto_tfm *tfm);
void skcipher_geniv_exit(struct crypto_tfm *tfm);

static inline struct crypto_ablkcipher *skcipher_geniv_cipher(
	struct crypto_ablkcipher *geniv)
{
	return crypto_ablkcipher_crt(geniv)->base;
}

static inline int skcipher_enqueue_givcrypt(
	struct crypto_queue *queue, struct skcipher_givcrypt_request *request)
{
	return ablkcipher_enqueue_request(queue, &request->creq);
}

static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt(
	struct crypto_queue *queue)
{
	return __crypto_dequeue_request(
		queue, offsetof(struct skcipher_givcrypt_request, creq.base));
}

static inline void *skcipher_givcrypt_reqctx(
	struct skcipher_givcrypt_request *req)
{
	return ablkcipher_request_ctx(&req->creq);
}

static inline void ablkcipher_request_complete(struct ablkcipher_request *req,
					       int err)
{
	req->base.complete(&req->base, err);
}

static inline void skcipher_givcrypt_complete(
	struct skcipher_givcrypt_request *req, int err)
{
	ablkcipher_request_complete(&req->creq, err);
}

static inline u32 ablkcipher_request_flags(struct ablkcipher_request *req)
{
	return req->base.flags;
}

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
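For contrast with the v5.4 interface, a v3.15-era mode template typically embedded a crypto_skcipher_spawn in its instance context, bound it with crypto_set_skcipher_spawn() and grabbed the underlying algorithm by name. The sketch below is a hypothetical condensation of that calling sequence; example_alloc, the zero type/mask arguments, the "example(%s)" name, and the abbreviated error handling are illustrative assumptions, not code taken from any kernel template.

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/slab.h>

static struct crypto_instance *example_alloc(struct rtattr **tb)
{
	struct crypto_skcipher_spawn *spawn;
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	const char *cipher_name;
	int err;

	/* tb[1] conventionally carries the underlying algorithm name. */
	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return ERR_CAST(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return ERR_PTR(-ENOMEM);

	spawn = crypto_instance_ctx(inst);

	/* Tie the spawn to this instance, then look up the cipher. */
	crypto_set_skcipher_spawn(spawn, inst);
	err = crypto_grab_skcipher(spawn, cipher_name, 0, 0);
	if (err) {
		kfree(inst);
		return ERR_PTR(err);
	}

	alg = crypto_skcipher_spawn_alg(spawn);

	/* A real template would now populate the rest of inst->alg from alg
	 * and drop the spawn with crypto_drop_skcipher() on any later error. */
	snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "example(%s)",
		 alg->cra_name);

	return inst;
}

The v5.4 header above removes most of this boilerplate: crypto_spawn_skcipher() returns a crypto_skcipher directly and crypto_skcipher_spawn_alg() returns a skcipher_alg, so templates no longer cast through the old ablkcipher types.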