/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
        unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
        unsigned int (*extsize)(struct crypto_alg *alg);
        int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
        int (*init_tfm)(struct crypto_tfm *tfm);
        void (*show)(struct seq_file *m, struct crypto_alg *alg);
        int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
        struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

        unsigned int type;
        unsigned int maskclear;
        unsigned int maskset;
        unsigned int tfmsize;
};

struct crypto_instance {
        struct crypto_alg alg;

        struct crypto_template *tmpl;
        struct hlist_node list;

        void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
        struct list_head list;
        struct hlist_head instances;
        struct module *module;

        struct crypto_instance *(*alloc)(struct rtattr **tb);
        void (*free)(struct crypto_instance *inst);
        int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

        char name[CRYPTO_MAX_ALG_NAME];
};
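
/*
 * Illustrative sketch only: roughly how a template built on this API tends to
 * look.  The example_* names and the single-spawn layout are assumptions for
 * illustration; real templates such as "ecb" follow the same shape.
 *
 *      static struct crypto_instance *example_alloc(struct rtattr **tb)
 *      {
 *              struct crypto_instance *inst;
 *              struct crypto_alg *alg;
 *
 *              alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *                                        CRYPTO_ALG_TYPE_MASK);
 *              if (IS_ERR(alg))
 *                      return ERR_CAST(alg);
 *
 *              inst = crypto_alloc_instance("example", alg);
 *              crypto_mod_put(alg);
 *              return inst;
 *      }
 *
 *      static void example_free(struct crypto_instance *inst)
 *      {
 *              crypto_drop_spawn(crypto_instance_ctx(inst));
 *              kfree(inst);
 *      }
 *
 *      static struct crypto_template example_tmpl = {
 *              .name   = "example",
 *              .alloc  = example_alloc,
 *              .free   = example_free,
 *              .module = THIS_MODULE,
 *      };
 *
 *      // registered/unregistered from module init/exit via
 *      // crypto_register_template() / crypto_unregister_template()
 */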

struct crypto_spawn {
        struct list_head list;
        struct crypto_alg *alg;
        struct crypto_instance *inst;
        const struct crypto_type *frontend;
        u32 mask;
};

struct crypto_queue {
        struct list_head list;
        struct list_head *backlog;

        unsigned int qlen;
        unsigned int max_qlen;
};

struct scatter_walk {
        struct scatterlist *sg;
        unsigned int offset;
};

struct blkcipher_walk {
        union {
                struct {
                        struct page *page;
                        unsigned long offset;
                } phys;

                struct {
                        u8 *page;
                        u8 *addr;
                } virt;
        } src, dst;

        struct scatter_walk in;
        unsigned int nbytes;

        struct scatter_walk out;
        unsigned int total;

        void *page;
        u8 *buffer;
        u8 *iv;
        unsigned int ivsize;

        int flags;
        unsigned int walk_blocksize;
        unsigned int cipher_blocksize;
        unsigned int alignmask;
};

struct ablkcipher_walk {
        struct {
                struct page *page;
                unsigned int offset;
        } src, dst;

        struct scatter_walk in;
        unsigned int nbytes;
        struct scatter_walk out;
        unsigned int total;
        struct list_head buffers;
        u8 *iv_buffer;
        u8 *iv;
        int flags;
        unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
                             struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_alg *alg);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
                      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
                       struct crypto_instance *inst,
                       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
                                    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
                                    struct crypto_instance *inst)
{
        spawn->inst = inst;
}
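
/*
 * Illustrative sketch only: the usual spawn lifecycle.  A template instance
 * embeds a crypto_spawn in its context, binds it to the underlying algorithm
 * at instance-creation time, and turns it into a tfm when the wrapping tfm is
 * initialised.  The example_* name below is an assumption.
 *
 *      // in the template's alloc()/create():
 *      //      err = crypto_init_spawn(spawn, alg, inst, CRYPTO_ALG_TYPE_MASK);
 *
 *      static int example_init_tfm(struct crypto_tfm *tfm)
 *      {
 *              struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
 *              struct crypto_spawn *spawn = crypto_instance_ctx(inst);
 *              struct crypto_cipher *cipher;
 *
 *              cipher = crypto_spawn_cipher(spawn);
 *              if (IS_ERR(cipher))
 *                      return PTR_ERR(cipher);
 *
 *              // keep the spawned cipher in crypto_tfm_ctx(tfm) and release
 *              // it with crypto_free_cipher() from the matching exit hook
 *              return 0;
 *      }
 */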

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
                                    const struct crypto_type *frontend,
                                    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
                                                 u32 type, u32 mask)
{
        return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
                             unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
                                              struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
                           struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
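
/*
 * Illustrative sketch only: how crypto_inc() and crypto_xor() are typically
 * combined in counter-style code.  The buffer names are assumptions; every
 * buffer must satisfy the u32 alignment requirement noted above.
 *
 *      u8 ctrblk[16] __aligned(4);     // big-endian counter block
 *      u8 keystream[16] __aligned(4);  // one block of keystream
 *      u8 block[16] __aligned(4);      // one block of data, done in place
 *
 *      // ... produce keystream by encrypting ctrblk with the block cipher ...
 *      crypto_xor(block, keystream, 16);       // block ^= keystream
 *      crypto_inc(ctrblk, 16);                 // advance the big-endian counter
 */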

int blkcipher_walk_done(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
                        struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
                              struct blkcipher_walk *walk,
                              unsigned int blocksize);
int blkcipher_aead_walk_virt_block(struct blkcipher_desc *desc,
                                   struct blkcipher_walk *walk,
                                   struct crypto_aead *tfm,
                                   unsigned int blocksize);

int ablkcipher_walk_done(struct ablkcipher_request *req,
                         struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
                         struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
        return PTR_ALIGN(crypto_tfm_ctx(tfm),
                         crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
        struct crypto_tfm *tfm)
{
        return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
        return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
        struct crypto_ablkcipher *tfm)
{
        return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
        return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
        struct crypto_aead *aead)
{
        return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
        struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
        struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_CIPHER;
        u32 mask = CRYPTO_ALG_TYPE_MASK;

        return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
        return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
        u32 type = CRYPTO_ALG_TYPE_HASH;
        u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

        return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
        return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
        return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
                                       struct scatterlist *dst,
                                       struct scatterlist *src,
                                       unsigned int nbytes)
{
        walk->in.sg = src;
        walk->out.sg = dst;
        walk->total = nbytes;
}
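
/*
 * Illustrative sketch only: the canonical walk loop a synchronous blkcipher
 * implementation runs over its scatterlists.  dst, src and nbytes come from
 * the caller; the per-block processing is a placeholder.
 *
 *      struct blkcipher_walk walk;
 *      unsigned int bsize = crypto_blkcipher_blocksize(desc->tfm);
 *      int err;
 *
 *      blkcipher_walk_init(&walk, dst, src, nbytes);
 *      err = blkcipher_walk_virt(desc, &walk);
 *
 *      while ((nbytes = walk.nbytes)) {
 *              u8 *wsrc = walk.src.virt.addr;
 *              u8 *wdst = walk.dst.virt.addr;
 *
 *              do {
 *                      // ... process one block from wsrc into wdst ...
 *                      wsrc += bsize;
 *                      wdst += bsize;
 *              } while ((nbytes -= bsize) >= bsize);
 *
 *              err = blkcipher_walk_done(desc, &walk, nbytes);
 *      }
 *      return err;
 */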

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        walk->in.sg = src;
        walk->out.sg = dst;
        walk->total = nbytes;
        INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
        if (unlikely(!list_empty(&walk->buffers)))
                __ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
        struct crypto_queue *queue)
{
        return queue->backlog == &queue->list ? NULL :
               container_of(queue->backlog, struct crypto_async_request, list);
}
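
/*
 * Illustrative sketch only: the queue-handling pattern an asynchronous
 * implementation typically builds from the helpers here, normally run under
 * the queue owner's lock.  Names are assumptions.
 *
 *      struct crypto_async_request *async_req, *backlog;
 *      int err;
 *
 *      // submission path: returns -EINPROGRESS, or -EBUSY when the queue is
 *      // full (the request is only kept if CRYPTO_TFM_REQ_MAY_BACKLOG is set)
 *      err = crypto_enqueue_request(&queue, &req->base);
 *
 *      // worker path: pull the next request, kicking the backlog first
 *      backlog = crypto_get_backlog(&queue);
 *      async_req = crypto_dequeue_request(&queue);
 *      if (!async_req)
 *              return;
 *      if (backlog)
 *              backlog->complete(backlog, -EINPROGRESS);
 *
 *      // ... perform the operation described by async_req ...
 *      async_req->complete(async_req, err);
 */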

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
                                             struct ablkcipher_request *request)
{
        return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
        struct crypto_queue *queue)
{
        return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
        return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
                                          struct crypto_ablkcipher *tfm)
{
        return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
        return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
        req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
        return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
                                                     u32 type, u32 mask)
{
        return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if the type/mask combination requires a
 * synchronous algorithm; otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
        return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
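
/*
 * Illustrative sketch only: templates typically fold this into the mask used
 * to look up their underlying algorithm, so that a caller asking for a
 * synchronous instance only gets synchronous building blocks.
 *
 *      struct crypto_attr_type *algt;
 *      struct crypto_alg *alg;
 *
 *      algt = crypto_get_attr_type(tb);        // check IS_ERR(algt)
 *      alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
 *                            CRYPTO_ALG_TYPE_MASK |
 *                            crypto_requires_sync(algt->type, algt->mask));
 */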

noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);

/**
 * crypto_memneq - Compare two areas of memory without leaking
 *		   timing information.
 *
 * @a: One area of memory
 * @b: Another area of memory
 * @size: The size of the area.
 *
 * Returns 0 when data is equal, 1 otherwise.
 */
static inline int crypto_memneq(const void *a, const void *b, size_t size)
{
        return __crypto_memneq(a, b, size) != 0UL ? 1 : 0;
}
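
/*
 * Illustrative sketch only: typical use is verifying a MAC or authentication
 * tag, where the data-dependent early exit of memcmp() could leak timing.
 * The names below are placeholders.
 *
 *      if (crypto_memneq(computed_tag, received_tag, authsize))
 *              return -EBADMSG;        // tags differ; reject the message
 */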

#endif	/* _CRYPTO_ALGAPI_H */