Loading...
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 * Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *          Adrian Hoban <adrian.hoban@intel.com>
 *          Gabriele Paoloni <gabriele.paoloni@intel.com>
 *          Aidan O'Mahony (aidan.o.mahony@intel.com)
 * Copyright (c) 2010, Intel Corporation.
 */
11
12#ifndef _CRYPTO_CRYPT_H
13#define _CRYPTO_CRYPT_H
14
15#include <linux/crypto.h>
16#include <linux/kernel.h>
17#include <crypto/hash.h>
18
/*
 * Handle for an ablkcipher transform running behind the cryptd daemon.
 * The base tfm is the first (and only) member; __cryptd_ablkcipher_cast()
 * below relies on this layout.
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};
22
/*
 * Recover the cryptd wrapper from its base ablkcipher handle.
 * struct cryptd_ablkcipher embeds the base tfm as its first member,
 * so both pointers designate the same storage.
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
28
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/* Return the wrapped (child) transform of @tfm. */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/* Free a transform allocated with cryptd_alloc_ablkcipher(). */
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
34
/*
 * Handle for an ahash transform running behind the cryptd daemon.
 * The base tfm is the first (and only) member; __cryptd_ahash_cast()
 * below relies on this layout.
 */
struct cryptd_ahash {
	struct crypto_ahash base;
};
38
/*
 * Recover the cryptd wrapper from its base ahash handle.
 * struct cryptd_ahash embeds the base tfm as its first member,
 * so both pointers designate the same storage.
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
44
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/* Return the wrapped (child) shash transform of @tfm. */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Return the shash_desc used to process @req. NOTE(review): where that
 * descriptor lives is defined by the cryptd implementation, not here. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Free a transform allocated with cryptd_alloc_ahash(). */
void cryptd_free_ahash(struct cryptd_ahash *tfm);
51
/*
 * Handle for an AEAD transform running behind the cryptd daemon.
 * The base tfm is the first (and only) member; __cryptd_aead_cast()
 * below relies on this layout.
 */
struct cryptd_aead {
	struct crypto_aead base;
};
55
/*
 * Recover the cryptd wrapper from its base AEAD handle.
 * struct cryptd_aead embeds the base tfm as its first member,
 * so both pointers designate the same storage.
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
61
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

/* Return the wrapped (child) AEAD transform of @tfm. */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);

/* Free a transform allocated with cryptd_alloc_aead(). */
void cryptd_free_aead(struct cryptd_aead *tfm);
68
69#endif
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 * Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *          Adrian Hoban <adrian.hoban@intel.com>
 *          Gabriele Paoloni <gabriele.paoloni@intel.com>
 *          Aidan O'Mahony (aidan.o.mahony@intel.com)
 * Copyright (c) 2010, Intel Corporation.
 */
11
12#ifndef _CRYPTO_CRYPT_H
13#define _CRYPTO_CRYPT_H
14
15#include <linux/kernel.h>
16#include <crypto/aead.h>
17#include <crypto/hash.h>
18#include <crypto/skcipher.h>
19
/*
 * Handle for an ablkcipher transform running behind the cryptd daemon.
 * The base tfm is the first (and only) member; __cryptd_ablkcipher_cast()
 * below relies on this layout.
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};
23
/*
 * Recover the cryptd wrapper from its base ablkcipher handle.
 * struct cryptd_ablkcipher embeds the base tfm as its first member,
 * so both pointers designate the same storage.
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
29
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/* Return the wrapped (child) transform of @tfm. */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/* NOTE(review): the other *_queued() helpers below say "Must be called
 * without moving CPUs"; presumably the same applies here — confirm. */
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
/* Free a transform allocated with cryptd_alloc_ablkcipher(). */
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
36
/*
 * Handle for an skcipher transform running behind the cryptd daemon.
 * Embeds the base tfm as its first (and only) member, matching the
 * other wrapper structs in this header.
 */
struct cryptd_skcipher {
	struct crypto_skcipher base;
};
40
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
/* Return the wrapped (child) transform of @tfm. */
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
/* Free a transform allocated with cryptd_alloc_skcipher(). */
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
47
/*
 * Handle for an ahash transform running behind the cryptd daemon.
 * The base tfm is the first (and only) member; __cryptd_ahash_cast()
 * below relies on this layout.
 */
struct cryptd_ahash {
	struct crypto_ahash base;
};
51
/*
 * Recover the cryptd wrapper from its base ahash handle.
 * struct cryptd_ahash embeds the base tfm as its first member,
 * so both pointers designate the same storage.
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
57
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/* Return the wrapped (child) shash transform of @tfm. */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Return the shash_desc used to process @req. NOTE(review): where that
 * descriptor lives is defined by the cryptd implementation, not here. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
/* Free a transform allocated with cryptd_alloc_ahash(). */
void cryptd_free_ahash(struct cryptd_ahash *tfm);
66
/*
 * Handle for an AEAD transform running behind the cryptd daemon.
 * The base tfm is the first (and only) member; __cryptd_aead_cast()
 * below relies on this layout.
 */
struct cryptd_aead {
	struct crypto_aead base;
};
70
/*
 * Recover the cryptd wrapper from its base AEAD handle.
 * struct cryptd_aead embeds the base tfm as its first member,
 * so both pointers designate the same storage.
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
76
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

/* Return the wrapped (child) AEAD transform of @tfm. */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

/* Free a transform allocated with cryptd_alloc_aead(). */
void cryptd_free_aead(struct cryptd_aead *tfm);
85
86#endif