Loading...
1/*
2 * Software async crypto daemon
3 *
4 * Added AEAD support to cryptd.
5 * Authors: Tadeusz Struk (tadeusz.struk@intel.com)
6 * Adrian Hoban <adrian.hoban@intel.com>
7 * Gabriele Paoloni <gabriele.paoloni@intel.com>
8 * Aidan O'Mahony (aidan.o.mahony@intel.com)
9 * Copyright (c) 2010, Intel Corporation.
10 */
11
12#ifndef _CRYPTO_CRYPT_H
13#define _CRYPTO_CRYPT_H
14
15#include <linux/crypto.h>
16#include <linux/kernel.h>
17#include <crypto/hash.h>
18
/**
 * struct cryptd_ablkcipher - handle for a cryptd-wrapped ablkcipher
 * @base: the underlying crypto_ablkcipher transform
 *
 * @base is the sole member, so a crypto_ablkcipher pointer may be cast
 * directly to a cryptd_ablkcipher pointer (see __cryptd_ablkcipher_cast()).
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};
22
struct crypto_ablkcipher;
struct cryptd_ablkcipher;

/*
 * View a base crypto_ablkcipher handle as its cryptd wrapper.  This is a
 * pure pointer reinterpretation: struct cryptd_ablkcipher embeds the base
 * handle as its only member, so both pointers refer to the same object.
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
28
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/* NOTE(review): returns a crypto_blkcipher (sync child) handle, not an
 * ablkcipher — presumably the synchronous transform cryptd drives;
 * confirm against the cryptd implementation. */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/* Release a tfm obtained from cryptd_alloc_ablkcipher(). */
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
34
/**
 * struct cryptd_ahash - handle for a cryptd-wrapped ahash
 * @base: the underlying crypto_ahash transform
 *
 * @base is the sole member, so a crypto_ahash pointer may be cast
 * directly to a cryptd_ahash pointer (see __cryptd_ahash_cast()).
 */
struct cryptd_ahash {
	struct crypto_ahash base;
};
38
struct crypto_ahash;
struct cryptd_ahash;

/*
 * View a base crypto_ahash handle as its cryptd wrapper.  Pure pointer
 * reinterpretation: struct cryptd_ahash embeds the base handle as its
 * only member, so both pointers refer to the same object.
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
44
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/* Return the (synchronous) shash child transform driven by the daemon. */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Map an ahash_request onto the child's shash_desc. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Release a tfm obtained from cryptd_alloc_ahash(). */
void cryptd_free_ahash(struct cryptd_ahash *tfm);
51
/**
 * struct cryptd_aead - handle for a cryptd-wrapped AEAD transform
 * @base: the underlying crypto_aead transform
 *
 * @base is the sole member, so a crypto_aead pointer may be cast
 * directly to a cryptd_aead pointer (see __cryptd_aead_cast()).
 */
struct cryptd_aead {
	struct crypto_aead base;
};
55
struct crypto_aead;
struct cryptd_aead;

/*
 * View a base crypto_aead handle as its cryptd wrapper.  Pure pointer
 * reinterpretation: struct cryptd_aead embeds the base handle as its
 * only member, so both pointers refer to the same object.
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
61
/* alg_name should be the AEAD algorithm to be cryptd-ed */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

/* Return the child AEAD transform driven by the daemon. */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);

/* Release a tfm obtained from cryptd_alloc_aead(). */
void cryptd_free_aead(struct cryptd_aead *tfm);
68
69#endif
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Software async crypto daemon
4 *
5 * Added AEAD support to cryptd.
6 * Authors: Tadeusz Struk (tadeusz.struk@intel.com)
7 * Adrian Hoban <adrian.hoban@intel.com>
8 * Gabriele Paoloni <gabriele.paoloni@intel.com>
9 * Aidan O'Mahony (aidan.o.mahony@intel.com)
10 * Copyright (c) 2010, Intel Corporation.
11 */
12
13#ifndef _CRYPTO_CRYPT_H
14#define _CRYPTO_CRYPT_H
15
16#include <linux/kernel.h>
17#include <crypto/aead.h>
18#include <crypto/hash.h>
19#include <crypto/skcipher.h>
20
/**
 * struct cryptd_ablkcipher - handle for a cryptd-wrapped ablkcipher
 * @base: the underlying crypto_ablkcipher transform
 *
 * @base is the sole member, so a crypto_ablkcipher pointer may be cast
 * directly to a cryptd_ablkcipher pointer (see __cryptd_ablkcipher_cast()).
 */
struct cryptd_ablkcipher {
	struct crypto_ablkcipher base;
};
24
struct crypto_ablkcipher;
struct cryptd_ablkcipher;

/*
 * View a base crypto_ablkcipher handle as its cryptd wrapper.  This is a
 * pure pointer reinterpretation: struct cryptd_ablkcipher embeds the base
 * handle as its only member, so both pointers refer to the same object.
 */
static inline struct cryptd_ablkcipher *__cryptd_ablkcipher_cast(
	struct crypto_ablkcipher *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
30
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
						  u32 type, u32 mask);
/* NOTE(review): returns a crypto_blkcipher (sync child) handle, not an
 * ablkcipher — confirm against the cryptd implementation. */
struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm);
/* True if requests are still queued for @tfm — presumably per-CPU state,
 * so call without moving CPUs (matches the sibling *_queued helpers). */
bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm);
/* Release a tfm obtained from cryptd_alloc_ablkcipher(). */
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm);
37
/**
 * struct cryptd_skcipher - handle for a cryptd-wrapped skcipher
 * @base: the underlying crypto_skcipher transform
 */
struct cryptd_skcipher {
	struct crypto_skcipher base;
};
41
/* alg_name should be the skcipher algorithm to be cryptd-ed */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask);
/* Return the child skcipher transform driven by the daemon. */
struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
/* Must be called without moving CPUs. */
bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
/* Release a tfm obtained from cryptd_alloc_skcipher(). */
void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
48
/**
 * struct cryptd_ahash - handle for a cryptd-wrapped ahash
 * @base: the underlying crypto_ahash transform
 *
 * @base is the sole member, so a crypto_ahash pointer may be cast
 * directly to a cryptd_ahash pointer (see __cryptd_ahash_cast()).
 */
struct cryptd_ahash {
	struct crypto_ahash base;
};
52
struct crypto_ahash;
struct cryptd_ahash;

/*
 * View a base crypto_ahash handle as its cryptd wrapper.  Pure pointer
 * reinterpretation: struct cryptd_ahash embeds the base handle as its
 * only member, so both pointers refer to the same object.
 */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
58
/* alg_name should be algorithm to be cryptd-ed */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask);
/* Return the (synchronous) shash child transform driven by the daemon. */
struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
/* Map an ahash_request onto the child's shash_desc. */
struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
/* Must be called without moving CPUs. */
bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
/* Release a tfm obtained from cryptd_alloc_ahash(). */
void cryptd_free_ahash(struct cryptd_ahash *tfm);
67
/**
 * struct cryptd_aead - handle for a cryptd-wrapped AEAD transform
 * @base: the underlying crypto_aead transform
 *
 * @base is the sole member, so a crypto_aead pointer may be cast
 * directly to a cryptd_aead pointer (see __cryptd_aead_cast()).
 */
struct cryptd_aead {
	struct crypto_aead base;
};
71
struct crypto_aead;
struct cryptd_aead;

/*
 * View a base crypto_aead handle as its cryptd wrapper.  Pure pointer
 * reinterpretation: struct cryptd_aead embeds the base handle as its
 * only member, so both pointers refer to the same object.
 */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	void *wrapper = tfm;

	return wrapper;
}
77
/* alg_name should be the AEAD algorithm to be cryptd-ed */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
				      u32 type, u32 mask);

/* Return the child AEAD transform driven by the daemon. */
struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
/* Must be called without moving CPUs. */
bool cryptd_aead_queued(struct cryptd_aead *tfm);

/* Release a tfm obtained from cryptd_alloc_aead(). */
void cryptd_free_aead(struct cryptd_aead *tfm);
86
87#endif