/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Software async crypto daemon
 *
 * Added AEAD support to cryptd.
 * Authors: Tadeusz Struk (tadeusz.struk@intel.com)
 *          Adrian Hoban <adrian.hoban@intel.com>
 *          Gabriele Paoloni <gabriele.paoloni@intel.com>
 *          Aidan O'Mahony (aidan.o.mahony@intel.com)
 * Copyright (c) 2010, Intel Corporation.
 */

#ifndef _CRYPTO_CRYPT_H
#define _CRYPTO_CRYPT_H

#include <linux/kernel.h>
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>

21struct cryptd_skcipher {
22 struct crypto_skcipher base;
23};
24
25/* alg_name should be algorithm to be cryptd-ed */
26struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
27 u32 type, u32 mask);
28struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm);
29/* Must be called without moving CPUs. */
30bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm);
31void cryptd_free_skcipher(struct cryptd_skcipher *tfm);
32
33struct cryptd_ahash {
34 struct crypto_ahash base;
35};
36
/* Cast a generic crypto_ahash handle to its cryptd wrapper type. */
static inline struct cryptd_ahash *__cryptd_ahash_cast(
	struct crypto_ahash *tfm)
{
	return (struct cryptd_ahash *)tfm;
}

43/* alg_name should be algorithm to be cryptd-ed */
44struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
45 u32 type, u32 mask);
46struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm);
47struct shash_desc *cryptd_shash_desc(struct ahash_request *req);
48/* Must be called without moving CPUs. */
49bool cryptd_ahash_queued(struct cryptd_ahash *tfm);
50void cryptd_free_ahash(struct cryptd_ahash *tfm);
51
52struct cryptd_aead {
53 struct crypto_aead base;
54};
55
/* Cast a generic crypto_aead handle to its cryptd wrapper type. */
static inline struct cryptd_aead *__cryptd_aead_cast(
	struct crypto_aead *tfm)
{
	return (struct cryptd_aead *)tfm;
}

62struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
63 u32 type, u32 mask);
64
65struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm);
66/* Must be called without moving CPUs. */
67bool cryptd_aead_queued(struct cryptd_aead *tfm);
68
69void cryptd_free_aead(struct cryptd_aead *tfm);
70
#endif	/* _CRYPTO_CRYPT_H */
/*
 * NOTE(review): an accidental verbatim second copy of this entire header
 * (lines 1-71, including its own #ifndef/#endif guard) was pasted here.
 * It was dead text — the _CRYPTO_CRYPT_H guard defined by the first copy
 * already excluded it from compilation — and has been removed.
 */