v6.8: include/crypto/internal/aead.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

struct aead_instance {
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};
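
/*
 * Note: the union above overlays the tail of struct aead_alg (its
 * 'base' member onwards) with a struct crypto_instance, so the same
 * object can be viewed as either type.  A minimal sanity-check sketch
 * (hypothetical, not part of the upstream header; assumes
 * <linux/build_bug.h>):
 */
static inline void aead_instance_layout_check(void)
{
	BUILD_BUG_ON(offsetof(struct aead_instance, s.base) !=
		     offsetof(struct aead_instance, alg.base));
}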

struct crypto_aead_spawn {
	struct crypto_spawn base;
};

struct aead_queue {
	struct crypto_queue base;
};

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_aead_ctx_dma(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}
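
/*
 * Example: an AEAD implementation keeps its per-transform state in the
 * context returned by crypto_aead_ctx().  A minimal sketch, assuming a
 * hypothetical driver context and the usual <linux/string.h> and
 * <linux/errno.h> headers (not part of the upstream header):
 */
struct my_aead_ctx {
	u8 key[32];
};

static int my_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			  unsigned int keylen)
{
	struct my_aead_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != sizeof(ctx->key))
		return -EINVAL;
	memcpy(ctx->key, key, keylen);
	return 0;
}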

static inline struct crypto_instance *aead_crypto_instance(
	struct aead_instance *inst)
{
	return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
	return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
	return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void *aead_request_ctx_dma(struct aead_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(aead_request_ctx(req), align);
}
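
/*
 * Example: a driver that lets hardware DMA into its request context
 * pairs crypto_aead_set_reqsize_dma() with aead_request_ctx_dma() so
 * the pointer it gets back is suitably aligned.  A minimal
 * hypothetical sketch (not part of the upstream header):
 */
struct my_req_ctx {
	u8 hw_desc[64];
};

static int my_aead_encrypt(struct aead_request *req)
{
	struct my_req_ctx *rctx = aead_request_ctx_dma(req);

	/* rctx is aligned for DMA; build the hardware descriptor in it */
	memset(rctx->hw_desc, 0, sizeof(rctx->hw_desc));
	return -EINPROGRESS;
}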

static inline void aead_request_complete(struct aead_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

int crypto_grab_aead(struct crypto_aead_spawn *spawn,
		     struct crypto_instance *inst,
		     const char *name, u32 type, u32 mask);
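
/*
 * Example: a template's ->create() callback typically embeds a
 * crypto_aead_spawn in its instance context and grabs the wrapped
 * AEAD by name.  A minimal sketch loosely following existing
 * templates (hypothetical; assumes <linux/slab.h>; error unwinding
 * abbreviated):
 */
static int my_tmpl_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_aead_spawn *spawn;
	struct aead_instance *inst;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = aead_instance_ctx(inst);

	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err) {
		kfree(inst);	/* nothing grabbed yet, plain free suffices */
		return err;
	}

	/* ... fill in inst->alg and finish with aead_register_instance() ... */
	return 0;
}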

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct aead_alg *crypto_spawn_aead_alg(
	struct crypto_aead_spawn *spawn)
{
	return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
	struct crypto_aead_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}
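
/*
 * Example: an implementation declares how much per-request context it
 * needs from its ->init() hook; the core then allocates that much
 * extra space in every aead_request.  A minimal hypothetical sketch
 * reusing struct my_req_ctx from above:
 */
static int my_aead_init_tfm(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct my_req_ctx));
	return 0;
}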

static inline void crypto_aead_set_reqsize_dma(struct crypto_aead *aead,
					       unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	aead->reqsize = reqsize;
}

static inline void aead_init_queue(struct aead_queue *queue,
				   unsigned int max_qlen)
{
	crypto_init_queue(&queue->base, max_qlen);
}

static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}
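
/*
 * Example: code feeding data to such a cipher incrementally would cap
 * each partial update at a multiple of the chunk size.  A minimal
 * hypothetical sketch (rounddown() is from <linux/math.h>):
 */
static unsigned int my_update_len(struct crypto_aead *tfm,
				  unsigned int avail)
{
	unsigned int chunk = crypto_aead_chunksize(tfm);

	return rounddown(avail, chunk);
}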

int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);
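
/*
 * Example: a driver registers a static struct aead_alg at module init.
 * A minimal hypothetical sketch tying together the helpers above
 * (remaining callbacks and <linux/module.h> assumed; cra_name is a
 * made-up placeholder):
 */
static struct aead_alg my_aead_alg = {
	.setkey		= my_aead_setkey,
	.encrypt	= my_aead_encrypt,
	.init		= my_aead_init_tfm,
	.ivsize		= 12,
	.maxauthsize	= 16,
	.base = {
		.cra_name	= "my-aead",
		.cra_driver_name = "my-aead-driver",
		.cra_blocksize	= 1,
		.cra_ctxsize	= sizeof(struct my_aead_ctx),
		.cra_module	= THIS_MODULE,
	},
};

static int __init my_aead_mod_init(void)
{
	return crypto_register_aead(&my_aead_alg);
}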

#endif	/* _CRYPTO_INTERNAL_AEAD_H */

v5.14.15: include/crypto/internal/aead.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * AEAD: Authenticated Encryption with Associated Data
 *
 * Copyright (c) 2007-2015 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_AEAD_H
#define _CRYPTO_INTERNAL_AEAD_H

#include <crypto/aead.h>
#include <crypto/algapi.h>
#include <linux/stddef.h>
#include <linux/types.h>

struct rtattr;

struct aead_instance {
	void (*free)(struct aead_instance *inst);
	union {
		struct {
			char head[offsetof(struct aead_alg, base)];
			struct crypto_instance base;
		} s;
		struct aead_alg alg;
	};
};

struct crypto_aead_spawn {
	struct crypto_spawn base;
};

struct aead_queue {
	struct crypto_queue base;
};

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *aead_crypto_instance(
	struct aead_instance *inst)
{
	return container_of(&inst->alg.base, struct crypto_instance, alg);
}

static inline struct aead_instance *aead_instance(struct crypto_instance *inst)
{
	return container_of(&inst->alg, struct aead_instance, alg.base);
}

static inline struct aead_instance *aead_alg_instance(struct crypto_aead *aead)
{
	return aead_instance(crypto_tfm_alg_instance(&aead->base));
}

static inline void *aead_instance_ctx(struct aead_instance *inst)
{
	return crypto_instance_ctx(aead_crypto_instance(inst));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct aead_request *aead_request_cast(
	struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

int crypto_grab_aead(struct crypto_aead_spawn *spawn,
		     struct crypto_instance *inst,
		     const char *name, u32 type, u32 mask);

static inline void crypto_drop_aead(struct crypto_aead_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct aead_alg *crypto_spawn_aead_alg(
	struct crypto_aead_spawn *spawn)
{
	return container_of(spawn->base.alg, struct aead_alg, base);
}

static inline struct crypto_aead *crypto_spawn_aead(
	struct crypto_aead_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_aead_set_reqsize(struct crypto_aead *aead,
					   unsigned int reqsize)
{
	aead->reqsize = reqsize;
}

static inline void aead_init_queue(struct aead_queue *queue,
				   unsigned int max_qlen)
{
	crypto_init_queue(&queue->base, max_qlen);
}

static inline int aead_enqueue_request(struct aead_queue *queue,
				       struct aead_request *request)
{
	return crypto_enqueue_request(&queue->base, &request->base);
}

static inline struct aead_request *aead_dequeue_request(
	struct aead_queue *queue)
{
	struct crypto_async_request *req;

	req = crypto_dequeue_request(&queue->base);

	return req ? container_of(req, struct aead_request, base) : NULL;
}

static inline struct aead_request *aead_get_backlog(struct aead_queue *queue)
{
	struct crypto_async_request *req;

	req = crypto_get_backlog(&queue->base);

	return req ? container_of(req, struct aead_request, base) : NULL;
}
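
/*
 * Example: a driver worker built on these queue helpers typically pops
 * any backlogged request, signals that it is now in progress, then
 * processes the next queued request.  A minimal hypothetical sketch
 * (locking around the queue and <linux/errno.h> omitted):
 */
static void my_aead_do_one(struct aead_queue *queue)
{
	struct aead_request *backlog, *req;

	backlog = aead_get_backlog(queue);
	req = aead_dequeue_request(queue);
	if (!req)
		return;

	if (backlog)
		aead_request_complete(backlog, -EINPROGRESS);

	/* ... process req, then call aead_request_complete(req, err) ... */
}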

static inline unsigned int crypto_aead_alg_chunksize(struct aead_alg *alg)
{
	return alg->chunksize;
}

/**
 * crypto_aead_chunksize() - obtain chunk size
 * @tfm: cipher handle
 *
 * The block size is set to one for ciphers such as CCM.  However,
 * you still need to provide incremental updates in multiples of
 * the underlying block size as the IV does not have sub-block
 * granularity.  This is known in this API as the chunk size.
 *
 * Return: chunk size in bytes
 */
static inline unsigned int crypto_aead_chunksize(struct crypto_aead *tfm)
{
	return crypto_aead_alg_chunksize(crypto_aead_alg(tfm));
}

int crypto_register_aead(struct aead_alg *alg);
void crypto_unregister_aead(struct aead_alg *alg);
int crypto_register_aeads(struct aead_alg *algs, int count);
void crypto_unregister_aeads(struct aead_alg *algs, int count);
int aead_register_instance(struct crypto_template *tmpl,
			   struct aead_instance *inst);

#endif	/* _CRYPTO_INTERNAL_AEAD_H */