v6.13.7
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * x64 SIMD accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sizes.h>
#include <asm/simd.h>

asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
					unsigned int len, int nrounds);
asmlinkage void hchacha_block_ssse3(const u32 *state, u32 *out, int nrounds);

asmlinkage void chacha_2block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);

asmlinkage void chacha_2block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx2);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx512vl);

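/* Number of whole blocks (rounded up) covered by @len bytes, capped at @maxblocks. */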
static unsigned int chacha_advance(unsigned int len, unsigned int maxblocks)
{
	len = min(len, maxblocks * CHACHA_BLOCK_SIZE);
	return round_up(len, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
}

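/*
 * Encrypt/decrypt @bytes of data, dispatching to the widest SIMD
 * implementation enabled for this CPU: AVX-512VL, then AVX2, then SSSE3.
 * state[12] is the block counter and is advanced as blocks are consumed.
 */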
static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds)
{
	if (IS_ENABLED(CONFIG_AS_AVX512) &&
	    static_branch_likely(&chacha_use_avx512vl)) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes) {
			chacha_2block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}

	if (static_branch_likely(&chacha_use_avx2)) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE) {
			chacha_2block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}

	while (bytes >= CHACHA_BLOCK_SIZE * 4) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		bytes -= CHACHA_BLOCK_SIZE * 4;
		src += CHACHA_BLOCK_SIZE * 4;
		dst += CHACHA_BLOCK_SIZE * 4;
		state[12] += 4;
	}
	if (bytes > CHACHA_BLOCK_SIZE) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12] += chacha_advance(bytes, 4);
		return;
	}
	if (bytes) {
		chacha_block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12]++;
	}
}

void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
		hchacha_block_generic(state, stream, nrounds);
	} else {
		kernel_fpu_begin();
		hchacha_block_ssse3(state, stream, nrounds);
		kernel_fpu_end();
	}
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
{
	chacha_init_generic(state, key, iv);
}
EXPORT_SYMBOL(chacha_init_arch);

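/*
 * Process at most SZ_4K (4 KiB) per kernel_fpu_begin()/kernel_fpu_end()
 * section so that preemption is not disabled for too long on large inputs.
 */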
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
		       int nrounds)
{
	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
	    bytes <= CHACHA_BLOCK_SIZE)
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	do {
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		kernel_fpu_begin();
		chacha_dosimd(state, dst, src, todo, nrounds);
		kernel_fpu_end();

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}
EXPORT_SYMBOL(chacha_crypt_arch);

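/*
 * Walk the request's scatterlists chunk by chunk, using the SIMD path when
 * the FPU is usable in the current context and the generic code otherwise.
 */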
static int chacha_simd_stream_xor(struct skcipher_request *req,
				  const struct chacha_ctx *ctx, const u8 *iv)
{
	u32 state[CHACHA_STATE_WORDS] __aligned(8);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	chacha_init_generic(state, ctx->key, iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		if (!static_branch_likely(&chacha_use_simd) ||
		    !crypto_simd_usable()) {
			chacha_crypt_generic(state, walk.dst.virt.addr,
					     walk.src.virt.addr, nbytes,
					     ctx->nrounds);
		} else {
			kernel_fpu_begin();
			chacha_dosimd(state, walk.dst.virt.addr,
				      walk.src.virt.addr, nbytes,
				      ctx->nrounds);
			kernel_fpu_end();
		}
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}

static int chacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);

	return chacha_simd_stream_xor(req, ctx, req->iv);
}

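/*
 * XChaCha: derive a subkey by running HChaCha over the key and the first
 * 128 bits of the extended nonce, then stream with a real IV built from
 * the trailing counter and nonce bytes of req->iv.
 */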
static int xchacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 state[CHACHA_STATE_WORDS] __aligned(8);
	struct chacha_ctx subctx;
	u8 real_iv[16];

	chacha_init_generic(state, ctx->key, req->iv);

	if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
		kernel_fpu_begin();
		hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
		kernel_fpu_end();
	} else {
		hchacha_block_generic(state, subctx.key, ctx->nrounds);
	}
	subctx.nrounds = ctx->nrounds;

	memcpy(&real_iv[0], req->iv + 24, 8);
	memcpy(&real_iv[8], req->iv + 16, 8);
	return chacha_simd_stream_xor(req, &subctx, real_iv);
}

static struct skcipher_alg algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= chacha_simd,
		.decrypt		= chacha_simd,
	}, {
		.base.cra_name		= "xchacha20",
		.base.cra_driver_name	= "xchacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	}, {
		.base.cra_name		= "xchacha12",
		.base.cra_driver_name	= "xchacha12-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha12_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	},
};

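/*
 * Runtime CPU feature detection: SSSE3 is the baseline; the AVX2 path
 * additionally requires OS support for saving YMM state, and the
 * AVX-512VL path also needs AVX512BW for the kmovq instruction.
 */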
static int __init chacha_simd_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return 0;

	static_branch_enable(&chacha_use_simd);

	if (boot_cpu_has(X86_FEATURE_AVX) &&
	    boot_cpu_has(X86_FEATURE_AVX2) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		static_branch_enable(&chacha_use_avx2);

		if (IS_ENABLED(CONFIG_AS_AVX512) &&
		    boot_cpu_has(X86_FEATURE_AVX512VL) &&
		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
			static_branch_enable(&chacha_use_avx512vl);
	}
	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
}

static void __exit chacha_simd_mod_fini(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}

module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (x64 SIMD accelerated)");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-simd");

v5.4
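
The same file as of v5.4, before the conversion to the ChaCha library interface (chacha_crypt_arch/hchacha_block_arch) and static-key dispatch: SIMD selection here uses compile-time #ifdefs plus runtime bools, and the FPU is held across the whole request with periodic yields.
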
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * x64 SIMD accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/simd.h>

#define CHACHA_STATE_ALIGN 16

asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
					unsigned int len, int nrounds);
asmlinkage void hchacha_block_ssse3(const u32 *state, u32 *out, int nrounds);
#ifdef CONFIG_AS_AVX2
asmlinkage void chacha_2block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
static bool chacha_use_avx2;
#ifdef CONFIG_AS_AVX512
asmlinkage void chacha_2block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
static bool chacha_use_avx512vl;
#endif
#endif

static unsigned int chacha_advance(unsigned int len, unsigned int maxblocks)
{
	len = min(len, maxblocks * CHACHA_BLOCK_SIZE);
	return round_up(len, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
}

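/*
 * Same tiered dispatch (AVX-512VL, then AVX2, then SSSE3) as in later
 * kernels, but gated by CONFIG_AS_AVX2/CONFIG_AS_AVX512 #ifdefs and plain
 * bools rather than IS_ENABLED() checks and static keys.
 */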
static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds)
{
#ifdef CONFIG_AS_AVX2
#ifdef CONFIG_AS_AVX512
	if (chacha_use_avx512vl) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes) {
			chacha_2block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}
#endif
	if (chacha_use_avx2) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE) {
			chacha_2block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}
#endif
	while (bytes >= CHACHA_BLOCK_SIZE * 4) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		bytes -= CHACHA_BLOCK_SIZE * 4;
		src += CHACHA_BLOCK_SIZE * 4;
		dst += CHACHA_BLOCK_SIZE * 4;
		state[12] += 4;
	}
	if (bytes > CHACHA_BLOCK_SIZE) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12] += chacha_advance(bytes, 4);
		return;
	}
	if (bytes) {
		chacha_block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12]++;
	}
}

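/*
 * The caller holds the FPU across the whole walk; yield it roughly every
 * 4096 bytes so preemption is briefly re-enabled on long requests.
 * state_buf is over-allocated by two words so the state pointer can be
 * aligned to the 16 bytes the assembly expects.
 */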
static int chacha_simd_stream_xor(struct skcipher_walk *walk,
				  const struct chacha_ctx *ctx, const u8 *iv)
{
	u32 *state, state_buf[16 + 2] __aligned(8);
	int next_yield = 4096; /* bytes until next FPU yield */
	int err = 0;

	BUILD_BUG_ON(CHACHA_STATE_ALIGN != 16);
	state = PTR_ALIGN(state_buf + 0, CHACHA_STATE_ALIGN);

	crypto_chacha_init(state, ctx, iv);

	while (walk->nbytes > 0) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total) {
			nbytes = round_down(nbytes, walk->stride);
			next_yield -= nbytes;
		}

		chacha_dosimd(state, walk->dst.virt.addr, walk->src.virt.addr,
			      nbytes, ctx->nrounds);

		if (next_yield <= 0) {
			/* temporarily allow preemption */
			kernel_fpu_end();
			kernel_fpu_begin();
			next_yield = 4096;
		}

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}

	return err;
}

static int chacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
		return crypto_chacha_crypt(req);

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	kernel_fpu_begin();
	err = chacha_simd_stream_xor(&walk, ctx, req->iv);
	kernel_fpu_end();
	return err;
}

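/*
 * XChaCha: derive the HChaCha subkey inside the same FPU section, then
 * stream over the data with a real IV built from the trailing counter
 * and nonce bytes of req->iv.
 */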
static int xchacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	struct chacha_ctx subctx;
	u32 *state, state_buf[16 + 2] __aligned(8);
	u8 real_iv[16];
	int err;

	if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
		return crypto_xchacha_crypt(req);

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	BUILD_BUG_ON(CHACHA_STATE_ALIGN != 16);
	state = PTR_ALIGN(state_buf + 0, CHACHA_STATE_ALIGN);
	crypto_chacha_init(state, ctx, req->iv);

	kernel_fpu_begin();

	hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
	subctx.nrounds = ctx->nrounds;

	memcpy(&real_iv[0], req->iv + 24, 8);
	memcpy(&real_iv[8], req->iv + 16, 8);
	err = chacha_simd_stream_xor(&walk, &subctx, real_iv);

	kernel_fpu_end();

	return err;
}

static struct skcipher_alg algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= crypto_chacha20_setkey,
		.encrypt		= chacha_simd,
		.decrypt		= chacha_simd,
	}, {
		.base.cra_name		= "xchacha20",
		.base.cra_driver_name	= "xchacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= crypto_chacha20_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	}, {
		.base.cra_name		= "xchacha12",
		.base.cra_driver_name	= "xchacha12-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= crypto_chacha12_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	},
};

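/*
 * Feature detection at module load; unlike later kernels, without SSSE3
 * the module fails to load with -ENODEV instead of registering nothing.
 */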
static int __init chacha_simd_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return -ENODEV;

#ifdef CONFIG_AS_AVX2
	chacha_use_avx2 = boot_cpu_has(X86_FEATURE_AVX) &&
			  boot_cpu_has(X86_FEATURE_AVX2) &&
			  cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL);
#ifdef CONFIG_AS_AVX512
	chacha_use_avx512vl = chacha_use_avx2 &&
			      boot_cpu_has(X86_FEATURE_AVX512VL) &&
			      boot_cpu_has(X86_FEATURE_AVX512BW); /* kmovq */
#endif
#endif
	return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
}

static void __exit chacha_simd_mod_fini(void)
{
	crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}

module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (x64 SIMD accelerated)");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-simd");