/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>

#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32
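
/*
 * The 32-way routines below are provided by the AVX2 assembler
 * implementation in this module; the 16-way routines are reused from the
 * AVX/AES-NI implementation (made available through
 * <asm/crypto/camellia.h>).
 */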

/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);

asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
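
/*
 * Dispatch tables for the common glue code. Entries are ordered by
 * decreasing num_blocks: the helper picks the widest variant that still
 * fits the remaining data and falls back to the 2-way and 1-way assembler
 * versions for the tail. fpu_blocks_limit is the minimum request size (in
 * blocks) for which entering the FPU/SIMD context pays off.
 */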

static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};

static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};

static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};

static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 4,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};

static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
	}, {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};
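
/*
 * Thin blkcipher entry points feeding the dispatch tables above into the
 * generic 128-bit glue code.
 */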

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}
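
/*
 * CBC encryption is inherently serial (each block chains on the previous
 * ciphertext block), so it always uses the 1-way camellia_enc_blk; only
 * CBC decryption goes through the parallel dispatch table.
 */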
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}
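
/*
 * The FPU/SIMD context is only entered for requests of at least
 * CAMELLIA_AESNI_PARALLEL_BLOCKS blocks; smaller requests skip the cost of
 * saving and restoring the extended FPU state and run on the scalar
 * assembler implementation instead.
 */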
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}

static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}
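
/*
 * Callback context for lrw_crypt(): the callbacks below enable the FPU
 * lazily on first use and record that in fpu_enabled so the caller can
 * release it once at the end of the request.
 */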
struct crypt_priv {
	struct camellia_ctx *ctx;
	bool fpu_enabled;
};
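
/*
 * In-place ECB encryption helper used as the LRW crypt_fn: consumes one
 * 32-way and one 16-way chunk if the remaining length allows (lrw_crypt()
 * never passes more than tbuflen bytes at once), then 2-way chunks, and
 * finishes the tail one block at a time.
 */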
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_32way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
	}

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_enc_blk(ctx->ctx, srcdst, srcdst);
}
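
/* Decryption counterpart of encrypt_callback, using the *_dec routines. */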
static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_32way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS;
	}

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_dec_blk(ctx->ctx, srcdst, srcdst);
}
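
/*
 * LRW is implemented via the generic lrw_crypt() helper, with the ECB
 * callbacks above doing the in-place block processing.
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared because the callbacks may hold the
 * FPU context across the walk, and sleeping is not allowed while it is
 * held.
 */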
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
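
/*
 * XTS uses the generic 128-bit glue code. The tweak is always encrypted,
 * so camellia_enc_blk serves as the tweak function for both the encrypt
 * and the decrypt path.
 */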
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
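
/*
 * Ten registrations: the first five are internal, "__"-prefixed,
 * priority-0 synchronous blkciphers that assume the FPU is available; the
 * last five are the user-visible async wrappers built on ablk_helper,
 * which fall back to cryptd when the FPU cannot be used in the calling
 * context.
 */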
static struct crypto_alg cmll_algs[10] = { {
	.cra_name		= "__ecb-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-ecb-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-cbc-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-ctr-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct camellia_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= camellia_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-lrw-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_camellia_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= lrw_camellia_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-camellia-aesni-avx2",
	.cra_driver_name	= "__driver-xts-camellia-aesni-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= xts_camellia_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
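	/*
	 * User-visible algorithms follow. Priority 500 is meant to rank
	 * this module above the slower AVX and scalar Camellia
	 * implementations, so it is preferred on CPUs that support it.
	 */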
	.cra_name		= "ecb(camellia)",
	.cra_driver_name	= "ecb-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(camellia)",
	.cra_driver_name	= "cbc-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(camellia)",
	.cra_driver_name	= "ctr-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(camellia)",
	.cra_driver_name	= "lrw-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE +
					  CAMELLIA_BLOCK_SIZE,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(camellia)",
	.cra_driver_name	= "xts-camellia-aesni-avx2",
	.cra_priority		= 500,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAMELLIA_MIN_KEY_SIZE * 2,
			.max_keysize	= CAMELLIA_MAX_KEY_SIZE * 2,
			.ivsize		= CAMELLIA_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
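
/*
 * The module requires the AVX2, AVX, AES-NI and OSXSAVE CPU feature bits,
 * and XCR0 must show that the OS saves both SSE and YMM state; without
 * that, executing AVX2 instructions would fault.
 */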
static int __init camellia_aesni_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx2 || !cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
		pr_info("AVX2 or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX2 detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}

module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS("camellia");
MODULE_ALIAS("camellia-asm");