1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
4 *
5 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
6 */
7
8#include <crypto/algapi.h>
9#include <crypto/internal/simd.h>
10#include <linux/crypto.h>
11#include <linux/err.h>
12#include <linux/module.h>
13#include <linux/types.h>
14
15#include "camellia.h"
16#include "ecb_cbc_helpers.h"
17
18#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
19
/*
 * 16-way parallel cipher functions (avx/aes-ni), implemented in assembler.
 * Exported (GPL-only) so that other camellia glue modules can reuse them
 * — presumably the AVX2 variant; TODO confirm against the callers.
 */
asmlinkage void camellia_ecb_enc_16way(const void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

asmlinkage void camellia_ecb_dec_16way(const void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

asmlinkage void camellia_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);
29
30static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
31 unsigned int keylen)
32{
33 return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen);
34}
35
/*
 * ECB encryption: the helper macros walk the request and hand each chunk
 * to the widest implementation that fits — 16-way AVX/AES-NI first, then
 * the 2-way assembler routine, then one block at a time.  The ECB_BLOCK()
 * order (largest first) matters; see ecb_cbc_helpers.h.
 */
static int ecb_encrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_ecb_enc_16way);
	ECB_BLOCK(2, camellia_enc_blk_2way);
	ECB_BLOCK(1, camellia_enc_blk);
	ECB_WALK_END();
}
44
/*
 * ECB decryption: mirror image of ecb_encrypt(), dispatching to the
 * 16-way, 2-way and single-block decryption routines in that order.
 */
static int ecb_decrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
	ECB_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_ecb_dec_16way);
	ECB_BLOCK(2, camellia_dec_blk_2way);
	ECB_BLOCK(1, camellia_dec_blk);
	ECB_WALK_END();
}
53
/*
 * CBC encryption is inherently serial (each block is chained into the
 * next), so only the single-block assembler routine is used.  The -1
 * passed to CBC_WALK_START presumably disables the SIMD/FPU batching
 * threshold — confirm against ecb_cbc_helpers.h.
 */
static int cbc_encrypt(struct skcipher_request *req)
{
	CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
	CBC_ENC_BLOCK(camellia_enc_blk);
	CBC_WALK_END();
}
60
/*
 * CBC decryption can be parallelized (every ciphertext block is available
 * up front), so dispatch widest-first like the ECB paths: 16-way, then
 * 2-way, then single block.
 */
static int cbc_decrypt(struct skcipher_request *req)
{
	CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
	CBC_DEC_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_cbc_dec_16way);
	CBC_DEC_BLOCK(2, camellia_decrypt_cbc_2way);
	CBC_DEC_BLOCK(1, camellia_dec_blk);
	CBC_WALK_END();
}
69
/*
 * Internal (CRYPTO_ALG_INTERNAL) skcipher implementations.  The "__"
 * name prefix keeps them from being selected directly by users; the
 * public, usable algorithms are the SIMD wrappers registered around
 * them in camellia_aesni_init().
 */
static struct skcipher_alg camellia_algs[] = {
	{
		.base.cra_name = "__ecb(camellia)",
		.base.cra_driver_name = "__ecb-camellia-aesni",
		.base.cra_priority = 400,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct camellia_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAMELLIA_MIN_KEY_SIZE,
		.max_keysize = CAMELLIA_MAX_KEY_SIZE,
		.setkey = camellia_setkey,
		.encrypt = ecb_encrypt,
		.decrypt = ecb_decrypt,
	}, {
		.base.cra_name = "__cbc(camellia)",
		.base.cra_driver_name = "__cbc-camellia-aesni",
		.base.cra_priority = 400,
		.base.cra_flags = CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct camellia_ctx),
		.base.cra_module = THIS_MODULE,
		.min_keysize = CAMELLIA_MIN_KEY_SIZE,
		.max_keysize = CAMELLIA_MAX_KEY_SIZE,
		.ivsize = CAMELLIA_BLOCK_SIZE,	/* CBC needs a block-sized IV */
		.setkey = camellia_setkey,
		.encrypt = cbc_encrypt,
		.decrypt = cbc_decrypt,
	}
};
100
/* Handles for the SIMD wrapper algorithms, one per entry in camellia_algs. */
static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
102
/*
 * Module init: refuse to load unless the CPU advertises AVX, AES-NI and
 * OSXSAVE, and the OS has enabled saving of SSE and YMM register state;
 * then register the internal algorithms together with their SIMD
 * wrappers.  Returns 0 on success or -ENODEV when unsupported.
 */
static int __init camellia_aesni_init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX) ||
	    !boot_cpu_has(X86_FEATURE_AES) ||
	    !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	/* AVX may be present but unusable if the OS does not save YMM state. */
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(camellia_algs,
					      ARRAY_SIZE(camellia_algs),
					      camellia_simd_algs);
}
124
/* Module exit: unregister the algorithms and their SIMD wrappers. */
static void __exit camellia_aesni_fini(void)
{
	simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
				  camellia_simd_algs);
}
130
module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
/* "crypto-" prefixed aliases so only genuine crypto modules can be autoloaded. */
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");
1/*
2 * Glue Code for x86_64/AVX/AES-NI assembler optimized version of Camellia
3 *
4 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
5 *
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 *
11 */
12
13#include <linux/module.h>
14#include <linux/types.h>
15#include <linux/crypto.h>
16#include <linux/err.h>
17#include <crypto/ablk_helper.h>
18#include <crypto/algapi.h>
19#include <crypto/ctr.h>
20#include <crypto/lrw.h>
21#include <crypto/xts.h>
22#include <asm/xcr.h>
23#include <asm/xsave.h>
24#include <asm/crypto/camellia.h>
25#include <asm/crypto/glue_helper.h>
26
27#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
28
/*
 * 16-way parallel cipher functions (avx/aes-ni), implemented in assembler
 * and exported (GPL-only) for reuse by other camellia glue modules.
 * The CTR and XTS variants additionally take the 128-bit IV/tweak in @iv.
 */
asmlinkage void camellia_ecb_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_enc_16way);

asmlinkage void camellia_ecb_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);

asmlinkage void camellia_cbc_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src);
EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);

asmlinkage void camellia_ctr_16way(struct camellia_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_ctr_16way);

asmlinkage void camellia_xts_enc_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_enc_16way);

asmlinkage void camellia_xts_dec_16way(struct camellia_ctx *ctx, u8 *dst,
				       const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(camellia_xts_dec_16way);
53
/* XTS-encrypt one 128-bit block with tweak @iv via the scalar encryptor. */
void camellia_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_enc_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_enc);
60
/* XTS-decrypt one 128-bit block with tweak @iv via the scalar decryptor. */
void camellia_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(camellia_dec_blk));
}
EXPORT_SYMBOL_GPL(camellia_xts_dec);
67
/*
 * ECB encryption dispatch table for the glue helper: widest routine
 * first (16-way AVX, 2-way asm, then single block).  The FPU is only
 * used for batches of at least fpu_blocks_limit blocks.
 */
static const struct common_glue_ctx camellia_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
	} }
};
83
/* CTR dispatch table: 16-way AVX, 2-way asm, then single-block fallback. */
static const struct common_glue_ctx camellia_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
	} }
};
99
/* XTS encryption dispatch table: 16-way AVX, then single-block helper. */
static const struct common_glue_ctx camellia_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
	} }
};
112
/* ECB decryption dispatch table: 16-way AVX, 2-way asm, single block. */
static const struct common_glue_ctx camellia_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
	} }
};
128
/*
 * CBC decryption dispatch table.  Decryption can be parallelized
 * (unlike CBC encryption) because all ciphertext blocks are available.
 */
static const struct common_glue_ctx camellia_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
	}, {
		.num_blocks = 2,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
	} }
};
144
/* XTS decryption dispatch table: 16-way AVX, then single-block helper. */
static const struct common_glue_ctx camellia_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
	} }
};
157
/* blkcipher ECB encrypt entry point; dispatch via the camellia_enc table. */
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_enc, desc, dst, src, nbytes);
}
163
/* blkcipher ECB decrypt entry point; dispatch via the camellia_dec table. */
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&camellia_dec, desc, dst, src, nbytes);
}
169
/*
 * blkcipher CBC encrypt: serial by nature, so only the single-block
 * scalar encryptor is used (no dispatch table).
 */
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(camellia_enc_blk), desc,
				       dst, src, nbytes);
}
176
/* blkcipher CBC decrypt entry point; parallel dispatch via camellia_dec_cbc. */
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&camellia_dec_cbc, desc, dst, src,
				       nbytes);
}
183
/* blkcipher CTR entry point; CTR encrypt and decrypt are the same operation. */
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&camellia_ctr, desc, dst, src, nbytes);
}
189
/*
 * Open (or keep open) an FPU section when @nbytes is large enough to
 * benefit from the 16-way SIMD path; glue_fpu_begin() makes the call.
 * Returns the updated fpu_enabled state for the caller to track.
 */
static inline bool camellia_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAMELLIA_BLOCK_SIZE,
			      CAMELLIA_AESNI_PARALLEL_BLOCKS, NULL, fpu_enabled,
			      nbytes);
}
196
/* Close the FPU section if camellia_fpu_begin() opened one (no-op otherwise). */
static inline void camellia_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
201
/*
 * blkcipher ->setkey handler: expand @in_key into the per-tfm context.
 * crt_flags is passed so __camellia_setkey() can report bad key lengths.
 */
static int camellia_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	return __camellia_setkey(crypto_tfm_ctx(tfm), in_key, key_len,
				 &tfm->crt_flags);
}
208
/* Per-request state threaded through the LRW crypt callbacks. */
struct crypt_priv {
	struct camellia_ctx *ctx;	/* camellia key context */
	bool fpu_enabled;		/* true while an FPU section is open */
};
213
/*
 * LRW bulk-encrypt callback: ECB-encrypts @nbytes of data in place at
 * @srcdst, using the widest routine that still fits.  Only one 16-way
 * pass is attempted — presumably @nbytes is capped at 16 blocks by the
 * caller's tweak buffer (tbuflen); TODO confirm against lrw_crypt().
 * The FPU section is opened lazily here and closed by the outer caller.
 */
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_enc_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	/* Remaining partial batch, one block at a time. */
	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_enc_blk(ctx->ctx, srcdst, srcdst);
}
237
/*
 * LRW bulk-decrypt callback: mirror image of encrypt_callback(), using
 * the decryption routines.  Decrypts @nbytes in place at @srcdst.
 */
static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAMELLIA_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = camellia_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes >= CAMELLIA_AESNI_PARALLEL_BLOCKS * bsize) {
		camellia_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_AESNI_PARALLEL_BLOCKS;
	}

	while (nbytes >= CAMELLIA_PARALLEL_BLOCKS * bsize) {
		camellia_dec_blk_2way(ctx->ctx, srcdst, srcdst);
		srcdst += bsize * CAMELLIA_PARALLEL_BLOCKS;
		nbytes -= bsize * CAMELLIA_PARALLEL_BLOCKS;
	}

	/* Remaining partial batch, one block at a time. */
	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		camellia_dec_blk(ctx->ctx, srcdst, srcdst);
}
261
/*
 * LRW encrypt: lrw_crypt() handles the tweak arithmetic and calls back
 * into encrypt_callback() for the cipher work.  The FPU section is
 * opened lazily in the callback and closed here after the walk.
 */
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	/* Tweak scratch buffer: 16 blocks, matching the 16-way batch size. */
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	/* NOTE(review): sleeping is cleared, presumably because the walk may
	 * run inside an FPU section — confirm against lrw_crypt(). */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
287
/*
 * LRW decrypt: identical shape to lrw_encrypt(), but the cipher work is
 * done by decrypt_callback().
 */
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	/* Tweak scratch buffer: 16 blocks, matching the 16-way batch size. */
	be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->camellia_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	camellia_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
313
/*
 * XTS encrypt: the glue helper generates the tweak with the dedicated
 * tweak key (tweak_ctx) and encrypts the data with crypt_ctx via the
 * camellia_enc_xts dispatch table.
 */
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
323
/*
 * XTS decrypt.  Note the tweak is still generated with the *encryption*
 * routine (camellia_enc_blk) — in XTS the tweak is always encrypted,
 * even on the decryption path.
 */
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct camellia_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&camellia_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(camellia_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
333
334static struct crypto_alg cmll_algs[10] = { {
335 .cra_name = "__ecb-camellia-aesni",
336 .cra_driver_name = "__driver-ecb-camellia-aesni",
337 .cra_priority = 0,
338 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
339 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
340 .cra_ctxsize = sizeof(struct camellia_ctx),
341 .cra_alignmask = 0,
342 .cra_type = &crypto_blkcipher_type,
343 .cra_module = THIS_MODULE,
344 .cra_u = {
345 .blkcipher = {
346 .min_keysize = CAMELLIA_MIN_KEY_SIZE,
347 .max_keysize = CAMELLIA_MAX_KEY_SIZE,
348 .setkey = camellia_setkey,
349 .encrypt = ecb_encrypt,
350 .decrypt = ecb_decrypt,
351 },
352 },
353}, {
354 .cra_name = "__cbc-camellia-aesni",
355 .cra_driver_name = "__driver-cbc-camellia-aesni",
356 .cra_priority = 0,
357 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
358 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
359 .cra_ctxsize = sizeof(struct camellia_ctx),
360 .cra_alignmask = 0,
361 .cra_type = &crypto_blkcipher_type,
362 .cra_module = THIS_MODULE,
363 .cra_u = {
364 .blkcipher = {
365 .min_keysize = CAMELLIA_MIN_KEY_SIZE,
366 .max_keysize = CAMELLIA_MAX_KEY_SIZE,
367 .setkey = camellia_setkey,
368 .encrypt = cbc_encrypt,
369 .decrypt = cbc_decrypt,
370 },
371 },
372}, {
373 .cra_name = "__ctr-camellia-aesni",
374 .cra_driver_name = "__driver-ctr-camellia-aesni",
375 .cra_priority = 0,
376 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
377 .cra_blocksize = 1,
378 .cra_ctxsize = sizeof(struct camellia_ctx),
379 .cra_alignmask = 0,
380 .cra_type = &crypto_blkcipher_type,
381 .cra_module = THIS_MODULE,
382 .cra_u = {
383 .blkcipher = {
384 .min_keysize = CAMELLIA_MIN_KEY_SIZE,
385 .max_keysize = CAMELLIA_MAX_KEY_SIZE,
386 .ivsize = CAMELLIA_BLOCK_SIZE,
387 .setkey = camellia_setkey,
388 .encrypt = ctr_crypt,
389 .decrypt = ctr_crypt,
390 },
391 },
392}, {
393 .cra_name = "__lrw-camellia-aesni",
394 .cra_driver_name = "__driver-lrw-camellia-aesni",
395 .cra_priority = 0,
396 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
397 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
398 .cra_ctxsize = sizeof(struct camellia_lrw_ctx),
399 .cra_alignmask = 0,
400 .cra_type = &crypto_blkcipher_type,
401 .cra_module = THIS_MODULE,
402 .cra_exit = lrw_camellia_exit_tfm,
403 .cra_u = {
404 .blkcipher = {
405 .min_keysize = CAMELLIA_MIN_KEY_SIZE +
406 CAMELLIA_BLOCK_SIZE,
407 .max_keysize = CAMELLIA_MAX_KEY_SIZE +
408 CAMELLIA_BLOCK_SIZE,
409 .ivsize = CAMELLIA_BLOCK_SIZE,
410 .setkey = lrw_camellia_setkey,
411 .encrypt = lrw_encrypt,
412 .decrypt = lrw_decrypt,
413 },
414 },
415}, {
416 .cra_name = "__xts-camellia-aesni",
417 .cra_driver_name = "__driver-xts-camellia-aesni",
418 .cra_priority = 0,
419 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
420 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
421 .cra_ctxsize = sizeof(struct camellia_xts_ctx),
422 .cra_alignmask = 0,
423 .cra_type = &crypto_blkcipher_type,
424 .cra_module = THIS_MODULE,
425 .cra_u = {
426 .blkcipher = {
427 .min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
428 .max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
429 .ivsize = CAMELLIA_BLOCK_SIZE,
430 .setkey = xts_camellia_setkey,
431 .encrypt = xts_encrypt,
432 .decrypt = xts_decrypt,
433 },
434 },
435}, {
436 .cra_name = "ecb(camellia)",
437 .cra_driver_name = "ecb-camellia-aesni",
438 .cra_priority = 400,
439 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
440 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
441 .cra_ctxsize = sizeof(struct async_helper_ctx),
442 .cra_alignmask = 0,
443 .cra_type = &crypto_ablkcipher_type,
444 .cra_module = THIS_MODULE,
445 .cra_init = ablk_init,
446 .cra_exit = ablk_exit,
447 .cra_u = {
448 .ablkcipher = {
449 .min_keysize = CAMELLIA_MIN_KEY_SIZE,
450 .max_keysize = CAMELLIA_MAX_KEY_SIZE,
451 .setkey = ablk_set_key,
452 .encrypt = ablk_encrypt,
453 .decrypt = ablk_decrypt,
454 },
455 },
456}, {
457 .cra_name = "cbc(camellia)",
458 .cra_driver_name = "cbc-camellia-aesni",
459 .cra_priority = 400,
460 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
461 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
462 .cra_ctxsize = sizeof(struct async_helper_ctx),
463 .cra_alignmask = 0,
464 .cra_type = &crypto_ablkcipher_type,
465 .cra_module = THIS_MODULE,
466 .cra_init = ablk_init,
467 .cra_exit = ablk_exit,
468 .cra_u = {
469 .ablkcipher = {
470 .min_keysize = CAMELLIA_MIN_KEY_SIZE,
471 .max_keysize = CAMELLIA_MAX_KEY_SIZE,
472 .ivsize = CAMELLIA_BLOCK_SIZE,
473 .setkey = ablk_set_key,
474 .encrypt = __ablk_encrypt,
475 .decrypt = ablk_decrypt,
476 },
477 },
478}, {
479 .cra_name = "ctr(camellia)",
480 .cra_driver_name = "ctr-camellia-aesni",
481 .cra_priority = 400,
482 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
483 .cra_blocksize = 1,
484 .cra_ctxsize = sizeof(struct async_helper_ctx),
485 .cra_alignmask = 0,
486 .cra_type = &crypto_ablkcipher_type,
487 .cra_module = THIS_MODULE,
488 .cra_init = ablk_init,
489 .cra_exit = ablk_exit,
490 .cra_u = {
491 .ablkcipher = {
492 .min_keysize = CAMELLIA_MIN_KEY_SIZE,
493 .max_keysize = CAMELLIA_MAX_KEY_SIZE,
494 .ivsize = CAMELLIA_BLOCK_SIZE,
495 .setkey = ablk_set_key,
496 .encrypt = ablk_encrypt,
497 .decrypt = ablk_encrypt,
498 .geniv = "chainiv",
499 },
500 },
501}, {
502 .cra_name = "lrw(camellia)",
503 .cra_driver_name = "lrw-camellia-aesni",
504 .cra_priority = 400,
505 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
506 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
507 .cra_ctxsize = sizeof(struct async_helper_ctx),
508 .cra_alignmask = 0,
509 .cra_type = &crypto_ablkcipher_type,
510 .cra_module = THIS_MODULE,
511 .cra_init = ablk_init,
512 .cra_exit = ablk_exit,
513 .cra_u = {
514 .ablkcipher = {
515 .min_keysize = CAMELLIA_MIN_KEY_SIZE +
516 CAMELLIA_BLOCK_SIZE,
517 .max_keysize = CAMELLIA_MAX_KEY_SIZE +
518 CAMELLIA_BLOCK_SIZE,
519 .ivsize = CAMELLIA_BLOCK_SIZE,
520 .setkey = ablk_set_key,
521 .encrypt = ablk_encrypt,
522 .decrypt = ablk_decrypt,
523 },
524 },
525}, {
526 .cra_name = "xts(camellia)",
527 .cra_driver_name = "xts-camellia-aesni",
528 .cra_priority = 400,
529 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
530 .cra_blocksize = CAMELLIA_BLOCK_SIZE,
531 .cra_ctxsize = sizeof(struct async_helper_ctx),
532 .cra_alignmask = 0,
533 .cra_type = &crypto_ablkcipher_type,
534 .cra_module = THIS_MODULE,
535 .cra_init = ablk_init,
536 .cra_exit = ablk_exit,
537 .cra_u = {
538 .ablkcipher = {
539 .min_keysize = CAMELLIA_MIN_KEY_SIZE * 2,
540 .max_keysize = CAMELLIA_MAX_KEY_SIZE * 2,
541 .ivsize = CAMELLIA_BLOCK_SIZE,
542 .setkey = ablk_set_key,
543 .encrypt = ablk_encrypt,
544 .decrypt = ablk_decrypt,
545 },
546 },
547} };
548
/*
 * Module init: require AVX, AES-NI and OSXSAVE, then verify via XGETBV
 * that the OS actually enabled SSE and YMM state saving before
 * registering the algorithms.  Returns 0 or -ENODEV.
 */
static int __init camellia_aesni_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_aes || !cpu_has_osxsave) {
		pr_info("AVX or AES-NI instructions are not detected.\n");
		return -ENODEV;
	}

	/* AVX may be advertised yet unusable if the OS did not enable YMM. */
	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}
566
/* Module exit: unregister all algorithms registered at init. */
static void __exit camellia_aesni_fini(void)
{
	crypto_unregister_algs(cmll_algs, ARRAY_SIZE(cmll_algs));
}
571
572module_init(camellia_aesni_init);
573module_exit(camellia_aesni_fini);
574
575MODULE_LICENSE("GPL");
576MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX optimized");
577MODULE_ALIAS("camellia");
578MODULE_ALIAS("camellia-asm");