// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/internal/simd.h>

#include "ecb_cbc_helpers.h"

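/*
 * Number of 16-byte CAST6 blocks that one call into the 8-way AVX
 * assembler routines processes.
 */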
#define CAST6_PARALLEL_BLOCKS 8

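/*
 * 8-way AVX routines, implemented in the accompanying assembler file
 * (cast6-avx-x86_64-asm_64.S). They may only run between
 * kernel_fpu_begin() and kernel_fpu_end(); the ECB/CBC helpers below
 * take care of that.
 */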
asmlinkage void cast6_ecb_enc_8way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void cast6_ecb_dec_8way(const void *ctx, u8 *dst, const u8 *src);

asmlinkage void cast6_cbc_dec_8way(const void *ctx, u8 *dst, const u8 *src);

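/*
 * Thin adapter: the skcipher API presents a crypto_skcipher, while the
 * generic C implementation in crypto/cast6_generic.c takes the
 * underlying crypto_tfm.
 */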
static int cast6_setkey_skcipher(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	return cast6_setkey(&tfm->base, key, keylen);
}

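/*
 * The ECB_WALK_START()/ECB_BLOCK()/ECB_WALK_END() macros from
 * ecb_cbc_helpers.h walk the request and wrap the SIMD work in
 * kernel_fpu_begin()/end(): full groups of CAST6_PARALLEL_BLOCKS blocks
 * go through the 8-way AVX routine, and any tail is handled one block
 * at a time by the generic implementation.
 */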
static int ecb_encrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS);
	ECB_BLOCK(CAST6_PARALLEL_BLOCKS, cast6_ecb_enc_8way);
	ECB_BLOCK(1, __cast6_encrypt);
	ECB_WALK_END();
}

static int ecb_decrypt(struct skcipher_request *req)
{
	ECB_WALK_START(req, CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS);
	ECB_BLOCK(CAST6_PARALLEL_BLOCKS, cast6_ecb_dec_8way);
	ECB_BLOCK(1, __cast6_decrypt);
	ECB_WALK_END();
}

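/*
 * CBC encryption is inherently serial (each plaintext block is XORed
 * with the previous ciphertext block before encryption), so there is
 * nothing to parallelize; passing -1 tells the helper to skip the
 * kernel_fpu_begin()/end() section and use the C implementation only.
 */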
static int cbc_encrypt(struct skcipher_request *req)
{
	CBC_WALK_START(req, CAST6_BLOCK_SIZE, -1);
	CBC_ENC_BLOCK(__cast6_encrypt);
	CBC_WALK_END();
}

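/*
 * CBC decryption has no such dependency chain: all ciphertext blocks
 * are already available, so eight of them can be decrypted in parallel
 * before the previous-block XOR is applied.
 */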
static int cbc_decrypt(struct skcipher_request *req)
{
	CBC_WALK_START(req, CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS);
	CBC_DEC_BLOCK(CAST6_PARALLEL_BLOCKS, cast6_cbc_dec_8way);
	CBC_DEC_BLOCK(1, __cast6_decrypt);
	CBC_WALK_END();
}

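/*
 * The "__" name prefix and CRYPTO_ALG_INTERNAL mark these algorithms as
 * not directly usable: they require FPU context. The
 * simd_register_skciphers_compat() call below wraps each one in a simd
 * proxy ("ecb(cast6)", "cbc(cast6)") that only calls in here when the
 * FPU is usable and defers to an asynchronous cryptd worker otherwise.
 */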
static struct skcipher_alg cast6_algs[] = {
	{
		.base.cra_name		= "__ecb(cast6)",
		.base.cra_driver_name	= "__ecb-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(cast6)",
		.base.cra_driver_name	= "__cbc-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	},
};
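
/*
 * Usage sketch (not part of this driver): a kernel user reaches these
 * implementations through the regular skcipher API, e.g.
 *
 *	tfm = crypto_alloc_skcipher("cbc(cast6)", 0, 0);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *
 * The crypto core resolves "cbc(cast6)" to the simd wrapper around
 * "__cbc-cast6-avx" registered below, provided it has the highest
 * priority among the loaded implementations.
 */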

static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];

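/*
 * The AVX routines clobber XMM/YMM state, so refuse to load unless the
 * kernel saves and restores both the SSE and YMM xstate components on
 * context switch.
 */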
static int __init cast6_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(cast6_algs,
					      ARRAY_SIZE(cast6_algs),
					      cast6_simd_algs);
}

static void __exit cast6_exit(void)
{
	simd_unregister_skciphers(cast6_algs, ARRAY_SIZE(cast6_algs),
				  cast6_simd_algs);
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");