// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

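/*
 * Each key size gets its own table of assembly entry points (see
 * aes128_ops, aes192_ops and aes256_ops below), so the hot paths never
 * branch on key size: setkey picks a table once and everything else
 * calls through it.
 */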
struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

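/*
 * Note on the expanded_key_length values below: the AES key schedule
 * stores one 16-byte round key per round plus the initial whitening
 * key, i.e. 11 * 16 = 176 (0xb0) bytes for AES-128, 13 * 16 = 208
 * (0xd0) for AES-192 and 15 * 16 = 240 (0xf0) for AES-256.  The
 * decrypt paths use this to index the schedule from its end (see
 * key_end below).
 */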
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

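/*
 * Single-block operations backing the plain "aes" cipher.  Unlike the
 * bulk skcipher paths below, these do not pre-load the key schedule;
 * the per-key-size asm routines take the expanded key directly.
 */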
static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

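/*
 * All bulk handlers below share one shape: map the request with a
 * virtual-address walk (the "true" asks skcipher_walk_virt() for an
 * atomic, non-sleeping walk), load the key schedule once, crypt the
 * largest whole-block run of each step via round_down(), and return
 * the remainder to skcipher_walk_done().  The crypto opcodes appear to
 * run through the floating-point register file, which is presumably
 * why every path finishes with fprs_write(0) to clear the FPRS state.
 */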
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

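/*
 * CTR tail handling: for a final partial block, encrypt the counter
 * into a keystream buffer using the single-block ECB helper, XOR only
 * the remaining bytes into the destination, and advance the counter.
 */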
static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
				    walk.dst.virt.addr,
				    round_down(nbytes, AES_BLOCK_SIZE),
				    walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	fprs_write(0);
	return err;
}

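/*
 * The plain cipher only needs 4-byte alignment (alignmask 3) since its
 * asm works on u32 words, while the bulk routines consume u64 words
 * and therefore advertise an alignmask of 7.  SPARC_CR_OPCODE_PRIORITY
 * ranks these implementations above the generic C ones when the
 * opcodes are present.
 */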
static struct crypto_alg cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
};

static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "ctr-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
		.chunksize		= AES_BLOCK_SIZE,
	}
};

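/*
 * HWCAP_SPARC_CRYPTO only says the CPU exposes some crypto opcodes;
 * which primitives are actually implemented is reported per-bit by the
 * crypto feature register (read via %asr26 here), so both checks are
 * needed before claiming AES support.
 */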
static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int err;

	if (!sparc64_has_aes_opcode()) {
		pr_info("sparc64 aes opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
	err = crypto_register_alg(&cipher_alg);
	if (err)
		return err;
	err = crypto_register_skciphers(skcipher_algs,
					ARRAY_SIZE(skcipher_algs));
	if (err)
		crypto_unregister_alg(&cipher_alg);
	return err;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"