v4.17 (arch/sparc/crypto/aes_glue.c)
 
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};
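/*
 * Editor's note: aes_ops is a per-key-size dispatch table. aes_set_key()
 * below points ctx->ops at aes128_ops, aes192_ops or aes256_ops, so the
 * ECB/CBC/CTR handlers never branch on the key length themselves.
 * ctx->key holds the round-key schedule produced by
 * aes_sparc64_key_expand(), stored as u64 words for the asm helpers.
 */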

extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}
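/*
 * Editor's note: expanded_key_length is the size in bytes of the expanded
 * schedule: 0xb0 = 176 = 11 * 16 for AES-128 (10 rounds plus the initial
 * key), 0xd0 = 208 = 13 * 16 for AES-192, and 0xf0 = 240 = 15 * 16 for
 * AES-256. The decrypt paths use it to form key_end, one u64 past the
 * schedule, which the asm decrypt routines take as their key argument.
 */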

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE-1))
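/*
 * Editor's note: AES_BLOCK_SIZE is 16, so AES_BLOCK_MASK is ~0xf and
 * "nbytes & AES_BLOCK_MASK" rounds a length down to whole blocks,
 * e.g. 53 & ~0xf = 48 (three 16-byte blocks, 5 bytes left over).
 */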

static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}
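/*
 * Editor's note: the sparc64 AES opcodes work on the floating-point
 * register file, so each handler loads the round keys into FP registers
 * once per request and writes 0 to FPRS on the way out to mark the FP
 * state clean again. Clearing CRYPTO_TFM_REQ_MAY_SLEEP keeps the walk
 * atomic while those registers hold live key material.
 */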

static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ecb_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr, block_len);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_encrypt(&ctx->key[0],
					      (const u64 *)walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u64 *key_end;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes)) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->cbc_decrypt(key_end,
					      (const u64 *) walk.src.virt.addr,
					      (u64 *) walk.dst.virt.addr,
					      block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	fprs_write(0);

	return err;
}

static void ctr_crypt_final(struct crypto_sparc64_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
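/*
 * Editor's note: a trailing partial block is handled by encrypting the
 * counter block itself (reusing the one-block ECB helper), XOR-ing only
 * the remaining nbytes of keystream into the output, then bumping the
 * counter with crypto_inc().
 */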

static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		unsigned int block_len = nbytes & AES_BLOCK_MASK;

		if (likely(block_len)) {
			ctx->ops->ctr_crypt(&ctx->key[0],
					    (const u64 *)walk.src.virt.addr,
					    (u64 *) walk.dst.virt.addr,
					    block_len, (u64 *) walk.iv);
		}
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	fprs_write(0);
	return err;
}

static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
} };
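/*
 * Editor's note: .cra_blocksize is 1 for "ctr(aes)" because CTR turns
 * AES into a stream cipher that accepts arbitrary lengths. The
 * alignmask of 7 matches the u64-based asm interfaces above.
 */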

static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}
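/*
 * Editor's note: %asr26 is the Configuration Feature Register of SPARC
 * T4 and later chips; its CFR_AES bit advertises the hardware AES
 * opcodes. The HWCAP_SPARC_CRYPTO check guards the register read, since
 * older CPUs lack the CFR entirely.
 */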

static int __init aes_sparc64_mod_init(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(algs); i++)
		INIT_LIST_HEAD(&algs[i].cra_list);

	if (sparc64_has_aes_opcode()) {
		pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
		return crypto_register_algs(algs, ARRAY_SIZE(algs));
	}
	pr_info("sparc64 aes opcodes not available.\n");
	return -ENODEV;
}

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"
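The v5.14.15 version of the same file follows. The substantive changes between the two snapshots: an SPDX license tag was added; the ECB/CBC/CTR handlers were converted from the removed blkcipher interface to the skcipher API (skcipher_request, skcipher_walk, and a separate struct skcipher_alg table registered with crypto_register_skciphers()); the single-block helpers were renamed crypto_aes_encrypt()/crypto_aes_decrypt(), since the bare names now belong to the generic AES library in <crypto/aes.h>; aes_set_key() no longer sets CRYPTO_TFM_RES_BAD_KEY_LEN, a flag dropped from the API; and the INIT_LIST_HEAD() loop disappeared from module init because the core now initializes cra_list during registration.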
v5.14.15 (arch/sparc/crypto/aes_glue.c)
// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

struct aes_ops {
	void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
	void (*load_encrypt_keys)(const u64 *key);
	void (*load_decrypt_keys)(const u64 *key);
	void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len);
	void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
			    unsigned int len, u64 *iv);
	void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
			  unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
	struct aes_ops *ops;
	u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
	u32 key_length;
	u32 expanded_key_length;
};

extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
				    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
				    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
					u64 *output, unsigned int len,
					u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
				      u64 *output, unsigned int len,
				      u64 *iv);

static struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
	.ctr_crypt		= aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
	.ctr_crypt		= aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
	.ctr_crypt		= aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
				   unsigned int key_len);

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->expanded_key_length = 0xb0;
		ctx->ops = &aes128_ops;
		break;

	case AES_KEYSIZE_192:
		ctx->expanded_key_length = 0xd0;
		ctx->ops = &aes192_ops;
		break;

	case AES_KEYSIZE_256:
		ctx->expanded_key_length = 0xf0;
		ctx->ops = &aes256_ops;
		break;

	default:
		return -EINVAL;
	}

	aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
	ctx->key_length = key_len;

	return 0;
}

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
				unsigned int key_len)
{
	return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}
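/*
 * Editor's note: the skcipher API hands setkey a struct crypto_skcipher
 * rather than a bare crypto_tfm, so this wrapper just unwraps the tfm
 * and reuses aes_set_key(), which is still needed in its original form
 * by the single-block cipher_alg below.
 */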

static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}
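/*
 * Editor's note: round_down(nbytes, AES_BLOCK_SIZE) replaces the old
 * AES_BLOCK_MASK arithmetic, and skcipher_walk_virt(&walk, req, true)
 * requests an atomic walk up front instead of clearing
 * CRYPTO_TFM_REQ_MAY_SLEEP after the fact.
 */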

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE));
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	const u64 *key_end;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_decrypt_keys(&ctx->key[0]);
	key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
	while ((nbytes = walk.nbytes) != 0) {
		ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
				      walk.dst.virt.addr,
				      round_down(nbytes, AES_BLOCK_SIZE),
				      walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	fprs_write(0);

	return err;
}

static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
			      keystream, AES_BLOCK_SIZE);
	crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	ctx->ops->load_encrypt_keys(&ctx->key[0]);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
				    walk.dst.virt.addr,
				    round_down(nbytes, AES_BLOCK_SIZE),
				    walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	fprs_write(0);
	return err;
}

static struct crypto_alg cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= crypto_aes_encrypt,
			.cia_decrypt		= crypto_aes_decrypt
		}
	}
};

static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "ctr-aes-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct crypto_sparc64_aes_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= aes_set_key_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
		.chunksize		= AES_BLOCK_SIZE,
	}
};
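/*
 * Editor's note: .chunksize on the CTR entry records the underlying
 * keystream granularity even though cra_blocksize is 1; templates
 * stacked on top of CTR appear to use it to split requests on block
 * boundaries.
 */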

static bool __init sparc64_has_aes_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_AES))
		return false;

	return true;
}

static int __init aes_sparc64_mod_init(void)
{
	int err;

	if (!sparc64_has_aes_opcode()) {
		pr_info("sparc64 aes opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
	err = crypto_register_alg(&cipher_alg);
	if (err)
		return err;
	err = crypto_register_skciphers(skcipher_algs,
					ARRAY_SIZE(skcipher_algs));
	if (err)
		crypto_unregister_alg(&cipher_alg);
	return err;
}
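/*
 * Editor's note: registration is ordered so that a failure leaves no
 * partial state: if crypto_register_skciphers() fails, the
 * already-registered single-block alg is unregistered before the error
 * is returned.
 */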

static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"
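For context, here is a minimal sketch of how another kernel module could drive this driver through the generic skcipher API. Everything in it (the function name sparc64_aes_demo(), the key bytes, the one-block buffer) is an illustrative assumption and not part of the file above; only the crypto API calls themselves are real.

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Hypothetical caller, for illustration only: one synchronous CBC
 * encryption of a single block. "cbc(aes)" resolves to the highest
 * priority provider, which is cbc-aes-sparc64 on a CPU with the AES
 * opcodes. */
static int __maybe_unused sparc64_aes_demo(void)
{
	static const u8 key[AES_KEYSIZE_128] = { 0x01, 0x02 /* rest zero */ };
	u8 iv[AES_BLOCK_SIZE] = { };
	struct skcipher_request *req = NULL;
	struct crypto_skcipher *tfm;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	u8 *buf;
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Heap buffer: scatterlists must not point at the stack. */
	buf = kzalloc(AES_BLOCK_SIZE, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out;
	}

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out;
	}

	sg_init_one(&sg, buf, AES_BLOCK_SIZE);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, AES_BLOCK_SIZE, iv);

	/* Wait synchronously for a possibly asynchronous request. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
out:
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	kfree(buf);
	return err;
}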