v6.8
// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

#include "aesp8-ppc.h"

struct p8_aes_ctr_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
};

static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

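	/*
	 * Ask the API for a synchronous ctr(aes) implementation to fall
	 * back on: the mask excludes candidates that themselves need a
	 * fallback, as well as asynchronous ones.
	 */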
	fallback = crypto_alloc_skcipher("ctr(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

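	/*
	 * enable_kernel_vsx() grants the kernel temporary use of the VSX
	 * unit; preemption and page faults stay disabled so that state
	 * cannot be switched out or clobbered mid-operation.
	 */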
	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

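/*
 * CTR tail handling: encrypt the counter block once to produce a full
 * block of keystream, XOR only the trailing nbytes of it into dst, then
 * advance the counter.
 */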
static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
			     struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int p8_aes_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int ret;

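	/*
	 * If the SIMD unit is unusable in this context (e.g. in a hard
	 * interrupt), hand the whole request to the fallback tfm.
	 */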
	if (!crypto_simd_usable()) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return crypto_skcipher_encrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
					    walk.dst.virt.addr,
					    nbytes / AES_BLOCK_SIZE,
					    &ctx->enc_key, walk.iv);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

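		/*
		 * The assembly routine advances only a private 32-bit
		 * counter and leaves walk.iv untouched, so step the full
		 * 128-bit counter here once per block processed above.
		 */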
		do {
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
		} while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);

		ret = skcipher_walk_done(&walk, nbytes);
	}
	if (nbytes) {
		p8_aes_ctr_final(ctx, &walk);
		ret = skcipher_walk_done(&walk, 0);
	}
	return ret;
}

struct skcipher_alg p8_aes_ctr_alg = {
	.base.cra_name = "ctr(aes)",
	.base.cra_driver_name = "p8_aes_ctr",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.setkey = p8_aes_ctr_setkey,
	.encrypt = p8_aes_ctr_crypt,
	.decrypt = p8_aes_ctr_crypt,
	.init = p8_aes_ctr_init,
	.exit = p8_aes_ctr_exit,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.chunksize = AES_BLOCK_SIZE,
};
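
How a consumer drives this transform: the name "ctr(aes)" resolves through the crypto API by priority, and p8_aes_ctr registers at priority 2000, so it wins on capable Power 8 hardware. Below is a minimal, hedged sketch of an in-kernel caller, assuming <crypto/skcipher.h> and <linux/scatterlist.h>; the function name and buffer handling are illustrative, not part of this driver.

/*
 * Hedged usage sketch: encrypt one linear buffer in place with ctr(aes).
 * Passing CRYPTO_ALG_ASYNC as the mask requests a synchronous
 * implementation, so crypto_skcipher_encrypt() completes inline.
 */
static int ctr_aes_example(const u8 *key, unsigned int keylen,
			   u8 *buf, unsigned int len,
			   u8 iv[AES_BLOCK_SIZE])
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_skcipher("ctr(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* buf must not live on the stack: it is mapped via a scatterlist */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
	err = crypto_skcipher_encrypt(req);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}

CTR mode is its own inverse, which is why the driver wires .encrypt and .decrypt to the same function; the sketch above would decrypt just as well.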
v4.10.11
/**
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include "aesp8-ppc.h"

struct p8_aes_ctr_ctx {
	struct crypto_blkcipher *fallback;
	struct aes_key enc_key;
};

static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
	const char *alg;
	struct crypto_blkcipher *fallback;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!(alg = crypto_tfm_alg_name(tfm))) {
		printk(KERN_ERR "Failed to get algorithm name.\n");
		return -ENOENT;
	}

	fallback =
	    crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback)) {
		printk(KERN_ERR
		       "Failed to allocate transformation for '%s': %ld\n",
		       alg, PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}
	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));

	crypto_blkcipher_set_flags(
		fallback,
		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
	ctx->fallback = fallback;

	return 0;
}

static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback) {
		crypto_free_blkcipher(ctx->fallback);
		ctx->fallback = NULL;
	}
}

static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
			     unsigned int keylen)
{
	int ret;
	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();

	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
	return ret;
}

static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
			     struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();

	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
			    struct scatterlist *dst,
			    struct scatterlist *src, unsigned int nbytes)
{
	int ret;
	u64 inc;
	struct blkcipher_walk walk;
	struct p8_aes_ctr_ctx *ctx =
		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
	struct blkcipher_desc fallback_desc = {
		.tfm = ctx->fallback,
		.info = desc->info,
		.flags = desc->flags
	};

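	/*
	 * VSX register state cannot be used safely from interrupt
	 * context, so requests arriving there are delegated wholesale
	 * to the fallback blkcipher.
	 */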
	if (in_interrupt()) {
		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
					       nbytes);
	} else {
		blkcipher_walk_init(&walk, dst, src, nbytes);
		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
			pagefault_disable();
			enable_kernel_vsx();
			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
						    walk.dst.virt.addr,
						    (nbytes &
						     AES_BLOCK_MASK) /
						    AES_BLOCK_SIZE,
						    &ctx->enc_key,
						    walk.iv);
			disable_kernel_vsx();
			pagefault_enable();

			/*
			 * Advance the IV once per full block processed;
			 * the final partial block depends on it.
			 */
			inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
			if (inc > 0)
				while (inc--)
					crypto_inc(walk.iv, AES_BLOCK_SIZE);

			nbytes &= AES_BLOCK_SIZE - 1;
			ret = blkcipher_walk_done(desc, &walk, nbytes);
		}
		if (walk.nbytes) {
			p8_aes_ctr_final(ctx, &walk);
			ret = blkcipher_walk_done(desc, &walk, 0);
		}
	}

	return ret;
}

struct crypto_alg p8_aes_ctr_alg = {
	.cra_name = "ctr(aes)",
	.cra_driver_name = "p8_aes_ctr",
	.cra_module = THIS_MODULE,
	.cra_priority = 2000,
	.cra_type = &crypto_blkcipher_type,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
	.cra_alignmask = 0,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.cra_init = p8_aes_ctr_init,
	.cra_exit = p8_aes_ctr_exit,
	.cra_blkcipher = {
			  .ivsize = AES_BLOCK_SIZE,
			  .min_keysize = AES_MIN_KEY_SIZE,
			  .max_keysize = AES_MAX_KEY_SIZE,
			  .setkey = p8_aes_ctr_setkey,
			  .encrypt = p8_aes_ctr_crypt,
			  .decrypt = p8_aes_ctr_crypt,
	},
};
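
The same operation against the legacy blkcipher interface this version implements; a hedged sketch for a v4.x-era tree, with illustrative names and abbreviated error handling. crypto_blkcipher_encrypt() picks up the IV previously loaded with crypto_blkcipher_set_iv().

/*
 * Hedged sketch against the legacy blkcipher API (since removed from
 * the kernel): encrypt one linear buffer in place with ctr(aes).
 */
static int ctr_aes_blkcipher_example(const u8 *key, unsigned int keylen,
				     u8 *buf, unsigned int len,
				     const u8 iv[AES_BLOCK_SIZE])
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_blkcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out;

	/* Load the counter block; encrypt() reads it from the tfm */
	crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);

	desc.tfm = tfm;
	desc.flags = 0;
	sg_init_one(&sg, buf, len);
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
out:
	crypto_free_blkcipher(tfm);
	return err;
}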