// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for assembler optimized version of Blowfish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/blowfish.h>
#include <crypto/internal/skcipher.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>

/* regular block cipher functions */
asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
				   bool xor);
asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);

/* 4-way parallel cipher functions */
asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					const u8 *src, bool xor);
asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
				      const u8 *src);

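/*
 * The assembler entry points take an extra 'xor' flag selecting between two
 * output behaviours; the wrappers below always request the plain (xor ==
 * false) variant, which is the one used by the ECB and CBC paths in this
 * file.
 */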
static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, false);
}

static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

static int blowfish_setkey_skcipher(struct crypto_skcipher *tfm,
				    const u8 *key, unsigned int keylen)
{
	return blowfish_setkey(&tfm->base, key, keylen);
}

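/*
 * Common ECB helper: walk the request with the skcipher API and process as
 * many 4-block batches as possible with the parallel asm routine, then
 * finish the remaining blocks one at a time with the single-block routine.
 */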
static int ecb_crypt(struct skcipher_request *req,
		     void (*fn)(struct bf_ctx *, u8 *, const u8 *),
		     void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
{
	unsigned int bsize = BF_BLOCK_SIZE;
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		u8 *wsrc = walk.src.virt.addr;
		u8 *wdst = walk.dst.virt.addr;

		/* Process four block batch */
		if (nbytes >= bsize * 4) {
			do {
				fn_4way(ctx, wdst, wsrc);

				wsrc += bsize * 4;
				wdst += bsize * 4;
				nbytes -= bsize * 4;
			} while (nbytes >= bsize * 4);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return ecb_crypt(req, blowfish_enc_blk, blowfish_enc_blk_4way);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return ecb_crypt(req, blowfish_dec_blk, blowfish_dec_blk_4way);
}

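/*
 * CBC encryption is inherently serial (each block is XORed with the previous
 * ciphertext before being encrypted), so only the single-block asm routine
 * is used.  Blocks are handled as u64 loads/stores since BF_BLOCK_SIZE is
 * eight bytes.
 */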
static unsigned int __cbc_encrypt(struct bf_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 *iv = (u64 *)walk->iv;

	do {
		*dst = *src ^ *iv;
		blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u64 *)walk->iv = *iv;
	return nbytes;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		nbytes = __cbc_encrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

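/*
 * CBC decryption can be parallelized: blocks are processed from the end of
 * the walk back towards the start, so 4-block batches can be decrypted with
 * the parallel asm routine and then XORed with the preceding ciphertext
 * blocks.  The last ciphertext block is saved to become the next IV.
 */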
static unsigned int __cbc_decrypt(struct bf_ctx *ctx,
				  struct skcipher_walk *walk)
{
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ivs[4 - 1];
	u64 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process four block batch */
	if (nbytes >= bsize * 4) {
		do {
			nbytes -= bsize * 4 - bsize;
			src -= 4 - 1;
			dst -= 4 - 1;

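			/*
			 * Save the ciphertext of the first three blocks of
			 * this batch: it is needed as the chaining value for
			 * the following blocks and may be overwritten below
			 * when the request is processed in place (dst == src).
			 */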
			ivs[0] = src[0];
			ivs[1] = src[1];
			ivs[2] = src[2];

			blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);

			dst[1] ^= ivs[0];
			dst[2] ^= ivs[1];
			dst[3] ^= ivs[2];

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			*dst ^= *(src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 4);
	}

	/* Handle leftovers */
	for (;;) {
		blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		*dst ^= *(src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	*dst ^= *(u64 *)walk->iv;
	*(u64 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct bf_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		nbytes = __cbc_decrypt(ctx, &walk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

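/*
 * The single-block cipher is registered at priority 200 and the ECB/CBC
 * skcipher modes at 300, so the crypto API prefers them over lower-priority
 * generic implementations when this module is loaded.
 */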
static struct crypto_alg bf_cipher_alg = {
	.cra_name		= "blowfish",
	.cra_driver_name	= "blowfish-asm",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_alignmask		= 0,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= BF_MIN_KEY_SIZE,
			.cia_max_keysize	= BF_MAX_KEY_SIZE,
			.cia_setkey		= blowfish_setkey,
			.cia_encrypt		= blowfish_encrypt,
			.cia_decrypt		= blowfish_decrypt,
		}
	}
};

static struct skcipher_alg bf_skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(blowfish)",
		.base.cra_driver_name	= "ecb-blowfish-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= BF_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct bf_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= BF_MIN_KEY_SIZE,
		.max_keysize		= BF_MAX_KEY_SIZE,
		.setkey			= blowfish_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(blowfish)",
		.base.cra_driver_name	= "cbc-blowfish-asm",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= BF_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct bf_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= BF_MIN_KEY_SIZE,
		.max_keysize		= BF_MAX_KEY_SIZE,
		.ivsize			= BF_BLOCK_SIZE,
		.setkey			= blowfish_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	},
};

static bool is_blacklisted_cpu(void)
{
	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
		return false;

	if (boot_cpu_data.x86 == 0x0f) {
		/*
		 * On Pentium 4, blowfish-x86_64 is slower than the generic C
		 * implementation because it uses 64-bit rotates, which are
		 * really slow on P4.  Therefore blacklist P4s.
		 */
		return true;
	}

	return false;
}

static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");

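/*
 * Register the plain cipher first, then the ECB/CBC skciphers; if the
 * skcipher registration fails, the already-registered cipher is unregistered
 * again so the module load fails cleanly.
 */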
static int __init blowfish_init(void)
{
	int err;

	if (!force && is_blacklisted_cpu()) {
		printk(KERN_INFO
		       "blowfish-x86_64: performance on this CPU would be suboptimal: disabling blowfish-x86_64.\n");
		return -ENODEV;
	}

	err = crypto_register_alg(&bf_cipher_alg);
	if (err)
		return err;

	err = crypto_register_skciphers(bf_skcipher_algs,
					ARRAY_SIZE(bf_skcipher_algs));
	if (err)
		crypto_unregister_alg(&bf_cipher_alg);

	return err;
}

static void __exit blowfish_fini(void)
{
	crypto_unregister_alg(&bf_cipher_alg);
	crypto_unregister_skciphers(bf_skcipher_algs,
				    ARRAY_SIZE(bf_skcipher_algs));
}

module_init(blowfish_init);
module_exit(blowfish_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Blowfish Cipher Algorithm, asm optimized");
MODULE_ALIAS_CRYPTO("blowfish");
MODULE_ALIAS_CRYPTO("blowfish-asm");