// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared crypto simd helpers
 *
 * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 * Copyright (c) 2016 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Based on aesni-intel_glue.c by:
 *  Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 */

/*
 * Shared crypto SIMD helpers.  These functions dynamically create and register
 * an skcipher or AEAD algorithm that wraps another, internal algorithm.  The
 * wrapper ensures that the internal algorithm is only executed in a context
 * where SIMD instructions are usable, i.e. where may_use_simd() returns true.
 * If SIMD is already usable, the wrapper directly calls the internal algorithm.
 * Otherwise it defers execution to a workqueue via cryptd.
 *
 * This is an alternative to the internal algorithm implementing a fallback for
 * the !may_use_simd() case itself.
 *
 * Note that the wrapper algorithm is asynchronous, i.e. it has the
 * CRYPTO_ALG_ASYNC flag set.  Therefore it won't be found by users who
 * explicitly allocate a synchronous algorithm.
 */
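
/*
 * Usage sketch (illustrative only; the "__ecb(aes)" and example_* names below
 * are hypothetical): a driver defines its internal algorithms with
 * CRYPTO_ALG_INTERNAL set and "__"-prefixed names, then lets the helpers
 * register both the internal algorithms and their SIMD wrappers:
 *
 *	static struct skcipher_alg example_algs[] = {
 *		{ .base.cra_name = "__ecb(aes)", ... },
 *	};
 *	static struct simd_skcipher_alg *example_simd_algs[ARRAY_SIZE(example_algs)];
 *
 *	static int __init example_init(void)
 *	{
 *		return simd_register_skciphers_compat(example_algs,
 *						      ARRAY_SIZE(example_algs),
 *						      example_simd_algs);
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		simd_unregister_skciphers(example_algs, ARRAY_SIZE(example_algs),
 *					  example_simd_algs);
 *	}
 *
 * simd_register_aeads_compat() and simd_unregister_aeads() provide the same
 * flow for AEAD algorithms.
 */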

#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/preempt.h>
#include <asm/simd.h>

/* skcipher support */

struct simd_skcipher_alg {
	const char *ialg_name;
	struct skcipher_alg alg;
};

struct simd_skcipher_ctx {
	struct cryptd_skcipher *cryptd_tfm;
};

static int simd_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int key_len)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = &ctx->cryptd_tfm->base;

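	/*
	 * Propagate the request flags (CRYPTO_TFM_REQ_MASK, e.g.
	 * CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) to the cryptd tfm before keying it.
	 */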
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(tfm) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, key_len);
}

static int simd_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

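	/*
	 * Call the internal algorithm directly if SIMD is usable.  But if
	 * requests are already queued on cryptd and we are in atomic (e.g.
	 * softirq) context, go through cryptd anyway so that this request is
	 * not reordered ahead of the queued ones.
	 */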
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_encrypt(subreq);
}

static int simd_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_decrypt(subreq);
}

static void simd_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	cryptd_free_skcipher(ctx->cryptd_tfm);
}

static int simd_skcipher_init(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher *cryptd_tfm;
	struct simd_skcipher_alg *salg;
	struct skcipher_alg *alg;
	unsigned int reqsize;

	alg = crypto_skcipher_alg(tfm);
	salg = container_of(alg, struct simd_skcipher_alg, alg);

	cryptd_tfm = cryptd_alloc_skcipher(salg->ialg_name,
					   CRYPTO_ALG_INTERNAL,
					   CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

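	/*
	 * A request may be routed either to the cryptd tfm or directly to its
	 * child, so reserve room for whichever request context is larger.
	 */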
	reqsize = crypto_skcipher_reqsize(cryptd_skcipher_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_skcipher_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct skcipher_request);

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

struct simd_skcipher_alg *simd_skcipher_create_compat(struct skcipher_alg *ialg,
						      const char *algname,
						      const char *drvname,
						      const char *basename)
{
	struct simd_skcipher_alg *salg;
	struct skcipher_alg *alg;
	int err;

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC |
		(ialg->base.cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_skcipher_ctx);

	alg->ivsize = ialg->ivsize;
	alg->chunksize = ialg->chunksize;
	alg->min_keysize = ialg->min_keysize;
	alg->max_keysize = ialg->max_keysize;

	alg->init = simd_skcipher_init;
	alg->exit = simd_skcipher_exit;

	alg->setkey = simd_skcipher_setkey;
	alg->encrypt = simd_skcipher_encrypt;
	alg->decrypt = simd_skcipher_decrypt;

	err = crypto_register_skcipher(alg);
	if (err)
		goto out_free_salg;

out:
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(simd_skcipher_create_compat);

void simd_skcipher_free(struct simd_skcipher_alg *salg)
{
	crypto_unregister_skcipher(&salg->alg);
	kfree(salg);
}
EXPORT_SYMBOL_GPL(simd_skcipher_free);

int simd_register_skciphers_compat(struct skcipher_alg *algs, int count,
				   struct simd_skcipher_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_skcipher_alg *simd;

	err = crypto_register_skciphers(algs, count);
	if (err)
		return err;

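	/*
	 * Internal algorithms are expected to use "__"-prefixed names; each
	 * wrapper is registered under the name with the prefix dropped and
	 * looks up its internal algorithm by the full driver name.
	 */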
	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algs + i, algname,
						   drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}
	return 0;

err_unregister:
	simd_unregister_skciphers(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_skciphers_compat);

void simd_unregister_skciphers(struct skcipher_alg *algs, int count,
			       struct simd_skcipher_alg **simd_algs)
{
	int i;

	crypto_unregister_skciphers(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_skcipher_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_skciphers);

/* AEAD support */

struct simd_aead_alg {
	const char *ialg_name;
	struct aead_alg alg;
};

struct simd_aead_ctx {
	struct cryptd_aead *cryptd_tfm;
};

static int simd_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int key_len)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(tfm) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, key_len);
}

static int simd_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	return crypto_aead_setauthsize(child, authsize);
}

static int simd_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

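	/* Same dispatch rule as simd_skcipher_encrypt(). */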
	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_encrypt(subreq);
}

static int simd_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_decrypt(subreq);
}

static void simd_aead_exit(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	cryptd_free_aead(ctx->cryptd_tfm);
}

static int simd_aead_init(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm;
	struct simd_aead_alg *salg;
	struct aead_alg *alg;
	unsigned int reqsize;

	alg = crypto_aead_alg(tfm);
	salg = container_of(alg, struct simd_aead_alg, alg);

	cryptd_tfm = cryptd_alloc_aead(salg->ialg_name, CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

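	/* As in simd_skcipher_init(): size for either possible request target. */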
	reqsize = crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_aead_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct aead_request);

	crypto_aead_set_reqsize(tfm, reqsize);

	return 0;
}

static struct simd_aead_alg *simd_aead_create_compat(struct aead_alg *ialg,
						     const char *algname,
						     const char *drvname,
						     const char *basename)
{
	struct simd_aead_alg *salg;
	struct aead_alg *alg;
	int err;

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC |
		(ialg->base.cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_aead_ctx);

	alg->ivsize = ialg->ivsize;
	alg->maxauthsize = ialg->maxauthsize;
	alg->chunksize = ialg->chunksize;

	alg->init = simd_aead_init;
	alg->exit = simd_aead_exit;

	alg->setkey = simd_aead_setkey;
	alg->setauthsize = simd_aead_setauthsize;
	alg->encrypt = simd_aead_encrypt;
	alg->decrypt = simd_aead_decrypt;

	err = crypto_register_aead(alg);
	if (err)
		goto out_free_salg;

out:
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out;
}

static void simd_aead_free(struct simd_aead_alg *salg)
{
	crypto_unregister_aead(&salg->alg);
	kfree(salg);
}

int simd_register_aeads_compat(struct aead_alg *algs, int count,
			       struct simd_aead_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_aead_alg *simd;

	err = crypto_register_aeads(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_aead_create_compat(algs + i, algname,
					       drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}
	return 0;

err_unregister:
	simd_unregister_aeads(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_aeads_compat);

void simd_unregister_aeads(struct aead_alg *algs, int count,
			   struct simd_aead_alg **simd_algs)
{
	int i;

	crypto_unregister_aeads(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_aead_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_aeads);

MODULE_DESCRIPTION("Shared crypto SIMD helpers");
MODULE_LICENSE("GPL");