Linux kernel: crypto/shash.c
v6.2
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Synchronous Cryptographic Hash operations.
  4 *
  5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  6 */
  7
  8#include <crypto/scatterwalk.h>
  9#include <crypto/internal/hash.h>
 10#include <linux/err.h>
 11#include <linux/kernel.h>
 12#include <linux/module.h>
 13#include <linux/slab.h>
 14#include <linux/seq_file.h>
 15#include <linux/cryptouser.h>
 16#include <net/netlink.h>
 17#include <linux/compiler.h>
 18
 19#include "internal.h"
 20
 21#define MAX_SHASH_ALIGNMASK 63
 22
 23static const struct crypto_type crypto_shash_type;
 24
 25int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
 26		    unsigned int keylen)
 27{
 28	return -ENOSYS;
 29}
 30EXPORT_SYMBOL_GPL(shash_no_setkey);
 31
 32static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
 33				  unsigned int keylen)
 34{
 35	struct shash_alg *shash = crypto_shash_alg(tfm);
 36	unsigned long alignmask = crypto_shash_alignmask(tfm);
 37	unsigned long absize;
 38	u8 *buffer, *alignbuffer;
 39	int err;
 40
 41	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 42	buffer = kmalloc(absize, GFP_ATOMIC);
 43	if (!buffer)
 44		return -ENOMEM;
 45
 46	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 47	memcpy(alignbuffer, key, keylen);
 48	err = shash->setkey(tfm, alignbuffer, keylen);
 49	kfree_sensitive(buffer);
 50	return err;
 51}
 52
 53static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 54{
 55	if (crypto_shash_alg_needs_key(alg))
 56		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 57}
 58
 59int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 60			unsigned int keylen)
 61{
 62	struct shash_alg *shash = crypto_shash_alg(tfm);
 63	unsigned long alignmask = crypto_shash_alignmask(tfm);
 64	int err;
 65
 66	if ((unsigned long)key & alignmask)
 67		err = shash_setkey_unaligned(tfm, key, keylen);
 68	else
 69		err = shash->setkey(tfm, key, keylen);
 70
 71	if (unlikely(err)) {
 72		shash_set_needkey(tfm, shash);
 73		return err;
 74	}
 75
 76	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
 77	return 0;
 78}
 79EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 80
 81static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
 82				  unsigned int len)
 83{
 84	struct crypto_shash *tfm = desc->tfm;
 85	struct shash_alg *shash = crypto_shash_alg(tfm);
 86	unsigned long alignmask = crypto_shash_alignmask(tfm);
 87	unsigned int unaligned_len = alignmask + 1 -
 88				     ((unsigned long)data & alignmask);
 89	/*
 90	 * We cannot count on __aligned() working for large values:
 91	 * https://patchwork.kernel.org/patch/9507697/
 92	 */
 93	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
 94	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 95	int err;
 96
 97	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
 98		return -EINVAL;
 99
100	if (unaligned_len > len)
101		unaligned_len = len;
102
103	memcpy(buf, data, unaligned_len);
104	err = shash->update(desc, buf, unaligned_len);
105	memset(buf, 0, unaligned_len);
106
107	return err ?:
108	       shash->update(desc, data + unaligned_len, len - unaligned_len);
109}
110
111int crypto_shash_update(struct shash_desc *desc, const u8 *data,
112			unsigned int len)
113{
114	struct crypto_shash *tfm = desc->tfm;
115	struct shash_alg *shash = crypto_shash_alg(tfm);
116	unsigned long alignmask = crypto_shash_alignmask(tfm);
117
118	if ((unsigned long)data & alignmask)
119		return shash_update_unaligned(desc, data, len);
120
121	return shash->update(desc, data, len);
122}
123EXPORT_SYMBOL_GPL(crypto_shash_update);
124
125static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
126{
127	struct crypto_shash *tfm = desc->tfm;
128	unsigned long alignmask = crypto_shash_alignmask(tfm);
129	struct shash_alg *shash = crypto_shash_alg(tfm);
130	unsigned int ds = crypto_shash_digestsize(tfm);
131	/*
132	 * We cannot count on __aligned() working for large values:
133	 * https://patchwork.kernel.org/patch/9507697/
134	 */
135	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
136	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
137	int err;
138
139	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
140		return -EINVAL;
141
142	err = shash->final(desc, buf);
143	if (err)
144		goto out;
145
146	memcpy(out, buf, ds);
147
148out:
149	memset(buf, 0, ds);
150	return err;
151}
152
153int crypto_shash_final(struct shash_desc *desc, u8 *out)
154{
155	struct crypto_shash *tfm = desc->tfm;
156	struct shash_alg *shash = crypto_shash_alg(tfm);
157	unsigned long alignmask = crypto_shash_alignmask(tfm);
158
159	if ((unsigned long)out & alignmask)
160		return shash_final_unaligned(desc, out);
161
162	return shash->final(desc, out);
163}
164EXPORT_SYMBOL_GPL(crypto_shash_final);
165
166static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
167				 unsigned int len, u8 *out)
168{
169	return crypto_shash_update(desc, data, len) ?:
170	       crypto_shash_final(desc, out);
171}
172
173int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
174		       unsigned int len, u8 *out)
175{
176	struct crypto_shash *tfm = desc->tfm;
177	struct shash_alg *shash = crypto_shash_alg(tfm);
178	unsigned long alignmask = crypto_shash_alignmask(tfm);
179
180	if (((unsigned long)data | (unsigned long)out) & alignmask)
181		return shash_finup_unaligned(desc, data, len, out);
182
183	return shash->finup(desc, data, len, out);
184}
185EXPORT_SYMBOL_GPL(crypto_shash_finup);
186
187static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
188				  unsigned int len, u8 *out)
189{
190	return crypto_shash_init(desc) ?:
191	       crypto_shash_finup(desc, data, len, out);
192}
193
194int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
195			unsigned int len, u8 *out)
196{
197	struct crypto_shash *tfm = desc->tfm;
198	struct shash_alg *shash = crypto_shash_alg(tfm);
199	unsigned long alignmask = crypto_shash_alignmask(tfm);
200
201	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
202		return -ENOKEY;
203
204	if (((unsigned long)data | (unsigned long)out) & alignmask)
205		return shash_digest_unaligned(desc, data, len, out);
206
207	return shash->digest(desc, data, len, out);
208}
209EXPORT_SYMBOL_GPL(crypto_shash_digest);
210
211int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
212			    unsigned int len, u8 *out)
213{
214	SHASH_DESC_ON_STACK(desc, tfm);
215	int err;
216
217	desc->tfm = tfm;
218
219	err = crypto_shash_digest(desc, data, len, out);
220
221	shash_desc_zero(desc);
222
223	return err;
224}
225EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
226
227static int shash_default_export(struct shash_desc *desc, void *out)
228{
229	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
230	return 0;
231}
232
233static int shash_default_import(struct shash_desc *desc, const void *in)
234{
235	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
236	return 0;
237}
238
239static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
240			      unsigned int keylen)
241{
242	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
243
244	return crypto_shash_setkey(*ctx, key, keylen);
245}
246
247static int shash_async_init(struct ahash_request *req)
248{
249	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
250	struct shash_desc *desc = ahash_request_ctx(req);
251
252	desc->tfm = *ctx;
253
254	return crypto_shash_init(desc);
255}
256
257int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
258{
259	struct crypto_hash_walk walk;
260	int nbytes;
261
262	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
263	     nbytes = crypto_hash_walk_done(&walk, nbytes))
264		nbytes = crypto_shash_update(desc, walk.data, nbytes);
265
266	return nbytes;
267}
268EXPORT_SYMBOL_GPL(shash_ahash_update);
269
270static int shash_async_update(struct ahash_request *req)
271{
272	return shash_ahash_update(req, ahash_request_ctx(req));
273}
274
275static int shash_async_final(struct ahash_request *req)
276{
277	return crypto_shash_final(ahash_request_ctx(req), req->result);
278}
279
280int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
281{
282	struct crypto_hash_walk walk;
283	int nbytes;
284
285	nbytes = crypto_hash_walk_first(req, &walk);
286	if (!nbytes)
287		return crypto_shash_final(desc, req->result);
288
289	do {
290		nbytes = crypto_hash_walk_last(&walk) ?
291			 crypto_shash_finup(desc, walk.data, nbytes,
292					    req->result) :
293			 crypto_shash_update(desc, walk.data, nbytes);
294		nbytes = crypto_hash_walk_done(&walk, nbytes);
295	} while (nbytes > 0);
296
297	return nbytes;
298}
299EXPORT_SYMBOL_GPL(shash_ahash_finup);
300
301static int shash_async_finup(struct ahash_request *req)
302{
303	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
304	struct shash_desc *desc = ahash_request_ctx(req);
305
306	desc->tfm = *ctx;
307
308	return shash_ahash_finup(req, desc);
309}
310
311int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
312{
313	unsigned int nbytes = req->nbytes;
314	struct scatterlist *sg;
315	unsigned int offset;
316	int err;
317
318	if (nbytes &&
319	    (sg = req->src, offset = sg->offset,
320	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
321		void *data;
322
323		data = kmap_atomic(sg_page(sg));
324		err = crypto_shash_digest(desc, data + offset, nbytes,
325					  req->result);
326		kunmap_atomic(data);
327	} else
328		err = crypto_shash_init(desc) ?:
329		      shash_ahash_finup(req, desc);
330
331	return err;
332}
333EXPORT_SYMBOL_GPL(shash_ahash_digest);
334
335static int shash_async_digest(struct ahash_request *req)
336{
337	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
338	struct shash_desc *desc = ahash_request_ctx(req);
339
340	desc->tfm = *ctx;
341
342	return shash_ahash_digest(req, desc);
343}
344
345static int shash_async_export(struct ahash_request *req, void *out)
346{
347	return crypto_shash_export(ahash_request_ctx(req), out);
348}
349
350static int shash_async_import(struct ahash_request *req, const void *in)
351{
352	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
353	struct shash_desc *desc = ahash_request_ctx(req);
354
355	desc->tfm = *ctx;
356
357	return crypto_shash_import(desc, in);
358}
359
360static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
361{
362	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
363
364	crypto_free_shash(*ctx);
365}
366
367int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
368{
369	struct crypto_alg *calg = tfm->__crt_alg;
370	struct shash_alg *alg = __crypto_shash_alg(calg);
371	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
372	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
373	struct crypto_shash *shash;
374
375	if (!crypto_mod_get(calg))
376		return -EAGAIN;
377
378	shash = crypto_create_tfm(calg, &crypto_shash_type);
379	if (IS_ERR(shash)) {
380		crypto_mod_put(calg);
381		return PTR_ERR(shash);
382	}
383
384	*ctx = shash;
385	tfm->exit = crypto_exit_shash_ops_async;
386
387	crt->init = shash_async_init;
388	crt->update = shash_async_update;
389	crt->final = shash_async_final;
390	crt->finup = shash_async_finup;
391	crt->digest = shash_async_digest;
392	if (crypto_shash_alg_has_setkey(alg))
393		crt->setkey = shash_async_setkey;
394
395	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
396				    CRYPTO_TFM_NEED_KEY);
397
398	crt->export = shash_async_export;
399	crt->import = shash_async_import;
400
401	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
402
403	return 0;
404}
405
406static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
407{
408	struct crypto_shash *hash = __crypto_shash_cast(tfm);
409	struct shash_alg *alg = crypto_shash_alg(hash);
410
411	alg->exit_tfm(hash);
412}
413
414static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
415{
416	struct crypto_shash *hash = __crypto_shash_cast(tfm);
417	struct shash_alg *alg = crypto_shash_alg(hash);
418	int err;
419
420	hash->descsize = alg->descsize;
421
422	shash_set_needkey(hash, alg);
423
424	if (alg->exit_tfm)
425		tfm->exit = crypto_shash_exit_tfm;
426
427	if (!alg->init_tfm)
428		return 0;
429
430	err = alg->init_tfm(hash);
431	if (err)
432		return err;
433
434	/* ->init_tfm() may have increased the descsize. */
435	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
436		if (alg->exit_tfm)
437			alg->exit_tfm(hash);
438		return -EINVAL;
439	}
440
441	return 0;
442}
443
444static void crypto_shash_free_instance(struct crypto_instance *inst)
445{
446	struct shash_instance *shash = shash_instance(inst);
447
448	shash->free(shash);
449}
450
451#ifdef CONFIG_NET
452static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
453{
454	struct crypto_report_hash rhash;
455	struct shash_alg *salg = __crypto_shash_alg(alg);
456
457	memset(&rhash, 0, sizeof(rhash));
458
459	strscpy(rhash.type, "shash", sizeof(rhash.type));
460
461	rhash.blocksize = alg->cra_blocksize;
462	rhash.digestsize = salg->digestsize;
463
464	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
465}
466#else
467static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
468{
469	return -ENOSYS;
470}
471#endif
472
473static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
474	__maybe_unused;
475static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
476{
477	struct shash_alg *salg = __crypto_shash_alg(alg);
478
479	seq_printf(m, "type         : shash\n");
480	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
481	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
482}
483
484static const struct crypto_type crypto_shash_type = {
485	.extsize = crypto_alg_extsize,
486	.init_tfm = crypto_shash_init_tfm,
487	.free = crypto_shash_free_instance,
488#ifdef CONFIG_PROC_FS
489	.show = crypto_shash_show,
490#endif
491	.report = crypto_shash_report,
492	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
493	.maskset = CRYPTO_ALG_TYPE_MASK,
494	.type = CRYPTO_ALG_TYPE_SHASH,
495	.tfmsize = offsetof(struct crypto_shash, base),
496};
497
498int crypto_grab_shash(struct crypto_shash_spawn *spawn,
499		      struct crypto_instance *inst,
500		      const char *name, u32 type, u32 mask)
501{
502	spawn->base.frontend = &crypto_shash_type;
503	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
504}
505EXPORT_SYMBOL_GPL(crypto_grab_shash);
506
507struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
508					u32 mask)
509{
510	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
511}
512EXPORT_SYMBOL_GPL(crypto_alloc_shash);
513
514int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
515{
516	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
517}
518EXPORT_SYMBOL_GPL(crypto_has_shash);
519
520static int shash_prepare_alg(struct shash_alg *alg)
521{
522	struct crypto_alg *base = &alg->base;
523
524	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
525	    alg->descsize > HASH_MAX_DESCSIZE ||
526	    alg->statesize > HASH_MAX_STATESIZE)
527		return -EINVAL;
528
529	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
530		return -EINVAL;
531
532	if ((alg->export && !alg->import) || (alg->import && !alg->export))
533		return -EINVAL;
534
535	base->cra_type = &crypto_shash_type;
536	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
537	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
538
539	if (!alg->finup)
540		alg->finup = shash_finup_unaligned;
541	if (!alg->digest)
542		alg->digest = shash_digest_unaligned;
543	if (!alg->export) {
544		alg->export = shash_default_export;
545		alg->import = shash_default_import;
546		alg->statesize = alg->descsize;
547	}
548	if (!alg->setkey)
549		alg->setkey = shash_no_setkey;
550
551	return 0;
552}
553
554int crypto_register_shash(struct shash_alg *alg)
555{
556	struct crypto_alg *base = &alg->base;
557	int err;
558
559	err = shash_prepare_alg(alg);
560	if (err)
561		return err;
562
563	return crypto_register_alg(base);
564}
565EXPORT_SYMBOL_GPL(crypto_register_shash);
566
567void crypto_unregister_shash(struct shash_alg *alg)
568{
569	crypto_unregister_alg(&alg->base);
570}
571EXPORT_SYMBOL_GPL(crypto_unregister_shash);
572
573int crypto_register_shashes(struct shash_alg *algs, int count)
574{
575	int i, ret;
576
577	for (i = 0; i < count; i++) {
578		ret = crypto_register_shash(&algs[i]);
579		if (ret)
580			goto err;
581	}
582
583	return 0;
584
585err:
586	for (--i; i >= 0; --i)
587		crypto_unregister_shash(&algs[i]);
588
589	return ret;
590}
591EXPORT_SYMBOL_GPL(crypto_register_shashes);
592
593void crypto_unregister_shashes(struct shash_alg *algs, int count)
594{
595	int i;
596
597	for (i = count - 1; i >= 0; --i)
598		crypto_unregister_shash(&algs[i]);
599}
600EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
601
602int shash_register_instance(struct crypto_template *tmpl,
603			    struct shash_instance *inst)
604{
605	int err;
606
607	if (WARN_ON(!inst->free))
608		return -EINVAL;
609
610	err = shash_prepare_alg(&inst->alg);
611	if (err)
612		return err;
613
614	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
615}
616EXPORT_SYMBOL_GPL(shash_register_instance);
617
618void shash_free_singlespawn_instance(struct shash_instance *inst)
619{
620	crypto_drop_spawn(shash_instance_ctx(inst));
621	kfree(inst);
622}
623EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
624
625MODULE_LICENSE("GPL");
626MODULE_DESCRIPTION("Synchronous cryptographic hash type");
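
For reference, a minimal caller-side sketch of the v6.2 synchronous hash API exported above. It is hypothetical and not part of crypto/shash.c; it assumes a "sha256" shash implementation is registered, and the function and variable names outside the crypto_* calls are made up for illustration.

#include <crypto/hash.h>
#include <linux/err.h>

/* Hypothetical caller, not part of crypto/shash.c. */
static int example_sha256_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	/* Look up a registered "sha256" shash implementation. */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/*
	 * out must hold crypto_shash_digestsize(tfm) bytes.  The one-shot
	 * helper builds an on-stack descriptor and zeroes it afterwards.
	 */
	err = crypto_shash_tfm_digest(tfm, data, len, out);

	crypto_free_shash(tfm);
	return err;
}

For keyed algorithms such as "hmac(sha256)", crypto_shash_setkey() must succeed first; otherwise crypto_shash_digest() returns -ENOKEY because of the CRYPTO_TFM_NEED_KEY check above.
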
v3.5.6
  1/*
  2 * Synchronous Cryptographic Hash operations.
  3 *
  4 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  5 *
  6 * This program is free software; you can redistribute it and/or modify it
  7 * under the terms of the GNU General Public License as published by the Free
  8 * Software Foundation; either version 2 of the License, or (at your option)
  9 * any later version.
 10 *
 11 */
 12
 13#include <crypto/scatterwalk.h>
 14#include <crypto/internal/hash.h>
 15#include <linux/err.h>
 16#include <linux/kernel.h>
 17#include <linux/module.h>
 18#include <linux/slab.h>
 19#include <linux/seq_file.h>
 20#include <linux/cryptouser.h>
 21#include <net/netlink.h>
 22
 23#include "internal.h"
 24
 25static const struct crypto_type crypto_shash_type;
 26
 27static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
 28			   unsigned int keylen)
 29{
 30	return -ENOSYS;
 31}
 32
 33static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
 34				  unsigned int keylen)
 35{
 36	struct shash_alg *shash = crypto_shash_alg(tfm);
 37	unsigned long alignmask = crypto_shash_alignmask(tfm);
 38	unsigned long absize;
 39	u8 *buffer, *alignbuffer;
 40	int err;
 41
 42	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 43	buffer = kmalloc(absize, GFP_KERNEL);
 44	if (!buffer)
 45		return -ENOMEM;
 46
 47	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 48	memcpy(alignbuffer, key, keylen);
 49	err = shash->setkey(tfm, alignbuffer, keylen);
 50	kzfree(buffer);
 51	return err;
 52}
 53
 54int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 55			unsigned int keylen)
 56{
 57	struct shash_alg *shash = crypto_shash_alg(tfm);
 58	unsigned long alignmask = crypto_shash_alignmask(tfm);
 59
 60	if ((unsigned long)key & alignmask)
 61		return shash_setkey_unaligned(tfm, key, keylen);
 62
 63	return shash->setkey(tfm, key, keylen);
 64}
 65EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 66
 67static inline unsigned int shash_align_buffer_size(unsigned len,
 68						   unsigned long mask)
 69{
 70	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
 71}
 72
 73static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
 74				  unsigned int len)
 75{
 76	struct crypto_shash *tfm = desc->tfm;
 77	struct shash_alg *shash = crypto_shash_alg(tfm);
 78	unsigned long alignmask = crypto_shash_alignmask(tfm);
 79	unsigned int unaligned_len = alignmask + 1 -
 80				     ((unsigned long)data & alignmask);
 81	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
 82		__attribute__ ((aligned));
 83	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 84	int err;
 85
 86	if (unaligned_len > len)
 87		unaligned_len = len;
 88
 89	memcpy(buf, data, unaligned_len);
 90	err = shash->update(desc, buf, unaligned_len);
 91	memset(buf, 0, unaligned_len);
 92
 93	return err ?:
 94	       shash->update(desc, data + unaligned_len, len - unaligned_len);
 95}
 96
 97int crypto_shash_update(struct shash_desc *desc, const u8 *data,
 98			unsigned int len)
 99{
100	struct crypto_shash *tfm = desc->tfm;
101	struct shash_alg *shash = crypto_shash_alg(tfm);
102	unsigned long alignmask = crypto_shash_alignmask(tfm);
103
104	if ((unsigned long)data & alignmask)
105		return shash_update_unaligned(desc, data, len);
106
107	return shash->update(desc, data, len);
108}
109EXPORT_SYMBOL_GPL(crypto_shash_update);
110
111static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
112{
113	struct crypto_shash *tfm = desc->tfm;
114	unsigned long alignmask = crypto_shash_alignmask(tfm);
115	struct shash_alg *shash = crypto_shash_alg(tfm);
116	unsigned int ds = crypto_shash_digestsize(tfm);
117	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
118		__attribute__ ((aligned));
119	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
120	int err;
121
122	err = shash->final(desc, buf);
123	if (err)
124		goto out;
125
126	memcpy(out, buf, ds);
127
128out:
129	memset(buf, 0, ds);
130	return err;
131}
132
133int crypto_shash_final(struct shash_desc *desc, u8 *out)
134{
135	struct crypto_shash *tfm = desc->tfm;
136	struct shash_alg *shash = crypto_shash_alg(tfm);
137	unsigned long alignmask = crypto_shash_alignmask(tfm);
138
139	if ((unsigned long)out & alignmask)
140		return shash_final_unaligned(desc, out);
141
142	return shash->final(desc, out);
143}
144EXPORT_SYMBOL_GPL(crypto_shash_final);
145
146static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
147				 unsigned int len, u8 *out)
148{
149	return crypto_shash_update(desc, data, len) ?:
150	       crypto_shash_final(desc, out);
151}
152
153int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
154		       unsigned int len, u8 *out)
155{
156	struct crypto_shash *tfm = desc->tfm;
157	struct shash_alg *shash = crypto_shash_alg(tfm);
158	unsigned long alignmask = crypto_shash_alignmask(tfm);
159
160	if (((unsigned long)data | (unsigned long)out) & alignmask)
161		return shash_finup_unaligned(desc, data, len, out);
162
163	return shash->finup(desc, data, len, out);
164}
165EXPORT_SYMBOL_GPL(crypto_shash_finup);
166
167static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
168				  unsigned int len, u8 *out)
169{
170	return crypto_shash_init(desc) ?:
171	       crypto_shash_finup(desc, data, len, out);
172}
173
174int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
175			unsigned int len, u8 *out)
176{
177	struct crypto_shash *tfm = desc->tfm;
178	struct shash_alg *shash = crypto_shash_alg(tfm);
179	unsigned long alignmask = crypto_shash_alignmask(tfm);
180
181	if (((unsigned long)data | (unsigned long)out) & alignmask)
182		return shash_digest_unaligned(desc, data, len, out);
183
184	return shash->digest(desc, data, len, out);
185}
186EXPORT_SYMBOL_GPL(crypto_shash_digest);
187
188static int shash_default_export(struct shash_desc *desc, void *out)
189{
190	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
191	return 0;
192}
193
194static int shash_default_import(struct shash_desc *desc, const void *in)
195{
196	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
197	return 0;
198}
199
200static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
201			      unsigned int keylen)
202{
203	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
204
205	return crypto_shash_setkey(*ctx, key, keylen);
206}
207
208static int shash_async_init(struct ahash_request *req)
209{
210	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
211	struct shash_desc *desc = ahash_request_ctx(req);
212
213	desc->tfm = *ctx;
214	desc->flags = req->base.flags;
215
216	return crypto_shash_init(desc);
217}
218
219int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
220{
221	struct crypto_hash_walk walk;
222	int nbytes;
223
224	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
225	     nbytes = crypto_hash_walk_done(&walk, nbytes))
226		nbytes = crypto_shash_update(desc, walk.data, nbytes);
227
228	return nbytes;
229}
230EXPORT_SYMBOL_GPL(shash_ahash_update);
231
232static int shash_async_update(struct ahash_request *req)
233{
234	return shash_ahash_update(req, ahash_request_ctx(req));
235}
236
237static int shash_async_final(struct ahash_request *req)
238{
239	return crypto_shash_final(ahash_request_ctx(req), req->result);
240}
241
242int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
243{
244	struct crypto_hash_walk walk;
245	int nbytes;
246
247	nbytes = crypto_hash_walk_first(req, &walk);
248	if (!nbytes)
249		return crypto_shash_final(desc, req->result);
250
251	do {
252		nbytes = crypto_hash_walk_last(&walk) ?
253			 crypto_shash_finup(desc, walk.data, nbytes,
254					    req->result) :
255			 crypto_shash_update(desc, walk.data, nbytes);
256		nbytes = crypto_hash_walk_done(&walk, nbytes);
257	} while (nbytes > 0);
258
259	return nbytes;
260}
261EXPORT_SYMBOL_GPL(shash_ahash_finup);
262
263static int shash_async_finup(struct ahash_request *req)
264{
265	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
266	struct shash_desc *desc = ahash_request_ctx(req);
267
268	desc->tfm = *ctx;
269	desc->flags = req->base.flags;
270
271	return shash_ahash_finup(req, desc);
272}
273
274int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
275{
276	struct scatterlist *sg = req->src;
277	unsigned int offset = sg->offset;
278	unsigned int nbytes = req->nbytes;
279	int err;
280
281	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
282		void *data;
283
284		data = kmap_atomic(sg_page(sg));
285		err = crypto_shash_digest(desc, data + offset, nbytes,
286					  req->result);
287		kunmap_atomic(data);
288		crypto_yield(desc->flags);
289	} else
290		err = crypto_shash_init(desc) ?:
291		      shash_ahash_finup(req, desc);
292
293	return err;
294}
295EXPORT_SYMBOL_GPL(shash_ahash_digest);
296
297static int shash_async_digest(struct ahash_request *req)
298{
299	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
300	struct shash_desc *desc = ahash_request_ctx(req);
301
302	desc->tfm = *ctx;
303	desc->flags = req->base.flags;
304
305	return shash_ahash_digest(req, desc);
306}
307
308static int shash_async_export(struct ahash_request *req, void *out)
309{
310	return crypto_shash_export(ahash_request_ctx(req), out);
311}
312
313static int shash_async_import(struct ahash_request *req, const void *in)
314{
315	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
316	struct shash_desc *desc = ahash_request_ctx(req);
317
318	desc->tfm = *ctx;
319	desc->flags = req->base.flags;
320
321	return crypto_shash_import(desc, in);
322}
323
324static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
325{
326	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
327
328	crypto_free_shash(*ctx);
329}
330
331int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
332{
333	struct crypto_alg *calg = tfm->__crt_alg;
334	struct shash_alg *alg = __crypto_shash_alg(calg);
335	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
336	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
337	struct crypto_shash *shash;
338
339	if (!crypto_mod_get(calg))
340		return -EAGAIN;
341
342	shash = crypto_create_tfm(calg, &crypto_shash_type);
343	if (IS_ERR(shash)) {
344		crypto_mod_put(calg);
345		return PTR_ERR(shash);
346	}
347
348	*ctx = shash;
349	tfm->exit = crypto_exit_shash_ops_async;
350
351	crt->init = shash_async_init;
352	crt->update = shash_async_update;
353	crt->final = shash_async_final;
354	crt->finup = shash_async_finup;
355	crt->digest = shash_async_digest;
356
357	if (alg->setkey)
358		crt->setkey = shash_async_setkey;
359	if (alg->export)
360		crt->export = shash_async_export;
361	if (alg->import)
362		crt->import = shash_async_import;
363
364	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
365
366	return 0;
367}
368
369static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
370			       unsigned int keylen)
371{
372	struct shash_desc **descp = crypto_hash_ctx(tfm);
373	struct shash_desc *desc = *descp;
374
375	return crypto_shash_setkey(desc->tfm, key, keylen);
376}
377
378static int shash_compat_init(struct hash_desc *hdesc)
379{
380	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
381	struct shash_desc *desc = *descp;
382
383	desc->flags = hdesc->flags;
384
385	return crypto_shash_init(desc);
386}
387
388static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
389			       unsigned int len)
390{
391	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
392	struct shash_desc *desc = *descp;
393	struct crypto_hash_walk walk;
394	int nbytes;
395
396	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
397	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
398		nbytes = crypto_shash_update(desc, walk.data, nbytes);
399
400	return nbytes;
401}
402
403static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
404{
405	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
406
407	return crypto_shash_final(*descp, out);
408}
409
410static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
411			       unsigned int nbytes, u8 *out)
412{
413	unsigned int offset = sg->offset;
414	int err;
415
416	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
417		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
418		struct shash_desc *desc = *descp;
419		void *data;
420
421		desc->flags = hdesc->flags;
422
423		data = kmap_atomic(sg_page(sg));
424		err = crypto_shash_digest(desc, data + offset, nbytes, out);
425		kunmap_atomic(data);
426		crypto_yield(desc->flags);
427		goto out;
428	}
429
430	err = shash_compat_init(hdesc);
431	if (err)
432		goto out;
433
434	err = shash_compat_update(hdesc, sg, nbytes);
435	if (err)
436		goto out;
437
438	err = shash_compat_final(hdesc, out);
439
440out:
441	return err;
442}
443
444static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
445{
446	struct shash_desc **descp = crypto_tfm_ctx(tfm);
447	struct shash_desc *desc = *descp;
448
449	crypto_free_shash(desc->tfm);
450	kzfree(desc);
451}
452
453static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
454{
455	struct hash_tfm *crt = &tfm->crt_hash;
456	struct crypto_alg *calg = tfm->__crt_alg;
457	struct shash_alg *alg = __crypto_shash_alg(calg);
458	struct shash_desc **descp = crypto_tfm_ctx(tfm);
459	struct crypto_shash *shash;
460	struct shash_desc *desc;
461
462	if (!crypto_mod_get(calg))
463		return -EAGAIN;
464
465	shash = crypto_create_tfm(calg, &crypto_shash_type);
466	if (IS_ERR(shash)) {
467		crypto_mod_put(calg);
468		return PTR_ERR(shash);
469	}
470
471	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
472		       GFP_KERNEL);
473	if (!desc) {
474		crypto_free_shash(shash);
475		return -ENOMEM;
476	}
477
478	*descp = desc;
479	desc->tfm = shash;
480	tfm->exit = crypto_exit_shash_ops_compat;
481
482	crt->init = shash_compat_init;
483	crt->update = shash_compat_update;
484	crt->final  = shash_compat_final;
485	crt->digest = shash_compat_digest;
486	crt->setkey = shash_compat_setkey;
487
488	crt->digestsize = alg->digestsize;
489
490	return 0;
491}
492
493static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
494{
495	switch (mask & CRYPTO_ALG_TYPE_MASK) {
496	case CRYPTO_ALG_TYPE_HASH_MASK:
497		return crypto_init_shash_ops_compat(tfm);
498	}
499
500	return -EINVAL;
501}
502
503static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
504					 u32 mask)
505{
506	switch (mask & CRYPTO_ALG_TYPE_MASK) {
507	case CRYPTO_ALG_TYPE_HASH_MASK:
508		return sizeof(struct shash_desc *);
509	}
510
511	return 0;
512}
513
514static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
515{
516	struct crypto_shash *hash = __crypto_shash_cast(tfm);
517
518	hash->descsize = crypto_shash_alg(hash)->descsize;
519	return 0;
520}
521
522static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
523{
524	return alg->cra_ctxsize;
525}
526
527#ifdef CONFIG_NET
528static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
529{
530	struct crypto_report_hash rhash;
531	struct shash_alg *salg = __crypto_shash_alg(alg);
532
533	snprintf(rhash.type, CRYPTO_MAX_ALG_NAME, "%s", "shash");
534	rhash.blocksize = alg->cra_blocksize;
535	rhash.digestsize = salg->digestsize;
536
537	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
538		    sizeof(struct crypto_report_hash), &rhash))
539		goto nla_put_failure;
540	return 0;
541
542nla_put_failure:
543	return -EMSGSIZE;
544}
545#else
546static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
547{
548	return -ENOSYS;
549}
550#endif
551
552static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
553	__attribute__ ((unused));
554static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
555{
556	struct shash_alg *salg = __crypto_shash_alg(alg);
557
558	seq_printf(m, "type         : shash\n");
559	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
560	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
561}
562
563static const struct crypto_type crypto_shash_type = {
564	.ctxsize = crypto_shash_ctxsize,
565	.extsize = crypto_shash_extsize,
566	.init = crypto_init_shash_ops,
567	.init_tfm = crypto_shash_init_tfm,
568#ifdef CONFIG_PROC_FS
569	.show = crypto_shash_show,
570#endif
571	.report = crypto_shash_report,
572	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
573	.maskset = CRYPTO_ALG_TYPE_MASK,
574	.type = CRYPTO_ALG_TYPE_SHASH,
575	.tfmsize = offsetof(struct crypto_shash, base),
576};
577
578struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
579					u32 mask)
580{
581	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
582}
583EXPORT_SYMBOL_GPL(crypto_alloc_shash);
584
585static int shash_prepare_alg(struct shash_alg *alg)
586{
587	struct crypto_alg *base = &alg->base;
588
589	if (alg->digestsize > PAGE_SIZE / 8 ||
590	    alg->descsize > PAGE_SIZE / 8 ||
591	    alg->statesize > PAGE_SIZE / 8)
592		return -EINVAL;
593
594	base->cra_type = &crypto_shash_type;
595	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
596	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
597
598	if (!alg->finup)
599		alg->finup = shash_finup_unaligned;
600	if (!alg->digest)
601		alg->digest = shash_digest_unaligned;
602	if (!alg->export) {
603		alg->export = shash_default_export;
604		alg->import = shash_default_import;
605		alg->statesize = alg->descsize;
606	}
607	if (!alg->setkey)
608		alg->setkey = shash_no_setkey;
609
610	return 0;
611}
612
613int crypto_register_shash(struct shash_alg *alg)
614{
615	struct crypto_alg *base = &alg->base;
616	int err;
617
618	err = shash_prepare_alg(alg);
619	if (err)
620		return err;
621
622	return crypto_register_alg(base);
623}
624EXPORT_SYMBOL_GPL(crypto_register_shash);
625
626int crypto_unregister_shash(struct shash_alg *alg)
627{
628	return crypto_unregister_alg(&alg->base);
629}
630EXPORT_SYMBOL_GPL(crypto_unregister_shash);
631
632int shash_register_instance(struct crypto_template *tmpl,
633			    struct shash_instance *inst)
634{
635	int err;
636
637	err = shash_prepare_alg(&inst->alg);
638	if (err)
639		return err;
640
641	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
642}
643EXPORT_SYMBOL_GPL(shash_register_instance);
644
645void shash_free_instance(struct crypto_instance *inst)
646{
647	crypto_drop_spawn(crypto_instance_ctx(inst));
648	kfree(shash_instance(inst));
649}
650EXPORT_SYMBOL_GPL(shash_free_instance);
651
652int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
653			    struct shash_alg *alg,
654			    struct crypto_instance *inst)
655{
656	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
657				  &crypto_shash_type);
658}
659EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
660
661struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
662{
663	struct crypto_alg *alg;
664
665	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
666	return IS_ERR(alg) ? ERR_CAST(alg) :
667	       container_of(alg, struct shash_alg, base);
668}
669EXPORT_SYMBOL_GPL(shash_attr_alg);
670
671MODULE_LICENSE("GPL");
672MODULE_DESCRIPTION("Synchronous cryptographic hash type");
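
For comparison, an equivalent caller-side sketch against this older v3.5.6 API. It is hypothetical and not part of crypto/shash.c: the crypto_shash_tfm_digest() helper and SHASH_DESC_ON_STACK() do not exist yet at this point, so the descriptor is allocated by hand and desc->flags must be initialised by the caller.

#include <crypto/hash.h>
#include <linux/err.h>
#include <linux/slab.h>

/* Hypothetical caller against the v3.5.6 API, not part of crypto/shash.c. */
static int example_sha256_digest_old(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Room for the descriptor plus the algorithm's per-request state. */
	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}

	desc->tfm = tfm;
	desc->flags = 0;	/* or CRYPTO_TFM_REQ_MAY_SLEEP */

	/* The incremental init/update/final path implemented above. */
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, data, len) ?:
	      crypto_shash_final(desc, out);

	kzfree(desc);
	crypto_free_shash(tfm);
	return err;
}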