crypto/shash.c (Linux v6.2)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Synchronous Cryptographic Hash operations.
  4 *
  5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  6 */
  7
  8#include <crypto/scatterwalk.h>
  9#include <crypto/internal/hash.h>
 10#include <linux/err.h>
 11#include <linux/kernel.h>
 12#include <linux/module.h>
 13#include <linux/slab.h>
 14#include <linux/seq_file.h>
 15#include <linux/cryptouser.h>
 16#include <net/netlink.h>
 17#include <linux/compiler.h>
 18
 19#include "internal.h"
 20
 21#define MAX_SHASH_ALIGNMASK 63
 22
 23static const struct crypto_type crypto_shash_type;
 24
 25int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
 26		    unsigned int keylen)
 27{
 28	return -ENOSYS;
 29}
 30EXPORT_SYMBOL_GPL(shash_no_setkey);
 31
 32static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
 33				  unsigned int keylen)
 34{
 35	struct shash_alg *shash = crypto_shash_alg(tfm);
 36	unsigned long alignmask = crypto_shash_alignmask(tfm);
 37	unsigned long absize;
 38	u8 *buffer, *alignbuffer;
 39	int err;
 40
 41	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 42	buffer = kmalloc(absize, GFP_ATOMIC);
 43	if (!buffer)
 44		return -ENOMEM;
 45
 46	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 47	memcpy(alignbuffer, key, keylen);
 48	err = shash->setkey(tfm, alignbuffer, keylen);
 49	kfree_sensitive(buffer);
 50	return err;
 51}
 52
 53static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 54{
 55	if (crypto_shash_alg_needs_key(alg))
 56		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 57}
 58
 59int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 60			unsigned int keylen)
 61{
 62	struct shash_alg *shash = crypto_shash_alg(tfm);
 63	unsigned long alignmask = crypto_shash_alignmask(tfm);
 64	int err;
 65
 66	if ((unsigned long)key & alignmask)
 67		err = shash_setkey_unaligned(tfm, key, keylen);
 68	else
 69		err = shash->setkey(tfm, key, keylen);
 70
 71	if (unlikely(err)) {
 72		shash_set_needkey(tfm, shash);
 73		return err;
 74	}
 75
 76	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
 77	return 0;
 78}
 79EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 80
 81static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
 82				  unsigned int len)
 83{
 84	struct crypto_shash *tfm = desc->tfm;
 85	struct shash_alg *shash = crypto_shash_alg(tfm);
 86	unsigned long alignmask = crypto_shash_alignmask(tfm);
 87	unsigned int unaligned_len = alignmask + 1 -
 88				     ((unsigned long)data & alignmask);
 89	/*
 90	 * We cannot count on __aligned() working for large values:
 91	 * https://patchwork.kernel.org/patch/9507697/
 92	 */
 93	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
 94	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 95	int err;
 96
 97	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
 98		return -EINVAL;
 99
100	if (unaligned_len > len)
101		unaligned_len = len;
102
103	memcpy(buf, data, unaligned_len);
104	err = shash->update(desc, buf, unaligned_len);
105	memset(buf, 0, unaligned_len);
106
107	return err ?:
108	       shash->update(desc, data + unaligned_len, len - unaligned_len);
109}
110
111int crypto_shash_update(struct shash_desc *desc, const u8 *data,
112			unsigned int len)
113{
114	struct crypto_shash *tfm = desc->tfm;
115	struct shash_alg *shash = crypto_shash_alg(tfm);
116	unsigned long alignmask = crypto_shash_alignmask(tfm);
117
118	if ((unsigned long)data & alignmask)
119		return shash_update_unaligned(desc, data, len);
120
121	return shash->update(desc, data, len);
122}
123EXPORT_SYMBOL_GPL(crypto_shash_update);
124
125static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
126{
127	struct crypto_shash *tfm = desc->tfm;
128	unsigned long alignmask = crypto_shash_alignmask(tfm);
129	struct shash_alg *shash = crypto_shash_alg(tfm);
130	unsigned int ds = crypto_shash_digestsize(tfm);
131	/*
132	 * We cannot count on __aligned() working for large values:
133	 * https://patchwork.kernel.org/patch/9507697/
134	 */
135	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
136	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
137	int err;
138
139	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
140		return -EINVAL;
141
142	err = shash->final(desc, buf);
143	if (err)
144		goto out;
145
146	memcpy(out, buf, ds);
147
148out:
149	memset(buf, 0, ds);
150	return err;
151}
152
153int crypto_shash_final(struct shash_desc *desc, u8 *out)
154{
155	struct crypto_shash *tfm = desc->tfm;
156	struct shash_alg *shash = crypto_shash_alg(tfm);
157	unsigned long alignmask = crypto_shash_alignmask(tfm);
158
159	if ((unsigned long)out & alignmask)
160		return shash_final_unaligned(desc, out);
161
162	return shash->final(desc, out);
163}
164EXPORT_SYMBOL_GPL(crypto_shash_final);
165
166static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
167				 unsigned int len, u8 *out)
168{
169	return crypto_shash_update(desc, data, len) ?:
170	       crypto_shash_final(desc, out);
171}
172
173int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
174		       unsigned int len, u8 *out)
175{
176	struct crypto_shash *tfm = desc->tfm;
177	struct shash_alg *shash = crypto_shash_alg(tfm);
178	unsigned long alignmask = crypto_shash_alignmask(tfm);
179
180	if (((unsigned long)data | (unsigned long)out) & alignmask)
181		return shash_finup_unaligned(desc, data, len, out);
182
183	return shash->finup(desc, data, len, out);
184}
185EXPORT_SYMBOL_GPL(crypto_shash_finup);
186
187static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
188				  unsigned int len, u8 *out)
189{
190	return crypto_shash_init(desc) ?:
191	       crypto_shash_finup(desc, data, len, out);
192}
193
194int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
195			unsigned int len, u8 *out)
196{
197	struct crypto_shash *tfm = desc->tfm;
198	struct shash_alg *shash = crypto_shash_alg(tfm);
199	unsigned long alignmask = crypto_shash_alignmask(tfm);
200
201	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
202		return -ENOKEY;
203
204	if (((unsigned long)data | (unsigned long)out) & alignmask)
205		return shash_digest_unaligned(desc, data, len, out);
206
207	return shash->digest(desc, data, len, out);
208}
209EXPORT_SYMBOL_GPL(crypto_shash_digest);
210
211int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
212			    unsigned int len, u8 *out)
213{
214	SHASH_DESC_ON_STACK(desc, tfm);
215	int err;
216
217	desc->tfm = tfm;
218
219	err = crypto_shash_digest(desc, data, len, out);
220
221	shash_desc_zero(desc);
222
223	return err;
224}
225EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
226
227static int shash_default_export(struct shash_desc *desc, void *out)
228{
229	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
230	return 0;
231}
232
233static int shash_default_import(struct shash_desc *desc, const void *in)
234{
235	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
236	return 0;
237}
238
239static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
240			      unsigned int keylen)
241{
242	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
243
244	return crypto_shash_setkey(*ctx, key, keylen);
245}
246
247static int shash_async_init(struct ahash_request *req)
248{
249	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
250	struct shash_desc *desc = ahash_request_ctx(req);
251
252	desc->tfm = *ctx;
253
254	return crypto_shash_init(desc);
255}
256
257int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
258{
259	struct crypto_hash_walk walk;
260	int nbytes;
261
262	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
263	     nbytes = crypto_hash_walk_done(&walk, nbytes))
264		nbytes = crypto_shash_update(desc, walk.data, nbytes);
265
266	return nbytes;
267}
268EXPORT_SYMBOL_GPL(shash_ahash_update);
269
270static int shash_async_update(struct ahash_request *req)
271{
272	return shash_ahash_update(req, ahash_request_ctx(req));
273}
274
275static int shash_async_final(struct ahash_request *req)
276{
277	return crypto_shash_final(ahash_request_ctx(req), req->result);
278}
279
280int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
281{
282	struct crypto_hash_walk walk;
283	int nbytes;
284
285	nbytes = crypto_hash_walk_first(req, &walk);
286	if (!nbytes)
287		return crypto_shash_final(desc, req->result);
288
289	do {
290		nbytes = crypto_hash_walk_last(&walk) ?
291			 crypto_shash_finup(desc, walk.data, nbytes,
292					    req->result) :
293			 crypto_shash_update(desc, walk.data, nbytes);
294		nbytes = crypto_hash_walk_done(&walk, nbytes);
295	} while (nbytes > 0);
296
297	return nbytes;
298}
299EXPORT_SYMBOL_GPL(shash_ahash_finup);
300
301static int shash_async_finup(struct ahash_request *req)
302{
303	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
304	struct shash_desc *desc = ahash_request_ctx(req);
305
306	desc->tfm = *ctx;
307
308	return shash_ahash_finup(req, desc);
309}
310
311int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
312{
313	unsigned int nbytes = req->nbytes;
314	struct scatterlist *sg;
315	unsigned int offset;
316	int err;
317
318	if (nbytes &&
319	    (sg = req->src, offset = sg->offset,
320	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
321		void *data;
322
323		data = kmap_atomic(sg_page(sg));
324		err = crypto_shash_digest(desc, data + offset, nbytes,
325					  req->result);
326		kunmap_atomic(data);
327	} else
328		err = crypto_shash_init(desc) ?:
329		      shash_ahash_finup(req, desc);
330
331	return err;
332}
333EXPORT_SYMBOL_GPL(shash_ahash_digest);
334
335static int shash_async_digest(struct ahash_request *req)
336{
337	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
338	struct shash_desc *desc = ahash_request_ctx(req);
339
340	desc->tfm = *ctx;
341
342	return shash_ahash_digest(req, desc);
343}
344
345static int shash_async_export(struct ahash_request *req, void *out)
346{
347	return crypto_shash_export(ahash_request_ctx(req), out);
348}
349
350static int shash_async_import(struct ahash_request *req, const void *in)
351{
352	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
353	struct shash_desc *desc = ahash_request_ctx(req);
354
355	desc->tfm = *ctx;
356
357	return crypto_shash_import(desc, in);
358}
359
360static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
361{
362	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
363
364	crypto_free_shash(*ctx);
365}
366
367int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
368{
369	struct crypto_alg *calg = tfm->__crt_alg;
370	struct shash_alg *alg = __crypto_shash_alg(calg);
371	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
372	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
373	struct crypto_shash *shash;
374
375	if (!crypto_mod_get(calg))
376		return -EAGAIN;
377
378	shash = crypto_create_tfm(calg, &crypto_shash_type);
379	if (IS_ERR(shash)) {
380		crypto_mod_put(calg);
381		return PTR_ERR(shash);
382	}
383
384	*ctx = shash;
385	tfm->exit = crypto_exit_shash_ops_async;
386
387	crt->init = shash_async_init;
388	crt->update = shash_async_update;
389	crt->final = shash_async_final;
390	crt->finup = shash_async_finup;
391	crt->digest = shash_async_digest;
392	if (crypto_shash_alg_has_setkey(alg))
393		crt->setkey = shash_async_setkey;
394
395	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
396				    CRYPTO_TFM_NEED_KEY);
397
398	crt->export = shash_async_export;
399	crt->import = shash_async_import;
400
401	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
402
403	return 0;
404}
405
406static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
407{
408	struct crypto_shash *hash = __crypto_shash_cast(tfm);
409	struct shash_alg *alg = crypto_shash_alg(hash);
410
411	alg->exit_tfm(hash);
412}
413
414static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
415{
416	struct crypto_shash *hash = __crypto_shash_cast(tfm);
417	struct shash_alg *alg = crypto_shash_alg(hash);
418	int err;
419
420	hash->descsize = alg->descsize;
421
422	shash_set_needkey(hash, alg);
423
424	if (alg->exit_tfm)
425		tfm->exit = crypto_shash_exit_tfm;
426
427	if (!alg->init_tfm)
428		return 0;
429
430	err = alg->init_tfm(hash);
431	if (err)
432		return err;
433
434	/* ->init_tfm() may have increased the descsize. */
435	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
436		if (alg->exit_tfm)
437			alg->exit_tfm(hash);
438		return -EINVAL;
439	}
440
441	return 0;
442}
443
444static void crypto_shash_free_instance(struct crypto_instance *inst)
445{
446	struct shash_instance *shash = shash_instance(inst);
447
448	shash->free(shash);
449}
450
451#ifdef CONFIG_NET
452static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
453{
454	struct crypto_report_hash rhash;
455	struct shash_alg *salg = __crypto_shash_alg(alg);
456
457	memset(&rhash, 0, sizeof(rhash));
458
459	strscpy(rhash.type, "shash", sizeof(rhash.type));
460
461	rhash.blocksize = alg->cra_blocksize;
462	rhash.digestsize = salg->digestsize;
463
464	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
465}
466#else
467static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
468{
469	return -ENOSYS;
470}
471#endif
472
473static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
474	__maybe_unused;
475static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
476{
477	struct shash_alg *salg = __crypto_shash_alg(alg);
478
479	seq_printf(m, "type         : shash\n");
480	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
481	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
482}
483
484static const struct crypto_type crypto_shash_type = {
485	.extsize = crypto_alg_extsize,
486	.init_tfm = crypto_shash_init_tfm,
487	.free = crypto_shash_free_instance,
488#ifdef CONFIG_PROC_FS
489	.show = crypto_shash_show,
490#endif
491	.report = crypto_shash_report,
492	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
493	.maskset = CRYPTO_ALG_TYPE_MASK,
494	.type = CRYPTO_ALG_TYPE_SHASH,
495	.tfmsize = offsetof(struct crypto_shash, base),
496};
497
498int crypto_grab_shash(struct crypto_shash_spawn *spawn,
499		      struct crypto_instance *inst,
500		      const char *name, u32 type, u32 mask)
501{
502	spawn->base.frontend = &crypto_shash_type;
503	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
504}
505EXPORT_SYMBOL_GPL(crypto_grab_shash);
506
507struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
508					u32 mask)
509{
510	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
511}
512EXPORT_SYMBOL_GPL(crypto_alloc_shash);
513
514int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
515{
516	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
517}
518EXPORT_SYMBOL_GPL(crypto_has_shash);
519
520static int shash_prepare_alg(struct shash_alg *alg)
521{
522	struct crypto_alg *base = &alg->base;
523
524	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
525	    alg->descsize > HASH_MAX_DESCSIZE ||
526	    alg->statesize > HASH_MAX_STATESIZE)
527		return -EINVAL;
528
529	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
530		return -EINVAL;
531
532	if ((alg->export && !alg->import) || (alg->import && !alg->export))
533		return -EINVAL;
534
535	base->cra_type = &crypto_shash_type;
536	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
537	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
538
539	if (!alg->finup)
540		alg->finup = shash_finup_unaligned;
541	if (!alg->digest)
542		alg->digest = shash_digest_unaligned;
543	if (!alg->export) {
544		alg->export = shash_default_export;
545		alg->import = shash_default_import;
546		alg->statesize = alg->descsize;
547	}
548	if (!alg->setkey)
549		alg->setkey = shash_no_setkey;
550
551	return 0;
552}
553
554int crypto_register_shash(struct shash_alg *alg)
555{
556	struct crypto_alg *base = &alg->base;
557	int err;
558
559	err = shash_prepare_alg(alg);
560	if (err)
561		return err;
562
563	return crypto_register_alg(base);
564}
565EXPORT_SYMBOL_GPL(crypto_register_shash);
566
567void crypto_unregister_shash(struct shash_alg *alg)
568{
569	crypto_unregister_alg(&alg->base);
570}
571EXPORT_SYMBOL_GPL(crypto_unregister_shash);
572
573int crypto_register_shashes(struct shash_alg *algs, int count)
574{
575	int i, ret;
576
577	for (i = 0; i < count; i++) {
578		ret = crypto_register_shash(&algs[i]);
579		if (ret)
580			goto err;
581	}
582
583	return 0;
584
585err:
586	for (--i; i >= 0; --i)
587		crypto_unregister_shash(&algs[i]);
588
589	return ret;
590}
591EXPORT_SYMBOL_GPL(crypto_register_shashes);
592
593void crypto_unregister_shashes(struct shash_alg *algs, int count)
594{
595	int i;
596
597	for (i = count - 1; i >= 0; --i)
598		crypto_unregister_shash(&algs[i]);
599}
600EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
601
602int shash_register_instance(struct crypto_template *tmpl,
603			    struct shash_instance *inst)
604{
605	int err;
606
607	if (WARN_ON(!inst->free))
608		return -EINVAL;
609
610	err = shash_prepare_alg(&inst->alg);
611	if (err)
612		return err;
613
614	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
615}
616EXPORT_SYMBOL_GPL(shash_register_instance);
617
618void shash_free_singlespawn_instance(struct shash_instance *inst)
619{
620	crypto_drop_spawn(shash_instance_ctx(inst));
621	kfree(inst);
622}
623EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
624
625MODULE_LICENSE("GPL");
626MODULE_DESCRIPTION("Synchronous cryptographic hash type");
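
The exported helpers above, together with crypto_free_shash() from <crypto/hash.h>, are the usual entry points when other kernel code needs a one-shot synchronous hash. The sketch below shows such a caller; it is not part of this file, the function name is made up for illustration, "sha256" is only assumed to be available, and error handling is abbreviated.

/*
 * Minimal usage sketch (hypothetical caller, not part of shash.c):
 * compute a one-shot SHA-256 digest through the shash API.
 */
#include <crypto/hash.h>
#include <linux/err.h>

static int example_sha256_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	/* Look up any registered "sha256" shash implementation. */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/*
	 * One-shot digest: crypto_shash_tfm_digest() runs init/update/final
	 * on a stack descriptor.  @out must hold at least
	 * crypto_shash_digestsize(tfm) bytes (32 for sha256).
	 */
	err = crypto_shash_tfm_digest(tfm, data, len, out);

	crypto_free_shash(tfm);
	return err;
}
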
crypto/shash.c (Linux v6.8)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Synchronous Cryptographic Hash operations.
  4 *
  5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  6 */
  7
  8#include <crypto/scatterwalk.h>
  9#include <linux/cryptouser.h>
 10#include <linux/err.h>
 11#include <linux/kernel.h>
 12#include <linux/module.h>
 13#include <linux/seq_file.h>
 14#include <linux/string.h>
 15#include <net/netlink.h>
 16
 17#include "hash.h"
 18
 19static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
 20{
 21	return hash_get_stat(&alg->halg);
 22}
 23
 24static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
 25{
 26	if (IS_ENABLED(CONFIG_CRYPTO_STATS) && err)
 27		atomic64_inc(&shash_get_stat(alg)->err_cnt);
 28	return err;
 29}
 30
 31int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
 32		    unsigned int keylen)
 33{
 34	return -ENOSYS;
 35}
 36EXPORT_SYMBOL_GPL(shash_no_setkey);
 37
 38static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 39{
 40	if (crypto_shash_alg_needs_key(alg))
 41		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 42}
 43
 44int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 45			unsigned int keylen)
 46{
 47	struct shash_alg *shash = crypto_shash_alg(tfm);
 48	int err;
 49
 50	err = shash->setkey(tfm, key, keylen);
 51	if (unlikely(err)) {
 52		shash_set_needkey(tfm, shash);
 53		return err;
 54	}
 55
 56	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
 57	return 0;
 58}
 59EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 60
 61int crypto_shash_update(struct shash_desc *desc, const u8 *data,
 62			unsigned int len)
 63{
 64	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
 65	int err;
 66
 67	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
 68		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);
 69
 70	err = shash->update(desc, data, len);
 71
 72	return crypto_shash_errstat(shash, err);
 73}
 74EXPORT_SYMBOL_GPL(crypto_shash_update);
 75
 76int crypto_shash_final(struct shash_desc *desc, u8 *out)
 77{
 78	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
 79	int err;
 80
 81	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
 82		atomic64_inc(&shash_get_stat(shash)->hash_cnt);
 83
 84	err = shash->final(desc, out);
 85
 86	return crypto_shash_errstat(shash, err);
 87}
 88EXPORT_SYMBOL_GPL(crypto_shash_final);
 89
 90static int shash_default_finup(struct shash_desc *desc, const u8 *data,
 91			       unsigned int len, u8 *out)
 92{
 93	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
 94
 95	return shash->update(desc, data, len) ?:
 96	       shash->final(desc, out);
 97}
 98
 99int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
100		       unsigned int len, u8 *out)
101{
102	struct crypto_shash *tfm = desc->tfm;
103	struct shash_alg *shash = crypto_shash_alg(tfm);
104	int err;
105
106	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
107		struct crypto_istat_hash *istat = shash_get_stat(shash);
108
109		atomic64_inc(&istat->hash_cnt);
110		atomic64_add(len, &istat->hash_tlen);
111	}
112
113	err = shash->finup(desc, data, len, out);
114
115	return crypto_shash_errstat(shash, err);
116}
117EXPORT_SYMBOL_GPL(crypto_shash_finup);
118
119static int shash_default_digest(struct shash_desc *desc, const u8 *data,
120				unsigned int len, u8 *out)
121{
122	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
123
124	return shash->init(desc) ?:
125	       shash->finup(desc, data, len, out);
126}
127
128int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
129			unsigned int len, u8 *out)
130{
131	struct crypto_shash *tfm = desc->tfm;
132	struct shash_alg *shash = crypto_shash_alg(tfm);
133	int err;
134
135	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
136		struct crypto_istat_hash *istat = shash_get_stat(shash);
137
138		atomic64_inc(&istat->hash_cnt);
139		atomic64_add(len, &istat->hash_tlen);
140	}
141
142	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
143		err = -ENOKEY;
144	else
145		err = shash->digest(desc, data, len, out);
146
147	return crypto_shash_errstat(shash, err);
148}
149EXPORT_SYMBOL_GPL(crypto_shash_digest);
150
151int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
152			    unsigned int len, u8 *out)
153{
154	SHASH_DESC_ON_STACK(desc, tfm);
155	int err;
156
157	desc->tfm = tfm;
158
159	err = crypto_shash_digest(desc, data, len, out);
160
161	shash_desc_zero(desc);
162
163	return err;
164}
165EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
166
167int crypto_shash_export(struct shash_desc *desc, void *out)
168{
169	struct crypto_shash *tfm = desc->tfm;
170	struct shash_alg *shash = crypto_shash_alg(tfm);
171
172	if (shash->export)
173		return shash->export(desc, out);
174
175	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
176	return 0;
177}
178EXPORT_SYMBOL_GPL(crypto_shash_export);
179
180int crypto_shash_import(struct shash_desc *desc, const void *in)
181{
182	struct crypto_shash *tfm = desc->tfm;
183	struct shash_alg *shash = crypto_shash_alg(tfm);
184
185	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
186		return -ENOKEY;
187
188	if (shash->import)
189		return shash->import(desc, in);
190
191	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
192	return 0;
193}
194EXPORT_SYMBOL_GPL(crypto_shash_import);
195
196static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
197{
198	struct crypto_shash *hash = __crypto_shash_cast(tfm);
199	struct shash_alg *alg = crypto_shash_alg(hash);
200
201	alg->exit_tfm(hash);
202}
203
204static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
205{
206	struct crypto_shash *hash = __crypto_shash_cast(tfm);
207	struct shash_alg *alg = crypto_shash_alg(hash);
208	int err;
209
210	hash->descsize = alg->descsize;
211
212	shash_set_needkey(hash, alg);
213
214	if (alg->exit_tfm)
215		tfm->exit = crypto_shash_exit_tfm;
216
217	if (!alg->init_tfm)
218		return 0;
219
220	err = alg->init_tfm(hash);
221	if (err)
222		return err;
223
224	/* ->init_tfm() may have increased the descsize. */
225	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
226		if (alg->exit_tfm)
227			alg->exit_tfm(hash);
228		return -EINVAL;
229	}
230
231	return 0;
232}
233
234static void crypto_shash_free_instance(struct crypto_instance *inst)
235{
236	struct shash_instance *shash = shash_instance(inst);
237
238	shash->free(shash);
239}
240
241static int __maybe_unused crypto_shash_report(
242	struct sk_buff *skb, struct crypto_alg *alg)
243{
244	struct crypto_report_hash rhash;
245	struct shash_alg *salg = __crypto_shash_alg(alg);
246
247	memset(&rhash, 0, sizeof(rhash));
248
249	strscpy(rhash.type, "shash", sizeof(rhash.type));
250
251	rhash.blocksize = alg->cra_blocksize;
252	rhash.digestsize = salg->digestsize;
253
254	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
255}
256
257static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
258	__maybe_unused;
259static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
260{
261	struct shash_alg *salg = __crypto_shash_alg(alg);
262
263	seq_printf(m, "type         : shash\n");
264	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
265	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
266}
267
268static int __maybe_unused crypto_shash_report_stat(
269	struct sk_buff *skb, struct crypto_alg *alg)
270{
271	return crypto_hash_report_stat(skb, alg, "shash");
272}
273
274const struct crypto_type crypto_shash_type = {
275	.extsize = crypto_alg_extsize,
276	.init_tfm = crypto_shash_init_tfm,
277	.free = crypto_shash_free_instance,
278#ifdef CONFIG_PROC_FS
279	.show = crypto_shash_show,
280#endif
281#if IS_ENABLED(CONFIG_CRYPTO_USER)
282	.report = crypto_shash_report,
283#endif
284#ifdef CONFIG_CRYPTO_STATS
285	.report_stat = crypto_shash_report_stat,
286#endif
287	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
288	.maskset = CRYPTO_ALG_TYPE_MASK,
289	.type = CRYPTO_ALG_TYPE_SHASH,
290	.tfmsize = offsetof(struct crypto_shash, base),
291};
292
293int crypto_grab_shash(struct crypto_shash_spawn *spawn,
294		      struct crypto_instance *inst,
295		      const char *name, u32 type, u32 mask)
296{
297	spawn->base.frontend = &crypto_shash_type;
298	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
299}
300EXPORT_SYMBOL_GPL(crypto_grab_shash);
301
302struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
303					u32 mask)
304{
305	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
306}
307EXPORT_SYMBOL_GPL(crypto_alloc_shash);
308
309int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
310{
311	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
312}
313EXPORT_SYMBOL_GPL(crypto_has_shash);
314
315struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
316{
317	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
318	struct shash_alg *alg = crypto_shash_alg(hash);
319	struct crypto_shash *nhash;
320	int err;
321
322	if (!crypto_shash_alg_has_setkey(alg)) {
323		tfm = crypto_tfm_get(tfm);
324		if (IS_ERR(tfm))
325			return ERR_CAST(tfm);
326
327		return hash;
328	}
329
330	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
331		return ERR_PTR(-ENOSYS);
332
333	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
334	if (IS_ERR(nhash))
335		return nhash;
336
337	nhash->descsize = hash->descsize;
338
339	if (alg->clone_tfm) {
340		err = alg->clone_tfm(nhash, hash);
341		if (err) {
342			crypto_free_shash(nhash);
343			return ERR_PTR(err);
344		}
345	}
346
347	return nhash;
348}
349EXPORT_SYMBOL_GPL(crypto_clone_shash);
350
351int hash_prepare_alg(struct hash_alg_common *alg)
352{
353	struct crypto_istat_hash *istat = hash_get_stat(alg);
354	struct crypto_alg *base = &alg->base;
355
356	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
357		return -EINVAL;
358
359	/* alignmask is not useful for hashes, so it is not supported. */
360	if (base->cra_alignmask)
361		return -EINVAL;
362
363	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
364
365	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
366		memset(istat, 0, sizeof(*istat));
367
368	return 0;
369}
370
371static int shash_prepare_alg(struct shash_alg *alg)
372{
373	struct crypto_alg *base = &alg->halg.base;
374	int err;
375
376	if (alg->descsize > HASH_MAX_DESCSIZE)
377		return -EINVAL;
378
379	if ((alg->export && !alg->import) || (alg->import && !alg->export))
380		return -EINVAL;
381
382	err = hash_prepare_alg(&alg->halg);
383	if (err)
384		return err;
385
386	base->cra_type = &crypto_shash_type;
387	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
388
389	/*
390	 * Handle missing optional functions.  For each one we can either
391	 * install a default here, or we can leave the pointer as NULL and check
392	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
393	 * when the default behavior is desired.  For ->finup and ->digest we
394	 * install defaults, since for optimal performance algorithms should
395	 * implement these anyway.  On the other hand, for ->import and
396	 * ->export the common case and best performance comes from the simple
397	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
398	 * leave them NULL and provide the memcpy with no indirect call.
399	 */
400	if (!alg->finup)
401		alg->finup = shash_default_finup;
402	if (!alg->digest)
403		alg->digest = shash_default_digest;
404	if (!alg->export)
405		alg->halg.statesize = alg->descsize;
406	if (!alg->setkey)
407		alg->setkey = shash_no_setkey;
408
409	return 0;
410}
411
412int crypto_register_shash(struct shash_alg *alg)
413{
414	struct crypto_alg *base = &alg->base;
415	int err;
416
417	err = shash_prepare_alg(alg);
418	if (err)
419		return err;
420
421	return crypto_register_alg(base);
422}
423EXPORT_SYMBOL_GPL(crypto_register_shash);
424
425void crypto_unregister_shash(struct shash_alg *alg)
426{
427	crypto_unregister_alg(&alg->base);
428}
429EXPORT_SYMBOL_GPL(crypto_unregister_shash);
430
431int crypto_register_shashes(struct shash_alg *algs, int count)
432{
433	int i, ret;
434
435	for (i = 0; i < count; i++) {
436		ret = crypto_register_shash(&algs[i]);
437		if (ret)
438			goto err;
439	}
440
441	return 0;
442
443err:
444	for (--i; i >= 0; --i)
445		crypto_unregister_shash(&algs[i]);
446
447	return ret;
448}
449EXPORT_SYMBOL_GPL(crypto_register_shashes);
450
451void crypto_unregister_shashes(struct shash_alg *algs, int count)
452{
453	int i;
454
455	for (i = count - 1; i >= 0; --i)
456		crypto_unregister_shash(&algs[i]);
457}
458EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
459
460int shash_register_instance(struct crypto_template *tmpl,
461			    struct shash_instance *inst)
462{
463	int err;
464
465	if (WARN_ON(!inst->free))
466		return -EINVAL;
467
468	err = shash_prepare_alg(&inst->alg);
469	if (err)
470		return err;
471
472	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
473}
474EXPORT_SYMBOL_GPL(shash_register_instance);
475
476void shash_free_singlespawn_instance(struct shash_instance *inst)
477{
478	crypto_drop_spawn(shash_instance_ctx(inst));
479	kfree(inst);
480}
481EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
482
483MODULE_LICENSE("GPL");
484MODULE_DESCRIPTION("Synchronous cryptographic hash type");
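
For context, shash_prepare_alg() above defines the contract a provider must meet in v6.8: descsize no larger than HASH_MAX_DESCSIZE, export/import supplied as a pair or not at all, a zero cra_alignmask, with ->finup, ->digest and ->setkey falling back to the defaults installed there. The sketch below registers a deliberately trivial transform to illustrate that shape; the "xor8" algorithm and every identifier in it are hypothetical, not an existing kernel driver.

/*
 * Hypothetical provider sketch (not part of shash.c): a one-byte XOR
 * "checksum" registered through crypto_register_shash().
 */
#include <crypto/internal/hash.h>
#include <linux/module.h>

struct xor8_desc_ctx {
	u8 acc;
};

static int xor8_init(struct shash_desc *desc)
{
	struct xor8_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->acc = 0;
	return 0;
}

static int xor8_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct xor8_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->acc ^= *data++;
	return 0;
}

static int xor8_final(struct shash_desc *desc, u8 *out)
{
	struct xor8_desc_ctx *ctx = shash_desc_ctx(desc);

	*out = ctx->acc;
	return 0;
}

static struct shash_alg xor8_alg = {
	.init		= xor8_init,
	.update		= xor8_update,
	.final		= xor8_final,
	/*
	 * ->finup and ->digest are left NULL so shash_prepare_alg() installs
	 * shash_default_finup()/shash_default_digest(); ->export/->import are
	 * left NULL so crypto_shash_export()/crypto_shash_import() fall back
	 * to copying the descriptor context; ->setkey defaults to
	 * shash_no_setkey().
	 */
	.descsize	= sizeof(struct xor8_desc_ctx),
	.digestsize	= 1,
	.base		= {
		.cra_name	 = "xor8",
		.cra_driver_name = "xor8-generic",
		.cra_blocksize	 = 1,
		.cra_module	 = THIS_MODULE,
	},
};

static int __init xor8_mod_init(void)
{
	return crypto_register_shash(&xor8_alg);
}

static void __exit xor8_mod_exit(void)
{
	crypto_unregister_shash(&xor8_alg);
}

module_init(xor8_mod_init);
module_exit(xor8_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Illustrative xor8 shash provider (sketch only)");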