crypto/shash.c (v4.17)
 
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (err)
		return err;

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __aligned_largest u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
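The exported routines above form the caller-facing shash API. As a hedged illustration (not part of shash.c; the function name example_sha256() and the choice of "sha256" are assumptions for this sketch), a one-shot digest on the v4.17 API looks roughly like this; note that v4.17 descriptors still carry a flags field:

#include <crypto/hash.h>

static int example_sha256(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);	/* any shash algorithm */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* reserves sizeof(struct shash_desc) + descsize bytes */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;	/* v4.17 still has shash_desc::flags */
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);	/* wipe the descriptor state */
	}

	crypto_free_shash(tfm);
	return err;
}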

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
crypto/shash.c (v5.4)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
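To make the CRYPTO_TFM_NEED_KEY handling above concrete, here is a hedged caller sketch (not part of shash.c; example_alloc_hmac() is an illustrative name): a keyed algorithm such as "hmac(sha256)" starts out with the flag set, digest attempts fail with -ENOKEY until a key is accepted, and a successful crypto_shash_setkey() clears the flag:

#include <crypto/hash.h>

static struct crypto_shash *example_alloc_hmac(const u8 *key,
					       unsigned int keylen)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
	if (IS_ERR(tfm))
		return tfm;

	/* clears CRYPTO_TFM_NEED_KEY on success, re-arms it on failure */
	err = crypto_shash_setkey(tfm, key, keylen);
	if (err) {
		crypto_free_shash(tfm);
		return ERR_PTR(err);
	}

	return tfm;
}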

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
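For the incremental interface, a hedged sketch of a caller chaining the routines above (example_hash_two_parts() is an illustrative name, not from this file); note that in this version a descriptor only needs desc->tfm filled in, since the old flags field is gone:

#include <crypto/hash.h>

static int example_hash_two_parts(struct crypto_shash *tfm,
				  const u8 *a, unsigned int alen,
				  const u8 *b, unsigned int blen,
				  u8 *out)
{
	/* v5.4's SHASH_DESC_ON_STACK sizes the buffer by HASH_MAX_DESCSIZE */
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, a, alen) ?:
	      crypto_shash_update(desc, b, blen) ?:
	      crypto_shash_final(desc, out);

	shash_desc_zero(desc);	/* wipe the descriptor state */
	return err;
}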

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
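To show the registration path end to end, here is a hedged, minimal driver sketch (every xsum_* name is hypothetical, and the byte-sum "hash" is a toy, not a real algorithm): it fills in only the mandatory shash_alg fields and lets shash_prepare_alg() above install the finup/digest/export defaults:

#include <crypto/internal/hash.h>
#include <linux/module.h>
#include <asm/unaligned.h>

struct xsum_desc_ctx {
	u32 sum;	/* running per-descriptor state, descsize bytes */
};

static int xsum_init(struct shash_desc *desc)
{
	struct xsum_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->sum = 0;
	return 0;
}

static int xsum_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct xsum_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->sum += *data++;
	return 0;
}

static int xsum_final(struct shash_desc *desc, u8 *out)
{
	struct xsum_desc_ctx *ctx = shash_desc_ctx(desc);

	put_unaligned_le32(ctx->sum, out);
	return 0;
}

static struct shash_alg xsum_alg = {
	.digestsize	= 4,
	.init		= xsum_init,
	.update		= xsum_update,
	.final		= xsum_final,
	.descsize	= sizeof(struct xsum_desc_ctx),
	.base		= {
		.cra_name	 = "xsum",
		.cra_driver_name = "xsum-generic",
		.cra_priority	 = 100,
		.cra_blocksize	 = 1,
		.cra_module	 = THIS_MODULE,
	},
};

static int __init xsum_mod_init(void)
{
	return crypto_register_shash(&xsum_alg);
}

static void __exit xsum_mod_exit(void)
{
	crypto_unregister_shash(&xsum_alg);
}

module_init(xsum_mod_init);
module_exit(xsum_mod_exit);
MODULE_LICENSE("GPL");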

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");