v5.4
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Synchronous Cryptographic Hash operations.
  4 *
  5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  6 */
  7
  8#include <crypto/scatterwalk.h>
  9#include <crypto/internal/hash.h>
 10#include <linux/err.h>
 11#include <linux/kernel.h>
 12#include <linux/module.h>
 13#include <linux/slab.h>
 14#include <linux/seq_file.h>
 15#include <linux/cryptouser.h>
 16#include <net/netlink.h>
 17#include <linux/compiler.h>
 18
 19#include "internal.h"
 20
 21static const struct crypto_type crypto_shash_type;
 22
 23int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
 24		    unsigned int keylen)
 25{
 26	return -ENOSYS;
 27}
 28EXPORT_SYMBOL_GPL(shash_no_setkey);
 29
 30static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
 31				  unsigned int keylen)
 32{
 33	struct shash_alg *shash = crypto_shash_alg(tfm);
 34	unsigned long alignmask = crypto_shash_alignmask(tfm);
 35	unsigned long absize;
 36	u8 *buffer, *alignbuffer;
 37	int err;
 38
 39	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 40	buffer = kmalloc(absize, GFP_ATOMIC);
 41	if (!buffer)
 42		return -ENOMEM;
 43
 44	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 45	memcpy(alignbuffer, key, keylen);
 46	err = shash->setkey(tfm, alignbuffer, keylen);
 47	kzfree(buffer);
 48	return err;
 49}
 50
 51static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
 52{
 53	if (crypto_shash_alg_has_setkey(alg) &&
 54	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
 55		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 56}
 57
 58int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 59			unsigned int keylen)
 60{
 61	struct shash_alg *shash = crypto_shash_alg(tfm);
 62	unsigned long alignmask = crypto_shash_alignmask(tfm);
 63	int err;
 64
 65	if ((unsigned long)key & alignmask)
 66		err = shash_setkey_unaligned(tfm, key, keylen);
 67	else
 68		err = shash->setkey(tfm, key, keylen);
 69
 70	if (unlikely(err)) {
 71		shash_set_needkey(tfm, shash);
 72		return err;
 73	}
 74
 75	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
 76	return 0;
 77}
 78EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 79
 80static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
 81				  unsigned int len)
 82{
 83	struct crypto_shash *tfm = desc->tfm;
 84	struct shash_alg *shash = crypto_shash_alg(tfm);
 85	unsigned long alignmask = crypto_shash_alignmask(tfm);
 86	unsigned int unaligned_len = alignmask + 1 -
 87				     ((unsigned long)data & alignmask);
 88	/*
 89	 * We cannot count on __aligned() working for large values:
 90	 * https://patchwork.kernel.org/patch/9507697/
 91	 */
 92	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
 93	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 94	int err;
 95
 96	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
 97		return -EINVAL;
 98
 99	if (unaligned_len > len)
100		unaligned_len = len;
101
102	memcpy(buf, data, unaligned_len);
103	err = shash->update(desc, buf, unaligned_len);
104	memset(buf, 0, unaligned_len);
105
106	return err ?:
107	       shash->update(desc, data + unaligned_len, len - unaligned_len);
108}
109
110int crypto_shash_update(struct shash_desc *desc, const u8 *data,
111			unsigned int len)
112{
113	struct crypto_shash *tfm = desc->tfm;
114	struct shash_alg *shash = crypto_shash_alg(tfm);
115	unsigned long alignmask = crypto_shash_alignmask(tfm);
116
117	if ((unsigned long)data & alignmask)
118		return shash_update_unaligned(desc, data, len);
119
120	return shash->update(desc, data, len);
121}
122EXPORT_SYMBOL_GPL(crypto_shash_update);
123
124static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
125{
126	struct crypto_shash *tfm = desc->tfm;
127	unsigned long alignmask = crypto_shash_alignmask(tfm);
128	struct shash_alg *shash = crypto_shash_alg(tfm);
129	unsigned int ds = crypto_shash_digestsize(tfm);
130	/*
131	 * We cannot count on __aligned() working for large values:
132	 * https://patchwork.kernel.org/patch/9507697/
133	 */
134	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
135	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
136	int err;
137
138	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
139		return -EINVAL;
140
141	err = shash->final(desc, buf);
142	if (err)
143		goto out;
144
145	memcpy(out, buf, ds);
146
147out:
148	memset(buf, 0, ds);
149	return err;
150}
151
152int crypto_shash_final(struct shash_desc *desc, u8 *out)
153{
154	struct crypto_shash *tfm = desc->tfm;
155	struct shash_alg *shash = crypto_shash_alg(tfm);
156	unsigned long alignmask = crypto_shash_alignmask(tfm);
157
158	if ((unsigned long)out & alignmask)
159		return shash_final_unaligned(desc, out);
160
161	return shash->final(desc, out);
162}
163EXPORT_SYMBOL_GPL(crypto_shash_final);
164
165static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
166				 unsigned int len, u8 *out)
167{
168	return crypto_shash_update(desc, data, len) ?:
169	       crypto_shash_final(desc, out);
170}
171
172int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
173		       unsigned int len, u8 *out)
174{
175	struct crypto_shash *tfm = desc->tfm;
176	struct shash_alg *shash = crypto_shash_alg(tfm);
177	unsigned long alignmask = crypto_shash_alignmask(tfm);
178
179	if (((unsigned long)data | (unsigned long)out) & alignmask)
180		return shash_finup_unaligned(desc, data, len, out);
181
182	return shash->finup(desc, data, len, out);
183}
184EXPORT_SYMBOL_GPL(crypto_shash_finup);
185
186static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
187				  unsigned int len, u8 *out)
188{
189	return crypto_shash_init(desc) ?:
190	       crypto_shash_finup(desc, data, len, out);
191}
192
193int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
194			unsigned int len, u8 *out)
195{
196	struct crypto_shash *tfm = desc->tfm;
197	struct shash_alg *shash = crypto_shash_alg(tfm);
198	unsigned long alignmask = crypto_shash_alignmask(tfm);
199
200	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
201		return -ENOKEY;
202
203	if (((unsigned long)data | (unsigned long)out) & alignmask)
204		return shash_digest_unaligned(desc, data, len, out);
205
206	return shash->digest(desc, data, len, out);
207}
208EXPORT_SYMBOL_GPL(crypto_shash_digest);
209
210static int shash_default_export(struct shash_desc *desc, void *out)
211{
212	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
213	return 0;
214}
215
216static int shash_default_import(struct shash_desc *desc, const void *in)
217{
218	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
219	return 0;
220}
221
222static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
223			      unsigned int keylen)
224{
225	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
226
227	return crypto_shash_setkey(*ctx, key, keylen);
228}
229
230static int shash_async_init(struct ahash_request *req)
231{
232	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
233	struct shash_desc *desc = ahash_request_ctx(req);
234
235	desc->tfm = *ctx;
236
237	return crypto_shash_init(desc);
238}
239
240int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
241{
242	struct crypto_hash_walk walk;
243	int nbytes;
244
245	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
246	     nbytes = crypto_hash_walk_done(&walk, nbytes))
247		nbytes = crypto_shash_update(desc, walk.data, nbytes);
248
249	return nbytes;
250}
251EXPORT_SYMBOL_GPL(shash_ahash_update);
252
253static int shash_async_update(struct ahash_request *req)
254{
255	return shash_ahash_update(req, ahash_request_ctx(req));
256}
257
258static int shash_async_final(struct ahash_request *req)
259{
260	return crypto_shash_final(ahash_request_ctx(req), req->result);
261}
262
263int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
264{
265	struct crypto_hash_walk walk;
266	int nbytes;
267
268	nbytes = crypto_hash_walk_first(req, &walk);
269	if (!nbytes)
270		return crypto_shash_final(desc, req->result);
271
272	do {
273		nbytes = crypto_hash_walk_last(&walk) ?
274			 crypto_shash_finup(desc, walk.data, nbytes,
275					    req->result) :
276			 crypto_shash_update(desc, walk.data, nbytes);
277		nbytes = crypto_hash_walk_done(&walk, nbytes);
278	} while (nbytes > 0);
279
280	return nbytes;
281}
282EXPORT_SYMBOL_GPL(shash_ahash_finup);
283
284static int shash_async_finup(struct ahash_request *req)
285{
286	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
287	struct shash_desc *desc = ahash_request_ctx(req);
288
289	desc->tfm = *ctx;
290
291	return shash_ahash_finup(req, desc);
292}
293
294int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
295{
296	unsigned int nbytes = req->nbytes;
297	struct scatterlist *sg;
298	unsigned int offset;
299	int err;
300
301	if (nbytes &&
302	    (sg = req->src, offset = sg->offset,
303	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
304		void *data;
305
306		data = kmap_atomic(sg_page(sg));
307		err = crypto_shash_digest(desc, data + offset, nbytes,
308					  req->result);
309		kunmap_atomic(data);
310	} else
311		err = crypto_shash_init(desc) ?:
312		      shash_ahash_finup(req, desc);
313
314	return err;
315}
316EXPORT_SYMBOL_GPL(shash_ahash_digest);
317
318static int shash_async_digest(struct ahash_request *req)
319{
320	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
321	struct shash_desc *desc = ahash_request_ctx(req);
322
323	desc->tfm = *ctx;
324
325	return shash_ahash_digest(req, desc);
326}
327
328static int shash_async_export(struct ahash_request *req, void *out)
329{
330	return crypto_shash_export(ahash_request_ctx(req), out);
331}
332
333static int shash_async_import(struct ahash_request *req, const void *in)
334{
335	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
336	struct shash_desc *desc = ahash_request_ctx(req);
337
338	desc->tfm = *ctx;
339
340	return crypto_shash_import(desc, in);
341}
342
343static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
344{
345	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
346
347	crypto_free_shash(*ctx);
348}
349
350int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
351{
352	struct crypto_alg *calg = tfm->__crt_alg;
353	struct shash_alg *alg = __crypto_shash_alg(calg);
354	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
355	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
356	struct crypto_shash *shash;
357
358	if (!crypto_mod_get(calg))
359		return -EAGAIN;
360
361	shash = crypto_create_tfm(calg, &crypto_shash_type);
362	if (IS_ERR(shash)) {
363		crypto_mod_put(calg);
364		return PTR_ERR(shash);
365	}
366
367	*ctx = shash;
368	tfm->exit = crypto_exit_shash_ops_async;
369
370	crt->init = shash_async_init;
371	crt->update = shash_async_update;
372	crt->final = shash_async_final;
373	crt->finup = shash_async_finup;
374	crt->digest = shash_async_digest;
375	if (crypto_shash_alg_has_setkey(alg))
376		crt->setkey = shash_async_setkey;
377
378	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
379				    CRYPTO_TFM_NEED_KEY);
380
381	crt->export = shash_async_export;
382	crt->import = shash_async_import;
383
384	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
385
386	return 0;
387}
388
389static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
390{
391	struct crypto_shash *hash = __crypto_shash_cast(tfm);
392	struct shash_alg *alg = crypto_shash_alg(hash);
393
394	hash->descsize = alg->descsize;
395
396	shash_set_needkey(hash, alg);
397
398	return 0;
399}
400
401#ifdef CONFIG_NET
402static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
403{
404	struct crypto_report_hash rhash;
405	struct shash_alg *salg = __crypto_shash_alg(alg);
406
407	memset(&rhash, 0, sizeof(rhash));
408
409	strscpy(rhash.type, "shash", sizeof(rhash.type));
410
411	rhash.blocksize = alg->cra_blocksize;
412	rhash.digestsize = salg->digestsize;
413
414	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
415}
416#else
417static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
418{
419	return -ENOSYS;
420}
421#endif
422
423static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
424	__maybe_unused;
425static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
426{
427	struct shash_alg *salg = __crypto_shash_alg(alg);
428
429	seq_printf(m, "type         : shash\n");
430	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
431	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
432}
433
434static const struct crypto_type crypto_shash_type = {
435	.extsize = crypto_alg_extsize,
436	.init_tfm = crypto_shash_init_tfm,
437#ifdef CONFIG_PROC_FS
438	.show = crypto_shash_show,
439#endif
440	.report = crypto_shash_report,
441	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
442	.maskset = CRYPTO_ALG_TYPE_MASK,
443	.type = CRYPTO_ALG_TYPE_SHASH,
444	.tfmsize = offsetof(struct crypto_shash, base),
445};
446
447struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
448					u32 mask)
449{
450	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
451}
452EXPORT_SYMBOL_GPL(crypto_alloc_shash);
453
454static int shash_prepare_alg(struct shash_alg *alg)
455{
456	struct crypto_alg *base = &alg->base;
457
458	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
459	    alg->descsize > HASH_MAX_DESCSIZE ||
460	    alg->statesize > HASH_MAX_STATESIZE)
461		return -EINVAL;
462
463	if ((alg->export && !alg->import) || (alg->import && !alg->export))
464		return -EINVAL;
465
466	base->cra_type = &crypto_shash_type;
467	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
468	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
469
470	if (!alg->finup)
471		alg->finup = shash_finup_unaligned;
472	if (!alg->digest)
473		alg->digest = shash_digest_unaligned;
474	if (!alg->export) {
475		alg->export = shash_default_export;
476		alg->import = shash_default_import;
477		alg->statesize = alg->descsize;
478	}
479	if (!alg->setkey)
480		alg->setkey = shash_no_setkey;
481
482	return 0;
483}
484
485int crypto_register_shash(struct shash_alg *alg)
486{
487	struct crypto_alg *base = &alg->base;
488	int err;
489
490	err = shash_prepare_alg(alg);
491	if (err)
492		return err;
493
494	return crypto_register_alg(base);
495}
496EXPORT_SYMBOL_GPL(crypto_register_shash);
497
498int crypto_unregister_shash(struct shash_alg *alg)
499{
500	return crypto_unregister_alg(&alg->base);
501}
502EXPORT_SYMBOL_GPL(crypto_unregister_shash);
503
504int crypto_register_shashes(struct shash_alg *algs, int count)
505{
506	int i, ret;
507
508	for (i = 0; i < count; i++) {
509		ret = crypto_register_shash(&algs[i]);
510		if (ret)
511			goto err;
512	}
513
514	return 0;
515
516err:
517	for (--i; i >= 0; --i)
518		crypto_unregister_shash(&algs[i]);
519
520	return ret;
521}
522EXPORT_SYMBOL_GPL(crypto_register_shashes);
523
524int crypto_unregister_shashes(struct shash_alg *algs, int count)
525{
526	int i, ret;
527
528	for (i = count - 1; i >= 0; --i) {
529		ret = crypto_unregister_shash(&algs[i]);
530		if (ret)
531			pr_err("Failed to unregister %s %s: %d\n",
532			       algs[i].base.cra_driver_name,
533			       algs[i].base.cra_name, ret);
534	}
535
536	return 0;
537}
538EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
539
540int shash_register_instance(struct crypto_template *tmpl,
541			    struct shash_instance *inst)
542{
543	int err;
544
545	err = shash_prepare_alg(&inst->alg);
546	if (err)
547		return err;
548
549	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
550}
551EXPORT_SYMBOL_GPL(shash_register_instance);
552
553void shash_free_instance(struct crypto_instance *inst)
554{
555	crypto_drop_spawn(crypto_instance_ctx(inst));
556	kfree(shash_instance(inst));
557}
558EXPORT_SYMBOL_GPL(shash_free_instance);
559
560int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
561			    struct shash_alg *alg,
562			    struct crypto_instance *inst)
563{
564	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
565				  &crypto_shash_type);
566}
567EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
568
569struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
570{
571	struct crypto_alg *alg;
572
573	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
574	return IS_ERR(alg) ? ERR_CAST(alg) :
575	       container_of(alg, struct shash_alg, base);
576}
577EXPORT_SYMBOL_GPL(shash_attr_alg);
578
579MODULE_LICENSE("GPL");
580MODULE_DESCRIPTION("Synchronous cryptographic hash type");
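
The v5.4 file above exports the synchronous hash API used by kernel callers. As a reference point, here is a minimal, hypothetical sketch of one-shot hashing through that API; the "sha256" algorithm name, the function name and the error-handling style are illustrative assumptions, not part of shash.c (SHASH_DESC_ON_STACK and shash_desc_zero() come from <crypto/hash.h>):

#include <crypto/hash.h>
#include <linux/err.h>

/*
 * Hypothetical one-shot digest over a flat buffer, assuming a "sha256"
 * shash implementation is registered.
 */
static int example_sha256_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);	/* assumed algorithm name */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;	/* v5.4: no desc->flags field to set */
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);	/* wipe the on-stack state */
	}

	crypto_free_shash(tfm);
	return err;
}

For incremental hashing, the same descriptor could instead be driven with crypto_shash_init(), crypto_shash_update() and crypto_shash_final(), which this file wraps with the alignment-fixup helpers shown above.
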
v3.1
  1/*
  2 * Synchronous Cryptographic Hash operations.
  3 *
  4 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
  5 *
  6 * This program is free software; you can redistribute it and/or modify it
  7 * under the terms of the GNU General Public License as published by the Free
  8 * Software Foundation; either version 2 of the License, or (at your option)
  9 * any later version.
 10 *
 11 */
 12
 13#include <crypto/scatterwalk.h>
 14#include <crypto/internal/hash.h>
 15#include <linux/err.h>
 16#include <linux/kernel.h>
 17#include <linux/module.h>
 18#include <linux/slab.h>
 19#include <linux/seq_file.h>
 20
 21#include "internal.h"
 22
 23static const struct crypto_type crypto_shash_type;
 24
 25static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
 26			   unsigned int keylen)
 27{
 28	return -ENOSYS;
 29}
 30
 31static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
 32				  unsigned int keylen)
 33{
 34	struct shash_alg *shash = crypto_shash_alg(tfm);
 35	unsigned long alignmask = crypto_shash_alignmask(tfm);
 36	unsigned long absize;
 37	u8 *buffer, *alignbuffer;
 38	int err;
 39
 40	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 41	buffer = kmalloc(absize, GFP_KERNEL);
 42	if (!buffer)
 43		return -ENOMEM;
 44
 45	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 46	memcpy(alignbuffer, key, keylen);
 47	err = shash->setkey(tfm, alignbuffer, keylen);
 48	kzfree(buffer);
 49	return err;
 50}
 51
 52int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 53			unsigned int keylen)
 54{
 55	struct shash_alg *shash = crypto_shash_alg(tfm);
 56	unsigned long alignmask = crypto_shash_alignmask(tfm);
 57
 58	if ((unsigned long)key & alignmask)
 59		return shash_setkey_unaligned(tfm, key, keylen);
 60
 61	return shash->setkey(tfm, key, keylen);
 62}
 63EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 64
 65static inline unsigned int shash_align_buffer_size(unsigned len,
 66						   unsigned long mask)
 67{
 68	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
 69}
 70
 71static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
 72				  unsigned int len)
 73{
 74	struct crypto_shash *tfm = desc->tfm;
 75	struct shash_alg *shash = crypto_shash_alg(tfm);
 76	unsigned long alignmask = crypto_shash_alignmask(tfm);
 77	unsigned int unaligned_len = alignmask + 1 -
 78				     ((unsigned long)data & alignmask);
 79	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
 80		__attribute__ ((aligned));
 81	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 82	int err;
 83
 84	if (unaligned_len > len)
 85		unaligned_len = len;
 86
 87	memcpy(buf, data, unaligned_len);
 88	err = shash->update(desc, buf, unaligned_len);
 89	memset(buf, 0, unaligned_len);
 90
 91	return err ?:
 92	       shash->update(desc, data + unaligned_len, len - unaligned_len);
 93}
 94
 95int crypto_shash_update(struct shash_desc *desc, const u8 *data,
 96			unsigned int len)
 97{
 98	struct crypto_shash *tfm = desc->tfm;
 99	struct shash_alg *shash = crypto_shash_alg(tfm);
100	unsigned long alignmask = crypto_shash_alignmask(tfm);
101
102	if ((unsigned long)data & alignmask)
103		return shash_update_unaligned(desc, data, len);
104
105	return shash->update(desc, data, len);
106}
107EXPORT_SYMBOL_GPL(crypto_shash_update);
108
109static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
110{
111	struct crypto_shash *tfm = desc->tfm;
112	unsigned long alignmask = crypto_shash_alignmask(tfm);
113	struct shash_alg *shash = crypto_shash_alg(tfm);
114	unsigned int ds = crypto_shash_digestsize(tfm);
115	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
116		__attribute__ ((aligned));
117	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
118	int err;
119
120	err = shash->final(desc, buf);
121	if (err)
122		goto out;
123
124	memcpy(out, buf, ds);
125
126out:
127	memset(buf, 0, ds);
128	return err;
129}
130
131int crypto_shash_final(struct shash_desc *desc, u8 *out)
132{
133	struct crypto_shash *tfm = desc->tfm;
134	struct shash_alg *shash = crypto_shash_alg(tfm);
135	unsigned long alignmask = crypto_shash_alignmask(tfm);
136
137	if ((unsigned long)out & alignmask)
138		return shash_final_unaligned(desc, out);
139
140	return shash->final(desc, out);
141}
142EXPORT_SYMBOL_GPL(crypto_shash_final);
143
144static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
145				 unsigned int len, u8 *out)
146{
147	return crypto_shash_update(desc, data, len) ?:
148	       crypto_shash_final(desc, out);
149}
150
151int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
152		       unsigned int len, u8 *out)
153{
154	struct crypto_shash *tfm = desc->tfm;
155	struct shash_alg *shash = crypto_shash_alg(tfm);
156	unsigned long alignmask = crypto_shash_alignmask(tfm);
157
158	if (((unsigned long)data | (unsigned long)out) & alignmask)
159		return shash_finup_unaligned(desc, data, len, out);
160
161	return shash->finup(desc, data, len, out);
162}
163EXPORT_SYMBOL_GPL(crypto_shash_finup);
164
165static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
166				  unsigned int len, u8 *out)
167{
168	return crypto_shash_init(desc) ?:
169	       crypto_shash_finup(desc, data, len, out);
170}
171
172int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
173			unsigned int len, u8 *out)
174{
175	struct crypto_shash *tfm = desc->tfm;
176	struct shash_alg *shash = crypto_shash_alg(tfm);
177	unsigned long alignmask = crypto_shash_alignmask(tfm);
178
179	if (((unsigned long)data | (unsigned long)out) & alignmask)
180		return shash_digest_unaligned(desc, data, len, out);
181
182	return shash->digest(desc, data, len, out);
183}
184EXPORT_SYMBOL_GPL(crypto_shash_digest);
185
186static int shash_default_export(struct shash_desc *desc, void *out)
187{
188	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
189	return 0;
190}
191
192static int shash_default_import(struct shash_desc *desc, const void *in)
193{
194	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
195	return 0;
196}
197
198static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
199			      unsigned int keylen)
200{
201	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
202
203	return crypto_shash_setkey(*ctx, key, keylen);
204}
205
206static int shash_async_init(struct ahash_request *req)
207{
208	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
209	struct shash_desc *desc = ahash_request_ctx(req);
210
211	desc->tfm = *ctx;
212	desc->flags = req->base.flags;
213
214	return crypto_shash_init(desc);
215}
216
217int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
218{
219	struct crypto_hash_walk walk;
220	int nbytes;
221
222	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
223	     nbytes = crypto_hash_walk_done(&walk, nbytes))
224		nbytes = crypto_shash_update(desc, walk.data, nbytes);
225
226	return nbytes;
227}
228EXPORT_SYMBOL_GPL(shash_ahash_update);
229
230static int shash_async_update(struct ahash_request *req)
231{
232	return shash_ahash_update(req, ahash_request_ctx(req));
233}
234
235static int shash_async_final(struct ahash_request *req)
236{
237	return crypto_shash_final(ahash_request_ctx(req), req->result);
238}
239
240int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
241{
242	struct crypto_hash_walk walk;
243	int nbytes;
244
245	nbytes = crypto_hash_walk_first(req, &walk);
246	if (!nbytes)
247		return crypto_shash_final(desc, req->result);
248
249	do {
250		nbytes = crypto_hash_walk_last(&walk) ?
251			 crypto_shash_finup(desc, walk.data, nbytes,
252					    req->result) :
253			 crypto_shash_update(desc, walk.data, nbytes);
254		nbytes = crypto_hash_walk_done(&walk, nbytes);
255	} while (nbytes > 0);
256
257	return nbytes;
258}
259EXPORT_SYMBOL_GPL(shash_ahash_finup);
260
261static int shash_async_finup(struct ahash_request *req)
262{
263	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
264	struct shash_desc *desc = ahash_request_ctx(req);
265
266	desc->tfm = *ctx;
267	desc->flags = req->base.flags;
268
269	return shash_ahash_finup(req, desc);
270}
271
272int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
273{
274	struct scatterlist *sg = req->src;
275	unsigned int offset = sg->offset;
276	unsigned int nbytes = req->nbytes;
277	int err;
278
279	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
280		void *data;
281
282		data = crypto_kmap(sg_page(sg), 0);
283		err = crypto_shash_digest(desc, data + offset, nbytes,
284					  req->result);
285		crypto_kunmap(data, 0);
286		crypto_yield(desc->flags);
287	} else
288		err = crypto_shash_init(desc) ?:
289		      shash_ahash_finup(req, desc);
290
291	return err;
292}
293EXPORT_SYMBOL_GPL(shash_ahash_digest);
294
295static int shash_async_digest(struct ahash_request *req)
296{
297	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
298	struct shash_desc *desc = ahash_request_ctx(req);
299
300	desc->tfm = *ctx;
301	desc->flags = req->base.flags;
302
303	return shash_ahash_digest(req, desc);
304}
305
306static int shash_async_export(struct ahash_request *req, void *out)
307{
308	return crypto_shash_export(ahash_request_ctx(req), out);
309}
310
311static int shash_async_import(struct ahash_request *req, const void *in)
312{
313	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
314	struct shash_desc *desc = ahash_request_ctx(req);
315
316	desc->tfm = *ctx;
317	desc->flags = req->base.flags;
318
319	return crypto_shash_import(desc, in);
320}
321
322static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
323{
324	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
325
326	crypto_free_shash(*ctx);
327}
328
329int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
330{
331	struct crypto_alg *calg = tfm->__crt_alg;
332	struct shash_alg *alg = __crypto_shash_alg(calg);
333	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
334	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
335	struct crypto_shash *shash;
336
337	if (!crypto_mod_get(calg))
338		return -EAGAIN;
339
340	shash = crypto_create_tfm(calg, &crypto_shash_type);
341	if (IS_ERR(shash)) {
342		crypto_mod_put(calg);
343		return PTR_ERR(shash);
344	}
345
346	*ctx = shash;
347	tfm->exit = crypto_exit_shash_ops_async;
348
349	crt->init = shash_async_init;
350	crt->update = shash_async_update;
351	crt->final = shash_async_final;
352	crt->finup = shash_async_finup;
353	crt->digest = shash_async_digest;
354
355	if (alg->setkey)
356		crt->setkey = shash_async_setkey;
357	if (alg->export)
358		crt->export = shash_async_export;
359	if (alg->import)
360		crt->import = shash_async_import;
361
362	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
363
364	return 0;
365}
366
367static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
368			       unsigned int keylen)
369{
370	struct shash_desc **descp = crypto_hash_ctx(tfm);
371	struct shash_desc *desc = *descp;
372
373	return crypto_shash_setkey(desc->tfm, key, keylen);
374}
375
376static int shash_compat_init(struct hash_desc *hdesc)
377{
378	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
379	struct shash_desc *desc = *descp;
380
381	desc->flags = hdesc->flags;
382
383	return crypto_shash_init(desc);
384}
385
386static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
387			       unsigned int len)
388{
389	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
390	struct shash_desc *desc = *descp;
391	struct crypto_hash_walk walk;
392	int nbytes;
393
394	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
395	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
396		nbytes = crypto_shash_update(desc, walk.data, nbytes);
397
398	return nbytes;
399}
400
401static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
402{
403	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
404
405	return crypto_shash_final(*descp, out);
406}
407
408static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
409			       unsigned int nbytes, u8 *out)
410{
411	unsigned int offset = sg->offset;
412	int err;
413
414	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
415		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
416		struct shash_desc *desc = *descp;
417		void *data;
418
419		desc->flags = hdesc->flags;
420
421		data = crypto_kmap(sg_page(sg), 0);
422		err = crypto_shash_digest(desc, data + offset, nbytes, out);
423		crypto_kunmap(data, 0);
424		crypto_yield(desc->flags);
425		goto out;
426	}
427
428	err = shash_compat_init(hdesc);
429	if (err)
430		goto out;
431
432	err = shash_compat_update(hdesc, sg, nbytes);
433	if (err)
434		goto out;
435
436	err = shash_compat_final(hdesc, out);
437
438out:
439	return err;
440}
441
442static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
443{
444	struct shash_desc **descp = crypto_tfm_ctx(tfm);
445	struct shash_desc *desc = *descp;
446
447	crypto_free_shash(desc->tfm);
448	kzfree(desc);
449}
450
451static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
452{
453	struct hash_tfm *crt = &tfm->crt_hash;
454	struct crypto_alg *calg = tfm->__crt_alg;
455	struct shash_alg *alg = __crypto_shash_alg(calg);
456	struct shash_desc **descp = crypto_tfm_ctx(tfm);
457	struct crypto_shash *shash;
458	struct shash_desc *desc;
459
460	if (!crypto_mod_get(calg))
461		return -EAGAIN;
462
463	shash = crypto_create_tfm(calg, &crypto_shash_type);
464	if (IS_ERR(shash)) {
465		crypto_mod_put(calg);
466		return PTR_ERR(shash);
467	}
468
469	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
470		       GFP_KERNEL);
471	if (!desc) {
472		crypto_free_shash(shash);
473		return -ENOMEM;
474	}
475
476	*descp = desc;
477	desc->tfm = shash;
478	tfm->exit = crypto_exit_shash_ops_compat;
479
480	crt->init = shash_compat_init;
481	crt->update = shash_compat_update;
482	crt->final  = shash_compat_final;
483	crt->digest = shash_compat_digest;
484	crt->setkey = shash_compat_setkey;
485
486	crt->digestsize = alg->digestsize;
487
488	return 0;
489}
490
491static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
492{
493	switch (mask & CRYPTO_ALG_TYPE_MASK) {
494	case CRYPTO_ALG_TYPE_HASH_MASK:
495		return crypto_init_shash_ops_compat(tfm);
496	}
497
498	return -EINVAL;
499}
500
501static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
502					 u32 mask)
503{
504	switch (mask & CRYPTO_ALG_TYPE_MASK) {
505	case CRYPTO_ALG_TYPE_HASH_MASK:
506		return sizeof(struct shash_desc *);
507	}
508
509	return 0;
510}
511
512static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
513{
514	struct crypto_shash *hash = __crypto_shash_cast(tfm);
515
516	hash->descsize = crypto_shash_alg(hash)->descsize;
517	return 0;
518}
519
520static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
521{
522	return alg->cra_ctxsize;
523}
524
525static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
526	__attribute__ ((unused));
527static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
528{
529	struct shash_alg *salg = __crypto_shash_alg(alg);
530
531	seq_printf(m, "type         : shash\n");
532	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
533	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
534}
535
536static const struct crypto_type crypto_shash_type = {
537	.ctxsize = crypto_shash_ctxsize,
538	.extsize = crypto_shash_extsize,
539	.init = crypto_init_shash_ops,
540	.init_tfm = crypto_shash_init_tfm,
541#ifdef CONFIG_PROC_FS
542	.show = crypto_shash_show,
543#endif
544	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
545	.maskset = CRYPTO_ALG_TYPE_MASK,
546	.type = CRYPTO_ALG_TYPE_SHASH,
547	.tfmsize = offsetof(struct crypto_shash, base),
548};
549
550struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
551					u32 mask)
552{
553	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
554}
555EXPORT_SYMBOL_GPL(crypto_alloc_shash);
556
557static int shash_prepare_alg(struct shash_alg *alg)
558{
559	struct crypto_alg *base = &alg->base;
560
561	if (alg->digestsize > PAGE_SIZE / 8 ||
562	    alg->descsize > PAGE_SIZE / 8 ||
563	    alg->statesize > PAGE_SIZE / 8)
564		return -EINVAL;
565
566	base->cra_type = &crypto_shash_type;
567	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
568	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
569
570	if (!alg->finup)
571		alg->finup = shash_finup_unaligned;
572	if (!alg->digest)
573		alg->digest = shash_digest_unaligned;
574	if (!alg->export) {
575		alg->export = shash_default_export;
576		alg->import = shash_default_import;
577		alg->statesize = alg->descsize;
578	}
579	if (!alg->setkey)
580		alg->setkey = shash_no_setkey;
581
582	return 0;
583}
584
585int crypto_register_shash(struct shash_alg *alg)
586{
587	struct crypto_alg *base = &alg->base;
588	int err;
589
590	err = shash_prepare_alg(alg);
591	if (err)
592		return err;
593
594	return crypto_register_alg(base);
595}
596EXPORT_SYMBOL_GPL(crypto_register_shash);
597
598int crypto_unregister_shash(struct shash_alg *alg)
599{
600	return crypto_unregister_alg(&alg->base);
601}
602EXPORT_SYMBOL_GPL(crypto_unregister_shash);
603
604int shash_register_instance(struct crypto_template *tmpl,
605			    struct shash_instance *inst)
606{
607	int err;
608
609	err = shash_prepare_alg(&inst->alg);
610	if (err)
611		return err;
612
613	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
614}
615EXPORT_SYMBOL_GPL(shash_register_instance);
616
617void shash_free_instance(struct crypto_instance *inst)
618{
619	crypto_drop_spawn(crypto_instance_ctx(inst));
620	kfree(shash_instance(inst));
621}
622EXPORT_SYMBOL_GPL(shash_free_instance);
623
624int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
625			    struct shash_alg *alg,
626			    struct crypto_instance *inst)
627{
628	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
629				  &crypto_shash_type);
630}
631EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
632
633struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
634{
635	struct crypto_alg *alg;
636
637	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
638	return IS_ERR(alg) ? ERR_CAST(alg) :
639	       container_of(alg, struct shash_alg, base);
640}
641EXPORT_SYMBOL_GPL(shash_attr_alg);
642
643MODULE_LICENSE("GPL");
644MODULE_DESCRIPTION("Synchronous cryptographic hash type");
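
Both versions funnel algorithm registration through shash_prepare_alg() and crypto_register_shash()/crypto_unregister_shash(). As a rough, hypothetical sketch of the provider side (the "example-sum" algorithm, its context layout and every callback below are invented purely for illustration):

#include <crypto/internal/hash.h>
#include <linux/module.h>
#include <asm/unaligned.h>

#define EXAMPLE_DIGEST_SIZE	4

/* Per-request state; sized by .descsize, returned by shash_desc_ctx(). */
struct example_desc_ctx {
	u32 sum;	/* running byte sum, purely illustrative */
};

static int example_init(struct shash_desc *desc)
{
	struct example_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->sum = 0;
	return 0;
}

static int example_update(struct shash_desc *desc, const u8 *data,
			  unsigned int len)
{
	struct example_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->sum += *data++;
	return 0;
}

static int example_final(struct shash_desc *desc, u8 *out)
{
	struct example_desc_ctx *ctx = shash_desc_ctx(desc);

	put_unaligned_le32(ctx->sum, out);
	return 0;
}

static struct shash_alg example_alg = {
	.digestsize	= EXAMPLE_DIGEST_SIZE,
	.descsize	= sizeof(struct example_desc_ctx),
	.init		= example_init,
	.update		= example_update,
	.final		= example_final,
	.base		= {
		.cra_name	 = "example-sum",
		.cra_driver_name = "example-sum-generic",
		.cra_priority	 = 100,
		.cra_blocksize	 = 1,
		.cra_module	 = THIS_MODULE,
	},
};

static int __init example_mod_init(void)
{
	return crypto_register_shash(&example_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_shash(&example_alg);
}

module_init(example_mod_init);
module_exit(example_mod_exit);
MODULE_LICENSE("GPL");

shash_prepare_alg() then fills in the finup, digest, export/import and setkey callbacks that the sketch leaves unset, using the defaults defined in this file.
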