v6.8: crypto/shash.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
{
	if (IS_ENABLED(CONFIG_CRYPTO_STATS) && err)
		atomic64_inc(&shash_get_stat(alg)->err_cnt);
	return err;
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	err = shash->setkey(tfm, key, keylen);
	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);

	err = shash->update(desc, data, len);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&shash_get_stat(shash)->hash_cnt);

	err = shash->final(desc, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_default_finup(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->update(desc, data, len) ?:
	       shash->final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	err = shash->finup(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_default_digest(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->init(desc) ?:
	       shash->finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else
		err = shash->digest(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);

int crypto_shash_export(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (shash->export)
		return shash->export(desc, out);

	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_export);

int crypto_shash_import(struct shash_desc *desc, const void *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (shash->import)
		return shash->import(desc, in);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static int __maybe_unused crypto_shash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "shash");
}

const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_shash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_istat_hash *istat = hash_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	/* alignmask is not useful for hashes, so it is not supported. */
	if (base->cra_alignmask)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/*
	 * Handle missing optional functions.  For each one we can either
	 * install a default here, or we can leave the pointer as NULL and check
	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
	 * when the default behavior is desired.  For ->finup and ->digest we
	 * install defaults, since for optimal performance algorithms should
	 * implement these anyway.  On the other hand, for ->import and
	 * ->export the common case and best performance comes from the simple
	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
	 * leave them NULL and provide the memcpy with no indirect call.
	 */
	if (!alg->finup)
		alg->finup = shash_default_finup;
	if (!alg->digest)
		alg->digest = shash_default_digest;
	if (!alg->export)
		alg->halg.statesize = alg->descsize;
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
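
A minimal caller-side sketch (not part of shash.c) of the one-shot digest path above: crypto_alloc_shash() looks up a registered shash algorithm and crypto_shash_tfm_digest() performs the on-stack descriptor handling shown in this file. The helper name example_sha256_digest and the choice of "sha256" are illustrative assumptions.

#include <crypto/hash.h>
#include <linux/err.h>

static int example_sha256_digest(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	/* Look up any registered "sha256" shash implementation. */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* out must hold crypto_shash_digestsize(tfm) bytes (32 for sha256). */
	err = crypto_shash_tfm_digest(tfm, data, len, out);

	crypto_free_shash(tfm);
	return err;
}

crypto_shash_tfm_digest() does not exist in the v5.4 version below; there a caller manages the descriptor itself, as sketched after that listing.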
v5.4: crypto/shash.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	return 0;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");
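
For the v5.4 API above, which lacks crypto_shash_tfm_digest(), the caller allocates and clears the descriptor itself. A minimal sketch, assuming a registered "sha256" provider; the helper name example_sha256_digest_v54 is illustrative:

#include <crypto/hash.h>
#include <linux/err.h>

static int example_sha256_digest_v54(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		/* Stack descriptor sized for the largest supported state. */
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		/* Unaligned data is routed through the fallbacks above. */
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}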