// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

#define MAX_SHASH_ALIGNMASK 63

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
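
/*
 * Illustrative sketch only, not part of the original file: how a caller of a
 * keyed algorithm typically drives crypto_shash_setkey().  The algorithm name
 * "hmac(sha256)" and the local variable names are assumptions made for the
 * example.
 *
 *	struct crypto_shash *tfm;
 *	u8 key[32];
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_setkey(tfm, key, sizeof(key));
 *	if (err)
 *		goto out_free;
 *	...
 * out_free:
 *	crypto_free_shash(tfm);
 *
 * A failed setkey leaves CRYPTO_TFM_NEED_KEY set, so crypto_shash_digest()
 * below rejects the transform with -ENOKEY until a valid key is provided.
 */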

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
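
/*
 * Illustrative sketch only, not part of the original file: the incremental
 * init/update/final flow as seen from a caller.  The algorithm name "sha256",
 * the buffers buf1/buf2 and SHA256_DIGEST_SIZE (from <crypto/sha2.h>) are
 * assumptions made for the example.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		err = crypto_shash_init(desc) ?:
 *		      crypto_shash_update(desc, buf1, len1) ?:
 *		      crypto_shash_update(desc, buf2, len2) ?:
 *		      crypto_shash_final(desc, digest);
 *		shash_desc_zero(desc);
 *	}
 *
 *	crypto_free_shash(tfm);
 *	return err;
 */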

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
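
/*
 * Illustrative sketch only, not part of the original file: the one-shot
 * helper above lets a caller avoid managing its own descriptor.  The
 * algorithm name "sha256" and the data/len/digest names are placeholders
 * assumed for the example.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 *	return err;
 *
 * Compared with the init/update/final flow, the descriptor is allocated on
 * the stack internally and zeroised before the helper returns.
 */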

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);
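
/*
 * Illustrative sketch only, not part of the original file: crypto_has_shash()
 * can be used to probe for an algorithm before committing to an allocation.
 * The algorithm name is an assumption made for the example.
 *
 *	if (!crypto_has_shash("sha256", 0, 0))
 *		return -ENOENT;
 */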

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
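
/*
 * Illustrative sketch only, not part of the original file: how a driver might
 * register a synchronous hash with the API above.  The algorithm "foo", the
 * foo_init/foo_update/foo_final callbacks, struct foo_desc_ctx and the size
 * macros are all hypothetical placeholders.
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= FOO_DIGEST_SIZE,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name	 = "foo",
 *			.cra_driver_name = "foo-generic",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = FOO_BLOCK_SIZE,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init foo_mod_init(void)
 *	{
 *		return crypto_register_shash(&foo_alg);
 *	}
 *
 *	static void __exit foo_mod_exit(void)
 *	{
 *		crypto_unregister_shash(&foo_alg);
 *	}
 *
 * finup, digest, export, import and setkey may be left NULL; as seen in
 * shash_prepare_alg() above, the generic fallbacks are filled in during
 * registration.
 */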

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");