Loading...
1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Synchronous Cryptographic Hash operations.
4 *
5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
6 */
7
8#include <crypto/scatterwalk.h>
9#include <linux/cryptouser.h>
10#include <linux/err.h>
11#include <linux/kernel.h>
12#include <linux/module.h>
13#include <linux/seq_file.h>
14#include <linux/string.h>
15#include <net/netlink.h>
16
17#include "hash.h"
18
19int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
20 unsigned int keylen)
21{
22 return -ENOSYS;
23}
24EXPORT_SYMBOL_GPL(shash_no_setkey);
25
26static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
27{
28 if (crypto_shash_alg_needs_key(alg))
29 crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
30}
31
32int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
33 unsigned int keylen)
34{
35 struct shash_alg *shash = crypto_shash_alg(tfm);
36 int err;
37
38 err = shash->setkey(tfm, key, keylen);
39 if (unlikely(err)) {
40 shash_set_needkey(tfm, shash);
41 return err;
42 }
43
44 crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
45 return 0;
46}
47EXPORT_SYMBOL_GPL(crypto_shash_setkey);
48
49int crypto_shash_update(struct shash_desc *desc, const u8 *data,
50 unsigned int len)
51{
52 return crypto_shash_alg(desc->tfm)->update(desc, data, len);
53}
54EXPORT_SYMBOL_GPL(crypto_shash_update);
55
56int crypto_shash_final(struct shash_desc *desc, u8 *out)
57{
58 return crypto_shash_alg(desc->tfm)->final(desc, out);
59}
60EXPORT_SYMBOL_GPL(crypto_shash_final);
61
62static int shash_default_finup(struct shash_desc *desc, const u8 *data,
63 unsigned int len, u8 *out)
64{
65 struct shash_alg *shash = crypto_shash_alg(desc->tfm);
66
67 return shash->update(desc, data, len) ?:
68 shash->final(desc, out);
69}
70
71int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
72 unsigned int len, u8 *out)
73{
74 return crypto_shash_alg(desc->tfm)->finup(desc, data, len, out);
75}
76EXPORT_SYMBOL_GPL(crypto_shash_finup);
77
78static int shash_default_digest(struct shash_desc *desc, const u8 *data,
79 unsigned int len, u8 *out)
80{
81 struct shash_alg *shash = crypto_shash_alg(desc->tfm);
82
83 return shash->init(desc) ?:
84 shash->finup(desc, data, len, out);
85}
86
87int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
88 unsigned int len, u8 *out)
89{
90 struct crypto_shash *tfm = desc->tfm;
91
92 if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
93 return -ENOKEY;
94
95 return crypto_shash_alg(tfm)->digest(desc, data, len, out);
96}
97EXPORT_SYMBOL_GPL(crypto_shash_digest);
98
99int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
100 unsigned int len, u8 *out)
101{
102 SHASH_DESC_ON_STACK(desc, tfm);
103 int err;
104
105 desc->tfm = tfm;
106
107 err = crypto_shash_digest(desc, data, len, out);
108
109 shash_desc_zero(desc);
110
111 return err;
112}
113EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
114
115int crypto_shash_export(struct shash_desc *desc, void *out)
116{
117 struct crypto_shash *tfm = desc->tfm;
118 struct shash_alg *shash = crypto_shash_alg(tfm);
119
120 if (shash->export)
121 return shash->export(desc, out);
122
123 memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
124 return 0;
125}
126EXPORT_SYMBOL_GPL(crypto_shash_export);
127
128int crypto_shash_import(struct shash_desc *desc, const void *in)
129{
130 struct crypto_shash *tfm = desc->tfm;
131 struct shash_alg *shash = crypto_shash_alg(tfm);
132
133 if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
134 return -ENOKEY;
135
136 if (shash->import)
137 return shash->import(desc, in);
138
139 memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
140 return 0;
141}
142EXPORT_SYMBOL_GPL(crypto_shash_import);
143
144static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
145{
146 struct crypto_shash *hash = __crypto_shash_cast(tfm);
147 struct shash_alg *alg = crypto_shash_alg(hash);
148
149 alg->exit_tfm(hash);
150}
151
152static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
153{
154 struct crypto_shash *hash = __crypto_shash_cast(tfm);
155 struct shash_alg *alg = crypto_shash_alg(hash);
156 int err;
157
158 hash->descsize = alg->descsize;
159
160 shash_set_needkey(hash, alg);
161
162 if (alg->exit_tfm)
163 tfm->exit = crypto_shash_exit_tfm;
164
165 if (!alg->init_tfm)
166 return 0;
167
168 err = alg->init_tfm(hash);
169 if (err)
170 return err;
171
172 /* ->init_tfm() may have increased the descsize. */
173 if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
174 if (alg->exit_tfm)
175 alg->exit_tfm(hash);
176 return -EINVAL;
177 }
178
179 return 0;
180}
181
182static void crypto_shash_free_instance(struct crypto_instance *inst)
183{
184 struct shash_instance *shash = shash_instance(inst);
185
186 shash->free(shash);
187}
188
189static int __maybe_unused crypto_shash_report(
190 struct sk_buff *skb, struct crypto_alg *alg)
191{
192 struct crypto_report_hash rhash;
193 struct shash_alg *salg = __crypto_shash_alg(alg);
194
195 memset(&rhash, 0, sizeof(rhash));
196
197 strscpy(rhash.type, "shash", sizeof(rhash.type));
198
199 rhash.blocksize = alg->cra_blocksize;
200 rhash.digestsize = salg->digestsize;
201
202 return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
203}
204
205static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
206 __maybe_unused;
207static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
208{
209 struct shash_alg *salg = __crypto_shash_alg(alg);
210
211 seq_printf(m, "type : shash\n");
212 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
213 seq_printf(m, "digestsize : %u\n", salg->digestsize);
214}
215
216const struct crypto_type crypto_shash_type = {
217 .extsize = crypto_alg_extsize,
218 .init_tfm = crypto_shash_init_tfm,
219 .free = crypto_shash_free_instance,
220#ifdef CONFIG_PROC_FS
221 .show = crypto_shash_show,
222#endif
223#if IS_ENABLED(CONFIG_CRYPTO_USER)
224 .report = crypto_shash_report,
225#endif
226 .maskclear = ~CRYPTO_ALG_TYPE_MASK,
227 .maskset = CRYPTO_ALG_TYPE_MASK,
228 .type = CRYPTO_ALG_TYPE_SHASH,
229 .tfmsize = offsetof(struct crypto_shash, base),
230};
231
232int crypto_grab_shash(struct crypto_shash_spawn *spawn,
233 struct crypto_instance *inst,
234 const char *name, u32 type, u32 mask)
235{
236 spawn->base.frontend = &crypto_shash_type;
237 return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
238}
239EXPORT_SYMBOL_GPL(crypto_grab_shash);
240
241struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
242 u32 mask)
243{
244 return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
245}
246EXPORT_SYMBOL_GPL(crypto_alloc_shash);
247
248int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
249{
250 return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
251}
252EXPORT_SYMBOL_GPL(crypto_has_shash);
253
254struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
255{
256 struct crypto_tfm *tfm = crypto_shash_tfm(hash);
257 struct shash_alg *alg = crypto_shash_alg(hash);
258 struct crypto_shash *nhash;
259 int err;
260
261 if (!crypto_shash_alg_has_setkey(alg)) {
262 tfm = crypto_tfm_get(tfm);
263 if (IS_ERR(tfm))
264 return ERR_CAST(tfm);
265
266 return hash;
267 }
268
269 if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
270 return ERR_PTR(-ENOSYS);
271
272 nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
273 if (IS_ERR(nhash))
274 return nhash;
275
276 nhash->descsize = hash->descsize;
277
278 if (alg->clone_tfm) {
279 err = alg->clone_tfm(nhash, hash);
280 if (err) {
281 crypto_free_shash(nhash);
282 return ERR_PTR(err);
283 }
284 }
285
286 return nhash;
287}
288EXPORT_SYMBOL_GPL(crypto_clone_shash);
289
290int hash_prepare_alg(struct hash_alg_common *alg)
291{
292 struct crypto_alg *base = &alg->base;
293
294 if (alg->digestsize > HASH_MAX_DIGESTSIZE)
295 return -EINVAL;
296
297 /* alignmask is not useful for hashes, so it is not supported. */
298 if (base->cra_alignmask)
299 return -EINVAL;
300
301 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
302
303 return 0;
304}
305
306static int shash_prepare_alg(struct shash_alg *alg)
307{
308 struct crypto_alg *base = &alg->halg.base;
309 int err;
310
311 if (alg->descsize > HASH_MAX_DESCSIZE)
312 return -EINVAL;
313
314 if ((alg->export && !alg->import) || (alg->import && !alg->export))
315 return -EINVAL;
316
317 err = hash_prepare_alg(&alg->halg);
318 if (err)
319 return err;
320
321 base->cra_type = &crypto_shash_type;
322 base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
323
324 /*
325 * Handle missing optional functions. For each one we can either
326 * install a default here, or we can leave the pointer as NULL and check
327 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
328 * when the default behavior is desired. For ->finup and ->digest we
329 * install defaults, since for optimal performance algorithms should
330 * implement these anyway. On the other hand, for ->import and
331 * ->export the common case and best performance comes from the simple
332 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
333 * leave them NULL and provide the memcpy with no indirect call.
334 */
335 if (!alg->finup)
336 alg->finup = shash_default_finup;
337 if (!alg->digest)
338 alg->digest = shash_default_digest;
339 if (!alg->export)
340 alg->halg.statesize = alg->descsize;
341 if (!alg->setkey)
342 alg->setkey = shash_no_setkey;
343
344 return 0;
345}
346
347int crypto_register_shash(struct shash_alg *alg)
348{
349 struct crypto_alg *base = &alg->base;
350 int err;
351
352 err = shash_prepare_alg(alg);
353 if (err)
354 return err;
355
356 return crypto_register_alg(base);
357}
358EXPORT_SYMBOL_GPL(crypto_register_shash);
359
360void crypto_unregister_shash(struct shash_alg *alg)
361{
362 crypto_unregister_alg(&alg->base);
363}
364EXPORT_SYMBOL_GPL(crypto_unregister_shash);
365
366int crypto_register_shashes(struct shash_alg *algs, int count)
367{
368 int i, ret;
369
370 for (i = 0; i < count; i++) {
371 ret = crypto_register_shash(&algs[i]);
372 if (ret)
373 goto err;
374 }
375
376 return 0;
377
378err:
379 for (--i; i >= 0; --i)
380 crypto_unregister_shash(&algs[i]);
381
382 return ret;
383}
384EXPORT_SYMBOL_GPL(crypto_register_shashes);
385
386void crypto_unregister_shashes(struct shash_alg *algs, int count)
387{
388 int i;
389
390 for (i = count - 1; i >= 0; --i)
391 crypto_unregister_shash(&algs[i]);
392}
393EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
394
395int shash_register_instance(struct crypto_template *tmpl,
396 struct shash_instance *inst)
397{
398 int err;
399
400 if (WARN_ON(!inst->free))
401 return -EINVAL;
402
403 err = shash_prepare_alg(&inst->alg);
404 if (err)
405 return err;
406
407 return crypto_register_instance(tmpl, shash_crypto_instance(inst));
408}
409EXPORT_SYMBOL_GPL(shash_register_instance);
410
411void shash_free_singlespawn_instance(struct shash_instance *inst)
412{
413 crypto_drop_spawn(shash_instance_ctx(inst));
414 kfree(inst);
415}
416EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
417
418MODULE_LICENSE("GPL");
419MODULE_DESCRIPTION("Synchronous cryptographic hash type");
1// SPDX-License-Identifier: GPL-2.0-or-later
2/*
3 * Synchronous Cryptographic Hash operations.
4 *
5 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
6 */
7
8#include <crypto/scatterwalk.h>
9#include <crypto/internal/hash.h>
10#include <linux/err.h>
11#include <linux/kernel.h>
12#include <linux/module.h>
13#include <linux/slab.h>
14#include <linux/seq_file.h>
15#include <linux/cryptouser.h>
16#include <net/netlink.h>
17#include <linux/compiler.h>
18
19#include "internal.h"
20
21#define MAX_SHASH_ALIGNMASK 63
22
23static const struct crypto_type crypto_shash_type;
24
25int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
26 unsigned int keylen)
27{
28 return -ENOSYS;
29}
30EXPORT_SYMBOL_GPL(shash_no_setkey);
31
32static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
33 unsigned int keylen)
34{
35 struct shash_alg *shash = crypto_shash_alg(tfm);
36 unsigned long alignmask = crypto_shash_alignmask(tfm);
37 unsigned long absize;
38 u8 *buffer, *alignbuffer;
39 int err;
40
41 absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
42 buffer = kmalloc(absize, GFP_ATOMIC);
43 if (!buffer)
44 return -ENOMEM;
45
46 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
47 memcpy(alignbuffer, key, keylen);
48 err = shash->setkey(tfm, alignbuffer, keylen);
49 kfree_sensitive(buffer);
50 return err;
51}
52
53static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
54{
55 if (crypto_shash_alg_needs_key(alg))
56 crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
57}
58
59int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
60 unsigned int keylen)
61{
62 struct shash_alg *shash = crypto_shash_alg(tfm);
63 unsigned long alignmask = crypto_shash_alignmask(tfm);
64 int err;
65
66 if ((unsigned long)key & alignmask)
67 err = shash_setkey_unaligned(tfm, key, keylen);
68 else
69 err = shash->setkey(tfm, key, keylen);
70
71 if (unlikely(err)) {
72 shash_set_needkey(tfm, shash);
73 return err;
74 }
75
76 crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
77 return 0;
78}
79EXPORT_SYMBOL_GPL(crypto_shash_setkey);
80
81static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
82 unsigned int len)
83{
84 struct crypto_shash *tfm = desc->tfm;
85 struct shash_alg *shash = crypto_shash_alg(tfm);
86 unsigned long alignmask = crypto_shash_alignmask(tfm);
87 unsigned int unaligned_len = alignmask + 1 -
88 ((unsigned long)data & alignmask);
89 /*
90 * We cannot count on __aligned() working for large values:
91 * https://patchwork.kernel.org/patch/9507697/
92 */
93 u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
94 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
95 int err;
96
97 if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
98 return -EINVAL;
99
100 if (unaligned_len > len)
101 unaligned_len = len;
102
103 memcpy(buf, data, unaligned_len);
104 err = shash->update(desc, buf, unaligned_len);
105 memset(buf, 0, unaligned_len);
106
107 return err ?:
108 shash->update(desc, data + unaligned_len, len - unaligned_len);
109}
110
111int crypto_shash_update(struct shash_desc *desc, const u8 *data,
112 unsigned int len)
113{
114 struct crypto_shash *tfm = desc->tfm;
115 struct shash_alg *shash = crypto_shash_alg(tfm);
116 unsigned long alignmask = crypto_shash_alignmask(tfm);
117
118 if ((unsigned long)data & alignmask)
119 return shash_update_unaligned(desc, data, len);
120
121 return shash->update(desc, data, len);
122}
123EXPORT_SYMBOL_GPL(crypto_shash_update);
124
125static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
126{
127 struct crypto_shash *tfm = desc->tfm;
128 unsigned long alignmask = crypto_shash_alignmask(tfm);
129 struct shash_alg *shash = crypto_shash_alg(tfm);
130 unsigned int ds = crypto_shash_digestsize(tfm);
131 /*
132 * We cannot count on __aligned() working for large values:
133 * https://patchwork.kernel.org/patch/9507697/
134 */
135 u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
136 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
137 int err;
138
139 if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
140 return -EINVAL;
141
142 err = shash->final(desc, buf);
143 if (err)
144 goto out;
145
146 memcpy(out, buf, ds);
147
148out:
149 memset(buf, 0, ds);
150 return err;
151}
152
153int crypto_shash_final(struct shash_desc *desc, u8 *out)
154{
155 struct crypto_shash *tfm = desc->tfm;
156 struct shash_alg *shash = crypto_shash_alg(tfm);
157 unsigned long alignmask = crypto_shash_alignmask(tfm);
158
159 if ((unsigned long)out & alignmask)
160 return shash_final_unaligned(desc, out);
161
162 return shash->final(desc, out);
163}
164EXPORT_SYMBOL_GPL(crypto_shash_final);
165
166static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
167 unsigned int len, u8 *out)
168{
169 return crypto_shash_update(desc, data, len) ?:
170 crypto_shash_final(desc, out);
171}
172
173int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
174 unsigned int len, u8 *out)
175{
176 struct crypto_shash *tfm = desc->tfm;
177 struct shash_alg *shash = crypto_shash_alg(tfm);
178 unsigned long alignmask = crypto_shash_alignmask(tfm);
179
180 if (((unsigned long)data | (unsigned long)out) & alignmask)
181 return shash_finup_unaligned(desc, data, len, out);
182
183 return shash->finup(desc, data, len, out);
184}
185EXPORT_SYMBOL_GPL(crypto_shash_finup);
186
187static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
188 unsigned int len, u8 *out)
189{
190 return crypto_shash_init(desc) ?:
191 crypto_shash_finup(desc, data, len, out);
192}
193
194int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
195 unsigned int len, u8 *out)
196{
197 struct crypto_shash *tfm = desc->tfm;
198 struct shash_alg *shash = crypto_shash_alg(tfm);
199 unsigned long alignmask = crypto_shash_alignmask(tfm);
200
201 if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
202 return -ENOKEY;
203
204 if (((unsigned long)data | (unsigned long)out) & alignmask)
205 return shash_digest_unaligned(desc, data, len, out);
206
207 return shash->digest(desc, data, len, out);
208}
209EXPORT_SYMBOL_GPL(crypto_shash_digest);
210
211int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
212 unsigned int len, u8 *out)
213{
214 SHASH_DESC_ON_STACK(desc, tfm);
215 int err;
216
217 desc->tfm = tfm;
218
219 err = crypto_shash_digest(desc, data, len, out);
220
221 shash_desc_zero(desc);
222
223 return err;
224}
225EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
226
227static int shash_default_export(struct shash_desc *desc, void *out)
228{
229 memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
230 return 0;
231}
232
233static int shash_default_import(struct shash_desc *desc, const void *in)
234{
235 memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
236 return 0;
237}
238
239static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
240 unsigned int keylen)
241{
242 struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
243
244 return crypto_shash_setkey(*ctx, key, keylen);
245}
246
247static int shash_async_init(struct ahash_request *req)
248{
249 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
250 struct shash_desc *desc = ahash_request_ctx(req);
251
252 desc->tfm = *ctx;
253
254 return crypto_shash_init(desc);
255}
256
257int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
258{
259 struct crypto_hash_walk walk;
260 int nbytes;
261
262 for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
263 nbytes = crypto_hash_walk_done(&walk, nbytes))
264 nbytes = crypto_shash_update(desc, walk.data, nbytes);
265
266 return nbytes;
267}
268EXPORT_SYMBOL_GPL(shash_ahash_update);
269
270static int shash_async_update(struct ahash_request *req)
271{
272 return shash_ahash_update(req, ahash_request_ctx(req));
273}
274
275static int shash_async_final(struct ahash_request *req)
276{
277 return crypto_shash_final(ahash_request_ctx(req), req->result);
278}
279
280int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
281{
282 struct crypto_hash_walk walk;
283 int nbytes;
284
285 nbytes = crypto_hash_walk_first(req, &walk);
286 if (!nbytes)
287 return crypto_shash_final(desc, req->result);
288
289 do {
290 nbytes = crypto_hash_walk_last(&walk) ?
291 crypto_shash_finup(desc, walk.data, nbytes,
292 req->result) :
293 crypto_shash_update(desc, walk.data, nbytes);
294 nbytes = crypto_hash_walk_done(&walk, nbytes);
295 } while (nbytes > 0);
296
297 return nbytes;
298}
299EXPORT_SYMBOL_GPL(shash_ahash_finup);
300
301static int shash_async_finup(struct ahash_request *req)
302{
303 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
304 struct shash_desc *desc = ahash_request_ctx(req);
305
306 desc->tfm = *ctx;
307
308 return shash_ahash_finup(req, desc);
309}
310
311int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
312{
313 unsigned int nbytes = req->nbytes;
314 struct scatterlist *sg;
315 unsigned int offset;
316 int err;
317
318 if (nbytes &&
319 (sg = req->src, offset = sg->offset,
320 nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
321 void *data;
322
323 data = kmap_atomic(sg_page(sg));
324 err = crypto_shash_digest(desc, data + offset, nbytes,
325 req->result);
326 kunmap_atomic(data);
327 } else
328 err = crypto_shash_init(desc) ?:
329 shash_ahash_finup(req, desc);
330
331 return err;
332}
333EXPORT_SYMBOL_GPL(shash_ahash_digest);
334
335static int shash_async_digest(struct ahash_request *req)
336{
337 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
338 struct shash_desc *desc = ahash_request_ctx(req);
339
340 desc->tfm = *ctx;
341
342 return shash_ahash_digest(req, desc);
343}
344
345static int shash_async_export(struct ahash_request *req, void *out)
346{
347 return crypto_shash_export(ahash_request_ctx(req), out);
348}
349
350static int shash_async_import(struct ahash_request *req, const void *in)
351{
352 struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
353 struct shash_desc *desc = ahash_request_ctx(req);
354
355 desc->tfm = *ctx;
356
357 return crypto_shash_import(desc, in);
358}
359
360static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
361{
362 struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
363
364 crypto_free_shash(*ctx);
365}
366
367int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
368{
369 struct crypto_alg *calg = tfm->__crt_alg;
370 struct shash_alg *alg = __crypto_shash_alg(calg);
371 struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
372 struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
373 struct crypto_shash *shash;
374
375 if (!crypto_mod_get(calg))
376 return -EAGAIN;
377
378 shash = crypto_create_tfm(calg, &crypto_shash_type);
379 if (IS_ERR(shash)) {
380 crypto_mod_put(calg);
381 return PTR_ERR(shash);
382 }
383
384 *ctx = shash;
385 tfm->exit = crypto_exit_shash_ops_async;
386
387 crt->init = shash_async_init;
388 crt->update = shash_async_update;
389 crt->final = shash_async_final;
390 crt->finup = shash_async_finup;
391 crt->digest = shash_async_digest;
392 if (crypto_shash_alg_has_setkey(alg))
393 crt->setkey = shash_async_setkey;
394
395 crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
396 CRYPTO_TFM_NEED_KEY);
397
398 crt->export = shash_async_export;
399 crt->import = shash_async_import;
400
401 crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
402
403 return 0;
404}
405
406static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
407{
408 struct crypto_shash *hash = __crypto_shash_cast(tfm);
409 struct shash_alg *alg = crypto_shash_alg(hash);
410
411 alg->exit_tfm(hash);
412}
413
414static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
415{
416 struct crypto_shash *hash = __crypto_shash_cast(tfm);
417 struct shash_alg *alg = crypto_shash_alg(hash);
418 int err;
419
420 hash->descsize = alg->descsize;
421
422 shash_set_needkey(hash, alg);
423
424 if (alg->exit_tfm)
425 tfm->exit = crypto_shash_exit_tfm;
426
427 if (!alg->init_tfm)
428 return 0;
429
430 err = alg->init_tfm(hash);
431 if (err)
432 return err;
433
434 /* ->init_tfm() may have increased the descsize. */
435 if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
436 if (alg->exit_tfm)
437 alg->exit_tfm(hash);
438 return -EINVAL;
439 }
440
441 return 0;
442}
443
444static void crypto_shash_free_instance(struct crypto_instance *inst)
445{
446 struct shash_instance *shash = shash_instance(inst);
447
448 shash->free(shash);
449}
450
451#ifdef CONFIG_NET
452static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
453{
454 struct crypto_report_hash rhash;
455 struct shash_alg *salg = __crypto_shash_alg(alg);
456
457 memset(&rhash, 0, sizeof(rhash));
458
459 strscpy(rhash.type, "shash", sizeof(rhash.type));
460
461 rhash.blocksize = alg->cra_blocksize;
462 rhash.digestsize = salg->digestsize;
463
464 return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
465}
466#else
467static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
468{
469 return -ENOSYS;
470}
471#endif
472
473static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
474 __maybe_unused;
475static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
476{
477 struct shash_alg *salg = __crypto_shash_alg(alg);
478
479 seq_printf(m, "type : shash\n");
480 seq_printf(m, "blocksize : %u\n", alg->cra_blocksize);
481 seq_printf(m, "digestsize : %u\n", salg->digestsize);
482}
483
484static const struct crypto_type crypto_shash_type = {
485 .extsize = crypto_alg_extsize,
486 .init_tfm = crypto_shash_init_tfm,
487 .free = crypto_shash_free_instance,
488#ifdef CONFIG_PROC_FS
489 .show = crypto_shash_show,
490#endif
491 .report = crypto_shash_report,
492 .maskclear = ~CRYPTO_ALG_TYPE_MASK,
493 .maskset = CRYPTO_ALG_TYPE_MASK,
494 .type = CRYPTO_ALG_TYPE_SHASH,
495 .tfmsize = offsetof(struct crypto_shash, base),
496};
497
498int crypto_grab_shash(struct crypto_shash_spawn *spawn,
499 struct crypto_instance *inst,
500 const char *name, u32 type, u32 mask)
501{
502 spawn->base.frontend = &crypto_shash_type;
503 return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
504}
505EXPORT_SYMBOL_GPL(crypto_grab_shash);
506
507struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
508 u32 mask)
509{
510 return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
511}
512EXPORT_SYMBOL_GPL(crypto_alloc_shash);
513
514int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
515{
516 return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
517}
518EXPORT_SYMBOL_GPL(crypto_has_shash);
519
520static int shash_prepare_alg(struct shash_alg *alg)
521{
522 struct crypto_alg *base = &alg->base;
523
524 if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
525 alg->descsize > HASH_MAX_DESCSIZE ||
526 alg->statesize > HASH_MAX_STATESIZE)
527 return -EINVAL;
528
529 if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
530 return -EINVAL;
531
532 if ((alg->export && !alg->import) || (alg->import && !alg->export))
533 return -EINVAL;
534
535 base->cra_type = &crypto_shash_type;
536 base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
537 base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
538
539 if (!alg->finup)
540 alg->finup = shash_finup_unaligned;
541 if (!alg->digest)
542 alg->digest = shash_digest_unaligned;
543 if (!alg->export) {
544 alg->export = shash_default_export;
545 alg->import = shash_default_import;
546 alg->statesize = alg->descsize;
547 }
548 if (!alg->setkey)
549 alg->setkey = shash_no_setkey;
550
551 return 0;
552}
553
554int crypto_register_shash(struct shash_alg *alg)
555{
556 struct crypto_alg *base = &alg->base;
557 int err;
558
559 err = shash_prepare_alg(alg);
560 if (err)
561 return err;
562
563 return crypto_register_alg(base);
564}
565EXPORT_SYMBOL_GPL(crypto_register_shash);
566
567void crypto_unregister_shash(struct shash_alg *alg)
568{
569 crypto_unregister_alg(&alg->base);
570}
571EXPORT_SYMBOL_GPL(crypto_unregister_shash);
572
573int crypto_register_shashes(struct shash_alg *algs, int count)
574{
575 int i, ret;
576
577 for (i = 0; i < count; i++) {
578 ret = crypto_register_shash(&algs[i]);
579 if (ret)
580 goto err;
581 }
582
583 return 0;
584
585err:
586 for (--i; i >= 0; --i)
587 crypto_unregister_shash(&algs[i]);
588
589 return ret;
590}
591EXPORT_SYMBOL_GPL(crypto_register_shashes);
592
593void crypto_unregister_shashes(struct shash_alg *algs, int count)
594{
595 int i;
596
597 for (i = count - 1; i >= 0; --i)
598 crypto_unregister_shash(&algs[i]);
599}
600EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
601
602int shash_register_instance(struct crypto_template *tmpl,
603 struct shash_instance *inst)
604{
605 int err;
606
607 if (WARN_ON(!inst->free))
608 return -EINVAL;
609
610 err = shash_prepare_alg(&inst->alg);
611 if (err)
612 return err;
613
614 return crypto_register_instance(tmpl, shash_crypto_instance(inst));
615}
616EXPORT_SYMBOL_GPL(shash_register_instance);
617
618void shash_free_singlespawn_instance(struct shash_instance *inst)
619{
620 crypto_drop_spawn(shash_instance_ctx(inst));
621 kfree(inst);
622}
623EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
624
625MODULE_LICENSE("GPL");
626MODULE_DESCRIPTION("Synchronous cryptographic hash type");