crypto/essiv.c (Linux v6.13.7)

// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt, which converts the initial vector for the skcipher
 * used for block encryption, by encrypting it using the hash of the
 * skcipher key as encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * happens to use it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
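
/*
 * Illustrative sketch, not part of this file: a block layer user such as
 * dm-crypt would consume the skcipher flavor of this template through the
 * ordinary crypto API, roughly as follows (error handling omitted; key,
 * keylen, sector, src_sg, dst_sg and data_len stand in for the caller's
 * own data):
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	__le64 le_sector = cpu_to_le64(sector);
 *	u8 iv[16] = {};
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	memcpy(iv, &le_sector, sizeof(le_sector));
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, data_len, iv);
 *	crypto_skcipher_encrypt(req);
 *
 * The IV passed in is the LE sector number zero-padded to the IV size; the
 * template converts it to E(H(key), IV) before the inner cbc(aes) transform
 * sees it. A real user would also set a completion callback and wait for
 * asynchronous completion.
 */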

#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher		*essiv_cipher;
	struct crypto_shash		*hash;
	int				ivoffset;
};

struct essiv_aead_request_ctx {
	struct scatterlist		sg[4];
	u8				*assoc;
	struct aead_request		aead_req;
};

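/*
 * Key handling for the skcipher flavor: the key is passed through to the
 * inner skcipher unmodified, while the ESSIV block cipher is keyed with the
 * digest of that key (essiv_key = H(key)). The digest size must be an
 * acceptable key size for the ESSIV cipher, which is checked at
 * instantiation time by essiv_supported_algorithms().
 */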
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	if (err)
		return err;

	err = crypto_shash_tfm_digest(tctx->hash, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

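/*
 * Key handling for the aead flavor: the key arrives in the authenc() key
 * format and is passed through to the inner aead unmodified, while the
 * ESSIV cipher key is derived by hashing the encryption key and the
 * authentication key concatenated in that order.
 */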
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		return -EINVAL;

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	return crypto_cipher_setkey(tctx->essiv_cipher, salt,
				    crypto_shash_digestsize(tctx->hash));
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(void *data, int err)
{
	struct skcipher_request *req = data;

	skcipher_request_complete(req, err);
}

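/*
 * Encryption and decryption both start by converting req->iv in place: the
 * caller's IV is encrypted with the ESSIV cipher, and the result is what
 * the inner skcipher sees as its IV.
 */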
static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(void *data, int err)
{
	struct aead_request *req = data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

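/*
 * For the aead flavor, the encrypted IV has to end up inside the AAD that
 * the inner authenc() transform processes. When the request is in-place or
 * a decryption, it is simply copied into req->dst at the slot preceding the
 * payload. When encrypting with distinct src/dst buffers, the caller's
 * source must not be modified, so a local source scatterlist is assembled
 * instead:
 *   sg[0]:       the associated data preceding the IV (bounced into a
 *                kmalloc'ed buffer if it spans more than one entry)
 *   sg[1]:       the encrypted IV, stashed in the request context at
 *                ivoffset
 *   sg[2]/sg[3]: the remainder of req->src, located with scatterwalk_ffwd()
 *                and chained back into the caller's scatterlist as needed
 */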
static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);
	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
					 crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

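/*
 * The per-request context for the aead flavor is laid out as
 *
 *   struct essiv_aead_request_ctx | inner aead request + ctx | IV copy
 *
 * with aead_req required to be the last member of the struct (see the
 * BUILD_BUG_ON below). tctx->ivoffset records where the IV copy starts,
 * giving essiv_aead_crypt() a per-request buffer for the encrypted IV.
 */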
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = sizeof_field(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

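/*
 * Derive the name of the raw block cipher from the cra_name of the inner
 * transform: the substring between the last '(' and the following ')',
 * e.g. both "cbc(aes)" and "authenc(hmac(sha256),cbc(aes))" yield "aes".
 */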
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

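/*
 * An instantiation is only accepted if the hash digest can be used directly
 * as a key for the ESSIV cipher, the IV size of the block encryption
 * algorithm equals the ESSIV cipher's block size, and the hash is unkeyed.
 */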
static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_needs_key(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

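/*
 * Template instantiation. Requesting "essiv(cbc(aes),sha256)", for example,
 * grabs the "cbc(aes)" skcipher and the "sha256" shash, derives the ESSIV
 * cipher name "aes" from the skcipher's cra_name, and registers an instance
 * whose driver name is built from the drivers actually selected (e.g.
 * something along the lines of "essiv(cbc(aes-generic),sha256-generic)"
 * when the generic implementations are picked).
 */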
static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_alg_common *skcipher_alg = NULL;
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	u32 mask;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;
	mask = crypto_algt_inherited_mask(algt);

	switch (type) {
	case CRYPTO_ALG_TYPE_LSKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn, inst,
					   inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg_common(
			&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = skcipher_alg->ivsize;
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		err = crypto_grab_aead(&ictx->u.aead_spawn, inst,
				       inner_cipher_name, 0, mask);
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK | mask);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strscpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	/*
	 * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its
	 * flags manually.
	 */
	base->cra_flags        |= (hash_alg->base.cra_flags &
				   CRYPTO_ALG_INHERITED_FLAGS);
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_LSKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= skcipher_alg->min_keysize;
		skcipher_inst->alg.max_keysize	= skcipher_alg->max_keysize;
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= skcipher_alg->chunksize;

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_LSKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");