crypto/authencesn.c (Linux v6.2)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
  4 *                 derived from authenc.c
  5 *
  6 * Copyright (C) 2010 secunet Security Networks AG
  7 * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
  8 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
  9 */
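/*
 * Usage sketch (illustrative only, not part of the original file):
 * "authencesn" is a crypto template, so it is instantiated by name rather
 * than called directly.  Assuming an HMAC and a CBC cipher are available,
 * something like the following binds them together; the inner algorithm
 * names here are examples, not a requirement of this code:
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authencesn(hmac(sha256),cbc(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_aead(tfm);
 *
 * The key is then set with crypto_aead_setkey() using the combined
 * authenc key blob described at crypto_authenc_esn_setkey() below, and
 * requests go through the usual aead_request_*() API.  In practice this
 * template is requested by the IPsec ESP layer when extended sequence
 * numbers (ESN) are in use.
 */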
 10
 11#include <crypto/internal/aead.h>
 12#include <crypto/internal/hash.h>
 13#include <crypto/internal/skcipher.h>
 14#include <crypto/authenc.h>
 15#include <crypto/null.h>
 16#include <crypto/scatterwalk.h>
 17#include <linux/err.h>
 18#include <linux/init.h>
 19#include <linux/kernel.h>
 20#include <linux/module.h>
 21#include <linux/rtnetlink.h>
 22#include <linux/slab.h>
 23#include <linux/spinlock.h>
 24
 25struct authenc_esn_instance_ctx {
 26	struct crypto_ahash_spawn auth;
 27	struct crypto_skcipher_spawn enc;
 28};
 29
 30struct crypto_authenc_esn_ctx {
 31	unsigned int reqoff;
 32	struct crypto_ahash *auth;
 33	struct crypto_skcipher *enc;
 34	struct crypto_sync_skcipher *null;
 35};
 36
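/*
 * Per-request scratch space.  src[]/dst[] are used with scatterwalk_ffwd()
 * to build scatterlists that skip over the associated data.  tail[] is
 * sized in crypto_authenc_esn_init_tfm(): it begins with the hash output
 * area (two digests worth, aligned to the ahash alignmask), and at
 * ctx->reqoff it holds the ahash or skcipher sub-request.
 */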
 37struct authenc_esn_request_ctx {
 38	struct scatterlist src[2];
 39	struct scatterlist dst[2];
 40	char tail[];
 41};
 42
 43static void authenc_esn_request_complete(struct aead_request *req, int err)
 44{
 45	if (err != -EINPROGRESS)
 46		aead_request_complete(req, err);
 47}
 48
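/*
 * The ICV may be disabled completely (authsize == 0); otherwise it must be
 * at least 32 bits, so 1-3 byte tags are rejected here rather than at
 * request time.
 */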
 49static int crypto_authenc_esn_setauthsize(struct crypto_aead *authenc_esn,
 50					  unsigned int authsize)
 51{
 52	if (authsize > 0 && authsize < 4)
 53		return -EINVAL;
 54
 55	return 0;
 56}
 57
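/*
 * The key is the combined authenc key blob understood by
 * crypto_authenc_extractkeys() (declared in <crypto/authenc.h>, included
 * above).  Its layout is, in order:
 *
 *	struct rtattr header (rta_type == CRYPTO_AUTHENC_KEYA_PARAM)
 *	struct crypto_authenc_key_param { __be32 enckeylen; }
 *	authentication key
 *	encryption key (enckeylen bytes)
 *
 * The two halves are handed to the ahash and skcipher transforms, with the
 * caller's CRYPTO_TFM_REQ_* flags propagated to both.
 */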
 58static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
 59				     unsigned int keylen)
 60{
 61	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
 62	struct crypto_ahash *auth = ctx->auth;
 63	struct crypto_skcipher *enc = ctx->enc;
 64	struct crypto_authenc_keys keys;
 65	int err = -EINVAL;
 66
 67	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
 68		goto out;
 69
 70	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
 71	crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
 72				     CRYPTO_TFM_REQ_MASK);
 73	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
 74	if (err)
 75		goto out;
 76
 77	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
 78	crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
 79					 CRYPTO_TFM_REQ_MASK);
 80	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
 81out:
 82	memzero_explicit(&keys, sizeof(keys));
 83	return err;
 84}
 85
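/*
 * Second half of ICV generation: undo the sequence-number shuffle done by
 * crypto_authenc_esn_genicv() (see the comment there for the layout) and
 * store the freshly computed ICV behind the ciphertext.
 */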
 86static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
 87					  unsigned int flags)
 88{
 89	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
 90	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
 91	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
 92	struct crypto_ahash *auth = ctx->auth;
 93	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
 94			     crypto_ahash_alignmask(auth) + 1);
 95	unsigned int authsize = crypto_aead_authsize(authenc_esn);
 96	unsigned int assoclen = req->assoclen;
 97	unsigned int cryptlen = req->cryptlen;
 98	struct scatterlist *dst = req->dst;
 99	u32 tmp[2];
100
101	/* Move high-order bits of sequence number back. */
102	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
103	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
104	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);
105
106	scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
107	return 0;
108}
109
110static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
111					 int err)
112{
113	struct aead_request *req = areq->data;
114
115	err = err ?: crypto_authenc_esn_genicv_tail(req, 0);
116	aead_request_complete(req, err);
117}
118
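/*
 * ICV generation for ESN.  The associated data handed in by the ESP layer
 * is presumably laid out as
 *
 *	SPI (4) | seq_hi (4) | seq_lo (4)
 *
 * while for extended sequence numbers the ICV must cover
 *
 *	SPI | seq_lo | ciphertext | seq_hi
 *
 * So the first four bytes are copied into the seq_hi slot at offset 4,
 * seq_hi is appended just after the ciphertext, and the hash is taken over
 * assoclen + cryptlen bytes starting at offset 4.  The original layout is
 * restored in crypto_authenc_esn_genicv_tail().
 */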
119static int crypto_authenc_esn_genicv(struct aead_request *req,
120				     unsigned int flags)
121{
122	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
123	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
124	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
125	struct crypto_ahash *auth = ctx->auth;
126	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
127			     crypto_ahash_alignmask(auth) + 1);
128	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
129	unsigned int authsize = crypto_aead_authsize(authenc_esn);
130	unsigned int assoclen = req->assoclen;
131	unsigned int cryptlen = req->cryptlen;
132	struct scatterlist *dst = req->dst;
133	u32 tmp[2];
134
135	if (!authsize)
136		return 0;
137
138	/* Move high-order bits of sequence number to the end. */
139	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
140	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
141	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
142
143	sg_init_table(areq_ctx->dst, 2);
144	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);
145
146	ahash_request_set_tfm(ahreq, auth);
147	ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
148	ahash_request_set_callback(ahreq, flags,
149				   authenc_esn_geniv_ahash_done, req);
150
151	return crypto_ahash_digest(ahreq) ?:
152	       crypto_authenc_esn_genicv_tail(req, aead_request_flags(req));
153}
154
155
156static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
157					    int err)
158{
159	struct aead_request *areq = req->data;
160
161	if (!err)
162		err = crypto_authenc_esn_genicv(areq, 0);
163
164	authenc_esn_request_complete(areq, err);
165}
166
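/*
 * Copy len bytes from req->src to req->dst using the pre-allocated "null"
 * skcipher (ecb(cipher_null)), i.e. a plain scatterlist-to-scatterlist
 * copy with no transformation applied.
 */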
167static int crypto_authenc_esn_copy(struct aead_request *req, unsigned int len)
168{
169	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
170	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
171	SYNC_SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
172
173	skcipher_request_set_sync_tfm(skreq, ctx->null);
174	skcipher_request_set_callback(skreq, aead_request_flags(req),
175				      NULL, NULL);
176	skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);
177
178	return crypto_skcipher_encrypt(skreq);
179}
180
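/*
 * Encrypt path: if the operation is not in place, first copy the
 * associated data from src to dst, then encrypt the payload directly into
 * dst, and finally compute the ICV over the result via
 * crypto_authenc_esn_genicv(), either inline or from the async completion
 * callback.
 */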
181static int crypto_authenc_esn_encrypt(struct aead_request *req)
182{
183	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
184	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
185	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
186	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
187						  ctx->reqoff);
188	struct crypto_skcipher *enc = ctx->enc;
189	unsigned int assoclen = req->assoclen;
190	unsigned int cryptlen = req->cryptlen;
191	struct scatterlist *src, *dst;
192	int err;
193
194	sg_init_table(areq_ctx->src, 2);
195	src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
196	dst = src;
197
198	if (req->src != req->dst) {
199		err = crypto_authenc_esn_copy(req, assoclen);
200		if (err)
201			return err;
202
203		sg_init_table(areq_ctx->dst, 2);
204		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
205	}
206
207	skcipher_request_set_tfm(skreq, enc);
208	skcipher_request_set_callback(skreq, aead_request_flags(req),
209				      crypto_authenc_esn_encrypt_done, req);
210	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
211
212	err = crypto_skcipher_encrypt(skreq);
213	if (err)
214		return err;
215
216	return crypto_authenc_esn_genicv(req, aead_request_flags(req));
217}
218
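/*
 * Decrypt path, second half: restore the sequence-number layout, compare
 * the received ICV (ihash, saved by crypto_authenc_esn_decrypt()) against
 * the recomputed one (ohash) in constant time with crypto_memneq(), and
 * only then decrypt the payload in place.  The skcipher completion is
 * chained to the original AEAD request's callback.
 */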
219static int crypto_authenc_esn_decrypt_tail(struct aead_request *req,
220					   unsigned int flags)
221{
222	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
223	unsigned int authsize = crypto_aead_authsize(authenc_esn);
224	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
225	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
226	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
227						  ctx->reqoff);
228	struct crypto_ahash *auth = ctx->auth;
229	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
230			      crypto_ahash_alignmask(auth) + 1);
231	unsigned int cryptlen = req->cryptlen - authsize;
232	unsigned int assoclen = req->assoclen;
233	struct scatterlist *dst = req->dst;
234	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
235	u32 tmp[2];
236
237	if (!authsize)
238		goto decrypt;
239
240	/* Move high-order bits of sequence number back. */
241	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
242	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
243	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);
244
245	if (crypto_memneq(ihash, ohash, authsize))
246		return -EBADMSG;
247
248decrypt:
249
250	sg_init_table(areq_ctx->dst, 2);
251	dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);
252
253	skcipher_request_set_tfm(skreq, ctx->enc);
254	skcipher_request_set_callback(skreq, flags,
255				      req->base.complete, req->base.data);
256	skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);
257
258	return crypto_skcipher_decrypt(skreq);
259}
260
261static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
262					  int err)
263{
264	struct aead_request *req = areq->data;
265
266	err = err ?: crypto_authenc_esn_decrypt_tail(req, 0);
267	authenc_esn_request_complete(req, err);
268}
269
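/*
 * Decrypt path, first half: copy everything but the ICV from src to dst if
 * needed, save the received ICV into the request tail (ihash), perform the
 * same sequence-number shuffle as on the encrypt side and recompute the
 * ICV over dst into ohash.  Verification and the actual decryption happen
 * in crypto_authenc_esn_decrypt_tail().
 */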
270static int crypto_authenc_esn_decrypt(struct aead_request *req)
271{
272	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
273	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
274	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
275	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
276	unsigned int authsize = crypto_aead_authsize(authenc_esn);
277	struct crypto_ahash *auth = ctx->auth;
278	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
279			      crypto_ahash_alignmask(auth) + 1);
280	unsigned int assoclen = req->assoclen;
281	unsigned int cryptlen = req->cryptlen;
282	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
283	struct scatterlist *dst = req->dst;
284	u32 tmp[2];
285	int err;
286
287	cryptlen -= authsize;
288
289	if (req->src != dst) {
290		err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
291		if (err)
292			return err;
293	}
294
295	scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
296				 authsize, 0);
297
298	if (!authsize)
299		goto tail;
300
301	/* Move high-order bits of sequence number to the end. */
302	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
303	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
304	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
305
306	sg_init_table(areq_ctx->dst, 2);
307	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);
308
309	ahash_request_set_tfm(ahreq, auth);
310	ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
311	ahash_request_set_callback(ahreq, aead_request_flags(req),
312				   authenc_esn_verify_ahash_done, req);
313
314	err = crypto_ahash_digest(ahreq);
315	if (err)
316		return err;
317
318tail:
319	return crypto_authenc_esn_decrypt_tail(req, aead_request_flags(req));
320}
321
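/*
 * Per-transform setup.  The AEAD request context ends up laid out roughly
 * as follows (a sketch; exact offsets depend on the chosen algorithms):
 *
 *	struct authenc_esn_request_ctx
 *	tail[0]      : hash output area, aligned to the ahash alignmask
 *	               (ohash followed by ihash on the decrypt side)
 *	tail[reqoff] : either the ahash_request or the skcipher_request,
 *	               whichever is larger
 */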
322static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
323{
324	struct aead_instance *inst = aead_alg_instance(tfm);
325	struct authenc_esn_instance_ctx *ictx = aead_instance_ctx(inst);
326	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
327	struct crypto_ahash *auth;
328	struct crypto_skcipher *enc;
329	struct crypto_sync_skcipher *null;
330	int err;
331
332	auth = crypto_spawn_ahash(&ictx->auth);
333	if (IS_ERR(auth))
334		return PTR_ERR(auth);
335
336	enc = crypto_spawn_skcipher(&ictx->enc);
337	err = PTR_ERR(enc);
338	if (IS_ERR(enc))
339		goto err_free_ahash;
340
341	null = crypto_get_default_null_skcipher();
342	err = PTR_ERR(null);
343	if (IS_ERR(null))
344		goto err_free_skcipher;
345
346	ctx->auth = auth;
347	ctx->enc = enc;
348	ctx->null = null;
349
350	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth),
351			    crypto_ahash_alignmask(auth) + 1);
352
353	crypto_aead_set_reqsize(
354		tfm,
355		sizeof(struct authenc_esn_request_ctx) +
356		ctx->reqoff +
357		max_t(unsigned int,
358		      crypto_ahash_reqsize(auth) +
359		      sizeof(struct ahash_request),
360		      sizeof(struct skcipher_request) +
361		      crypto_skcipher_reqsize(enc)));
362
363	return 0;
364
365err_free_skcipher:
366	crypto_free_skcipher(enc);
367err_free_ahash:
368	crypto_free_ahash(auth);
369	return err;
370}
371
372static void crypto_authenc_esn_exit_tfm(struct crypto_aead *tfm)
373{
374	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
375
376	crypto_free_ahash(ctx->auth);
377	crypto_free_skcipher(ctx->enc);
378	crypto_put_default_null_skcipher();
379}
380
381static void crypto_authenc_esn_free(struct aead_instance *inst)
382{
383	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);
384
385	crypto_drop_skcipher(&ctx->enc);
386	crypto_drop_ahash(&ctx->auth);
387	kfree(inst);
388}
389
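/*
 * Template constructor: tb[1] names the authentication ahash and tb[2] the
 * encryption skcipher.  The resulting instance is registered as
 * "authencesn(<auth>,<enc>)", for example "authencesn(hmac(sha1),cbc(aes))"
 * (names illustrative), and inherits the combined priority, block size and
 * alignment of the two underlying algorithms.
 */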
390static int crypto_authenc_esn_create(struct crypto_template *tmpl,
391				     struct rtattr **tb)
392{
393	u32 mask;
394	struct aead_instance *inst;
395	struct authenc_esn_instance_ctx *ctx;
396	struct hash_alg_common *auth;
397	struct crypto_alg *auth_base;
398	struct skcipher_alg *enc;
399	int err;
400
401	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
402	if (err)
403		return err;
404
405	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
406	if (!inst)
407		return -ENOMEM;
408	ctx = aead_instance_ctx(inst);
409
410	err = crypto_grab_ahash(&ctx->auth, aead_crypto_instance(inst),
411				crypto_attr_alg_name(tb[1]), 0, mask);
412	if (err)
413		goto err_free_inst;
414	auth = crypto_spawn_ahash_alg(&ctx->auth);
415	auth_base = &auth->base;
416
417	err = crypto_grab_skcipher(&ctx->enc, aead_crypto_instance(inst),
418				   crypto_attr_alg_name(tb[2]), 0, mask);
419	if (err)
420		goto err_free_inst;
421	enc = crypto_spawn_skcipher_alg(&ctx->enc);
422
423	err = -ENAMETOOLONG;
424	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
425		     "authencesn(%s,%s)", auth_base->cra_name,
426		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
427		goto err_free_inst;
428
429	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
430		     "authencesn(%s,%s)", auth_base->cra_driver_name,
431		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
432		goto err_free_inst;
433
434	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
435				      auth_base->cra_priority;
436	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
437	inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
438				       enc->base.cra_alignmask;
439	inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);
440
441	inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
442	inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
443	inst->alg.maxauthsize = auth->digestsize;
444
445	inst->alg.init = crypto_authenc_esn_init_tfm;
446	inst->alg.exit = crypto_authenc_esn_exit_tfm;
447
448	inst->alg.setkey = crypto_authenc_esn_setkey;
449	inst->alg.setauthsize = crypto_authenc_esn_setauthsize;
450	inst->alg.encrypt = crypto_authenc_esn_encrypt;
451	inst->alg.decrypt = crypto_authenc_esn_decrypt;
452
453	inst->free = crypto_authenc_esn_free;
454
455	err = aead_register_instance(tmpl, inst);
456	if (err) {
457err_free_inst:
458		crypto_authenc_esn_free(inst);
459	}
460	return err;
461}
462
463static struct crypto_template crypto_authenc_esn_tmpl = {
464	.name = "authencesn",
465	.create = crypto_authenc_esn_create,
466	.module = THIS_MODULE,
467};
468
469static int __init crypto_authenc_esn_module_init(void)
470{
471	return crypto_register_template(&crypto_authenc_esn_tmpl);
472}
473
474static void __exit crypto_authenc_esn_module_exit(void)
475{
476	crypto_unregister_template(&crypto_authenc_esn_tmpl);
477}
478
479subsys_initcall(crypto_authenc_esn_module_init);
480module_exit(crypto_authenc_esn_module_exit);
481
482MODULE_LICENSE("GPL");
483MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
484MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
485MODULE_ALIAS_CRYPTO("authencesn");
crypto/authencesn.c (Linux v4.17)
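/*
 * Older revision of the same file, kept for comparison.  Notable
 * differences from the v6.2 listing above: the full GPL notice instead of
 * an SPDX tag, setkey() still reporting CRYPTO_TFM_RES_* flags back to the
 * AEAD (and not wiping the extracted keys), a plain crypto_skcipher with
 * SKCIPHER_REQUEST_ON_STACK for the null copy helper, the older
 * spawn-based template construction in crypto_authenc_esn_create(), and
 * module_init() rather than subsys_initcall().
 */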
  1/*
  2 * authencesn.c - AEAD wrapper for IPsec with extended sequence numbers,
  3 *                 derived from authenc.c
  4 *
  5 * Copyright (C) 2010 secunet Security Networks AG
  6 * Copyright (C) 2010 Steffen Klassert <steffen.klassert@secunet.com>
  7 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
  8 *
  9 * This program is free software; you can redistribute it and/or modify it
 10 * under the terms of the GNU General Public License as published by the Free
 11 * Software Foundation; either version 2 of the License, or (at your option)
 12 * any later version.
 13 *
 14 */
 15
 16#include <crypto/internal/aead.h>
 17#include <crypto/internal/hash.h>
 18#include <crypto/internal/skcipher.h>
 19#include <crypto/authenc.h>
 20#include <crypto/null.h>
 21#include <crypto/scatterwalk.h>
 22#include <linux/err.h>
 23#include <linux/init.h>
 24#include <linux/kernel.h>
 25#include <linux/module.h>
 26#include <linux/rtnetlink.h>
 27#include <linux/slab.h>
 28#include <linux/spinlock.h>
 29
 30struct authenc_esn_instance_ctx {
 31	struct crypto_ahash_spawn auth;
 32	struct crypto_skcipher_spawn enc;
 33};
 34
 35struct crypto_authenc_esn_ctx {
 36	unsigned int reqoff;
 37	struct crypto_ahash *auth;
 38	struct crypto_skcipher *enc;
 39	struct crypto_skcipher *null;
 40};
 41
 42struct authenc_esn_request_ctx {
 43	struct scatterlist src[2];
 44	struct scatterlist dst[2];
 45	char tail[];
 46};
 47
 48static void authenc_esn_request_complete(struct aead_request *req, int err)
 49{
 50	if (err != -EINPROGRESS)
 51		aead_request_complete(req, err);
 52}
 53
 54static int crypto_authenc_esn_setauthsize(struct crypto_aead *authenc_esn,
 55					  unsigned int authsize)
 56{
 57	if (authsize > 0 && authsize < 4)
 58		return -EINVAL;
 59
 60	return 0;
 61}
 62
 63static int crypto_authenc_esn_setkey(struct crypto_aead *authenc_esn, const u8 *key,
 64				     unsigned int keylen)
 65{
 66	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
 67	struct crypto_ahash *auth = ctx->auth;
 68	struct crypto_skcipher *enc = ctx->enc;
 69	struct crypto_authenc_keys keys;
 70	int err = -EINVAL;
 71
 72	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
 73		goto badkey;
 74
 75	crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
 76	crypto_ahash_set_flags(auth, crypto_aead_get_flags(authenc_esn) &
 77				     CRYPTO_TFM_REQ_MASK);
 78	err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
 79	crypto_aead_set_flags(authenc_esn, crypto_ahash_get_flags(auth) &
 80					   CRYPTO_TFM_RES_MASK);
 81
 82	if (err)
 83		goto out;
 84
 85	crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
 86	crypto_skcipher_set_flags(enc, crypto_aead_get_flags(authenc_esn) &
 87					 CRYPTO_TFM_REQ_MASK);
 88	err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
 89	crypto_aead_set_flags(authenc_esn, crypto_skcipher_get_flags(enc) &
 90					   CRYPTO_TFM_RES_MASK);
 91
 92out:
 93	return err;
 94
 95badkey:
 96	crypto_aead_set_flags(authenc_esn, CRYPTO_TFM_RES_BAD_KEY_LEN);
 97	goto out;
 98}
 99
100static int crypto_authenc_esn_genicv_tail(struct aead_request *req,
101					  unsigned int flags)
102{
103	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
104	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
105	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
106	struct crypto_ahash *auth = ctx->auth;
107	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
108			     crypto_ahash_alignmask(auth) + 1);
109	unsigned int authsize = crypto_aead_authsize(authenc_esn);
110	unsigned int assoclen = req->assoclen;
111	unsigned int cryptlen = req->cryptlen;
112	struct scatterlist *dst = req->dst;
113	u32 tmp[2];
114
115	/* Move high-order bits of sequence number back. */
116	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
117	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
118	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);
119
120	scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
121	return 0;
122}
123
124static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq,
125					 int err)
126{
127	struct aead_request *req = areq->data;
128
129	err = err ?: crypto_authenc_esn_genicv_tail(req, 0);
130	aead_request_complete(req, err);
131}
132
133static int crypto_authenc_esn_genicv(struct aead_request *req,
134				     unsigned int flags)
135{
136	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
137	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
138	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
139	struct crypto_ahash *auth = ctx->auth;
140	u8 *hash = PTR_ALIGN((u8 *)areq_ctx->tail,
141			     crypto_ahash_alignmask(auth) + 1);
142	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
143	unsigned int authsize = crypto_aead_authsize(authenc_esn);
144	unsigned int assoclen = req->assoclen;
145	unsigned int cryptlen = req->cryptlen;
146	struct scatterlist *dst = req->dst;
147	u32 tmp[2];
148
149	if (!authsize)
150		return 0;
151
152	/* Move high-order bits of sequence number to the end. */
153	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
154	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
155	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
156
157	sg_init_table(areq_ctx->dst, 2);
158	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);
159
160	ahash_request_set_tfm(ahreq, auth);
161	ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
162	ahash_request_set_callback(ahreq, flags,
163				   authenc_esn_geniv_ahash_done, req);
164
165	return crypto_ahash_digest(ahreq) ?:
166	       crypto_authenc_esn_genicv_tail(req, aead_request_flags(req));
167}
168
169
170static void crypto_authenc_esn_encrypt_done(struct crypto_async_request *req,
171					    int err)
172{
173	struct aead_request *areq = req->data;
174
175	if (!err)
176		err = crypto_authenc_esn_genicv(areq, 0);
177
178	authenc_esn_request_complete(areq, err);
179}
180
181static int crypto_authenc_esn_copy(struct aead_request *req, unsigned int len)
182{
183	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
184	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
185	SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
186
187	skcipher_request_set_tfm(skreq, ctx->null);
188	skcipher_request_set_callback(skreq, aead_request_flags(req),
189				      NULL, NULL);
190	skcipher_request_set_crypt(skreq, req->src, req->dst, len, NULL);
191
192	return crypto_skcipher_encrypt(skreq);
193}
194
195static int crypto_authenc_esn_encrypt(struct aead_request *req)
196{
197	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
198	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
199	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
200	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
201						  ctx->reqoff);
202	struct crypto_skcipher *enc = ctx->enc;
203	unsigned int assoclen = req->assoclen;
204	unsigned int cryptlen = req->cryptlen;
205	struct scatterlist *src, *dst;
206	int err;
207
208	sg_init_table(areq_ctx->src, 2);
209	src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
210	dst = src;
211
212	if (req->src != req->dst) {
213		err = crypto_authenc_esn_copy(req, assoclen);
214		if (err)
215			return err;
216
217		sg_init_table(areq_ctx->dst, 2);
218		dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
219	}
220
221	skcipher_request_set_tfm(skreq, enc);
222	skcipher_request_set_callback(skreq, aead_request_flags(req),
223				      crypto_authenc_esn_encrypt_done, req);
224	skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
225
226	err = crypto_skcipher_encrypt(skreq);
227	if (err)
228		return err;
229
230	return crypto_authenc_esn_genicv(req, aead_request_flags(req));
231}
232
233static int crypto_authenc_esn_decrypt_tail(struct aead_request *req,
234					   unsigned int flags)
235{
236	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
237	unsigned int authsize = crypto_aead_authsize(authenc_esn);
238	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
239	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
240	struct skcipher_request *skreq = (void *)(areq_ctx->tail +
241						  ctx->reqoff);
242	struct crypto_ahash *auth = ctx->auth;
243	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
244			      crypto_ahash_alignmask(auth) + 1);
245	unsigned int cryptlen = req->cryptlen - authsize;
246	unsigned int assoclen = req->assoclen;
247	struct scatterlist *dst = req->dst;
248	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
249	u32 tmp[2];
250
251	if (!authsize)
252		goto decrypt;
253
254	/* Move high-order bits of sequence number back. */
255	scatterwalk_map_and_copy(tmp, dst, 4, 4, 0);
256	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
257	scatterwalk_map_and_copy(tmp, dst, 0, 8, 1);
258
259	if (crypto_memneq(ihash, ohash, authsize))
260		return -EBADMSG;
261
262decrypt:
263
264	sg_init_table(areq_ctx->dst, 2);
265	dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);
266
267	skcipher_request_set_tfm(skreq, ctx->enc);
268	skcipher_request_set_callback(skreq, flags,
269				      req->base.complete, req->base.data);
270	skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);
271
272	return crypto_skcipher_decrypt(skreq);
273}
274
275static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq,
276					  int err)
277{
278	struct aead_request *req = areq->data;
279
280	err = err ?: crypto_authenc_esn_decrypt_tail(req, 0);
281	aead_request_complete(req, err);
282}
283
284static int crypto_authenc_esn_decrypt(struct aead_request *req)
285{
286	struct crypto_aead *authenc_esn = crypto_aead_reqtfm(req);
287	struct authenc_esn_request_ctx *areq_ctx = aead_request_ctx(req);
288	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(authenc_esn);
289	struct ahash_request *ahreq = (void *)(areq_ctx->tail + ctx->reqoff);
290	unsigned int authsize = crypto_aead_authsize(authenc_esn);
291	struct crypto_ahash *auth = ctx->auth;
292	u8 *ohash = PTR_ALIGN((u8 *)areq_ctx->tail,
293			      crypto_ahash_alignmask(auth) + 1);
294	unsigned int assoclen = req->assoclen;
295	unsigned int cryptlen = req->cryptlen;
296	u8 *ihash = ohash + crypto_ahash_digestsize(auth);
297	struct scatterlist *dst = req->dst;
298	u32 tmp[2];
299	int err;
300
301	cryptlen -= authsize;
302
303	if (req->src != dst) {
304		err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
305		if (err)
306			return err;
307	}
308
309	scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
310				 authsize, 0);
311
312	if (!authsize)
313		goto tail;
314
315	/* Move high-order bits of sequence number to the end. */
316	scatterwalk_map_and_copy(tmp, dst, 0, 8, 0);
317	scatterwalk_map_and_copy(tmp, dst, 4, 4, 1);
318	scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
319
320	sg_init_table(areq_ctx->dst, 2);
321	dst = scatterwalk_ffwd(areq_ctx->dst, dst, 4);
322
323	ahash_request_set_tfm(ahreq, auth);
324	ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
325	ahash_request_set_callback(ahreq, aead_request_flags(req),
326				   authenc_esn_verify_ahash_done, req);
327
328	err = crypto_ahash_digest(ahreq);
329	if (err)
330		return err;
331
332tail:
333	return crypto_authenc_esn_decrypt_tail(req, aead_request_flags(req));
334}
335
336static int crypto_authenc_esn_init_tfm(struct crypto_aead *tfm)
337{
338	struct aead_instance *inst = aead_alg_instance(tfm);
339	struct authenc_esn_instance_ctx *ictx = aead_instance_ctx(inst);
340	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
341	struct crypto_ahash *auth;
342	struct crypto_skcipher *enc;
343	struct crypto_skcipher *null;
344	int err;
345
346	auth = crypto_spawn_ahash(&ictx->auth);
347	if (IS_ERR(auth))
348		return PTR_ERR(auth);
349
350	enc = crypto_spawn_skcipher(&ictx->enc);
351	err = PTR_ERR(enc);
352	if (IS_ERR(enc))
353		goto err_free_ahash;
354
355	null = crypto_get_default_null_skcipher();
356	err = PTR_ERR(null);
357	if (IS_ERR(null))
358		goto err_free_skcipher;
359
360	ctx->auth = auth;
361	ctx->enc = enc;
362	ctx->null = null;
363
364	ctx->reqoff = ALIGN(2 * crypto_ahash_digestsize(auth),
365			    crypto_ahash_alignmask(auth) + 1);
366
367	crypto_aead_set_reqsize(
368		tfm,
369		sizeof(struct authenc_esn_request_ctx) +
370		ctx->reqoff +
371		max_t(unsigned int,
372		      crypto_ahash_reqsize(auth) +
373		      sizeof(struct ahash_request),
374		      sizeof(struct skcipher_request) +
375		      crypto_skcipher_reqsize(enc)));
376
377	return 0;
378
379err_free_skcipher:
380	crypto_free_skcipher(enc);
381err_free_ahash:
382	crypto_free_ahash(auth);
383	return err;
384}
385
386static void crypto_authenc_esn_exit_tfm(struct crypto_aead *tfm)
387{
388	struct crypto_authenc_esn_ctx *ctx = crypto_aead_ctx(tfm);
389
390	crypto_free_ahash(ctx->auth);
391	crypto_free_skcipher(ctx->enc);
392	crypto_put_default_null_skcipher();
393}
394
395static void crypto_authenc_esn_free(struct aead_instance *inst)
396{
397	struct authenc_esn_instance_ctx *ctx = aead_instance_ctx(inst);
398
399	crypto_drop_skcipher(&ctx->enc);
400	crypto_drop_ahash(&ctx->auth);
401	kfree(inst);
402}
403
404static int crypto_authenc_esn_create(struct crypto_template *tmpl,
405				     struct rtattr **tb)
406{
407	struct crypto_attr_type *algt;
408	struct aead_instance *inst;
409	struct hash_alg_common *auth;
410	struct crypto_alg *auth_base;
411	struct skcipher_alg *enc;
412	struct authenc_esn_instance_ctx *ctx;
413	const char *enc_name;
414	int err;
415
416	algt = crypto_get_attr_type(tb);
417	if (IS_ERR(algt))
418		return PTR_ERR(algt);
419
420	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
421		return -EINVAL;
422
423	auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
424			      CRYPTO_ALG_TYPE_AHASH_MASK |
425			      crypto_requires_sync(algt->type, algt->mask));
426	if (IS_ERR(auth))
427		return PTR_ERR(auth);
428
429	auth_base = &auth->base;
430
431	enc_name = crypto_attr_alg_name(tb[2]);
432	err = PTR_ERR(enc_name);
433	if (IS_ERR(enc_name))
434		goto out_put_auth;
435
436	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
437	err = -ENOMEM;
438	if (!inst)
439		goto out_put_auth;
440
441	ctx = aead_instance_ctx(inst);
442
443	err = crypto_init_ahash_spawn(&ctx->auth, auth,
444				      aead_crypto_instance(inst));
445	if (err)
446		goto err_free_inst;
447
448	crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
449	err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
450				   crypto_requires_sync(algt->type,
451							algt->mask));
452	if (err)
453		goto err_drop_auth;
454
455	enc = crypto_spawn_skcipher_alg(&ctx->enc);
456
457	err = -ENAMETOOLONG;
458	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
459		     "authencesn(%s,%s)", auth_base->cra_name,
460		     enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
461		goto err_drop_enc;
462
463	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
464		     "authencesn(%s,%s)", auth_base->cra_driver_name,
465		     enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
466		goto err_drop_enc;
467
468	inst->alg.base.cra_flags = (auth_base->cra_flags |
469				    enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
470	inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
471				      auth_base->cra_priority;
472	inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
473	inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
474				       enc->base.cra_alignmask;
475	inst->alg.base.cra_ctxsize = sizeof(struct crypto_authenc_esn_ctx);
476
477	inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
478	inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
479	inst->alg.maxauthsize = auth->digestsize;
480
481	inst->alg.init = crypto_authenc_esn_init_tfm;
482	inst->alg.exit = crypto_authenc_esn_exit_tfm;
483
484	inst->alg.setkey = crypto_authenc_esn_setkey;
485	inst->alg.setauthsize = crypto_authenc_esn_setauthsize;
486	inst->alg.encrypt = crypto_authenc_esn_encrypt;
487	inst->alg.decrypt = crypto_authenc_esn_decrypt;
488
489	inst->free = crypto_authenc_esn_free,
490
491	err = aead_register_instance(tmpl, inst);
492	if (err)
493		goto err_drop_enc;
494
495out:
496	crypto_mod_put(auth_base);
497	return err;
498
499err_drop_enc:
500	crypto_drop_skcipher(&ctx->enc);
501err_drop_auth:
502	crypto_drop_ahash(&ctx->auth);
503err_free_inst:
504	kfree(inst);
505out_put_auth:
506	goto out;
507}
508
509static struct crypto_template crypto_authenc_esn_tmpl = {
510	.name = "authencesn",
511	.create = crypto_authenc_esn_create,
512	.module = THIS_MODULE,
513};
514
515static int __init crypto_authenc_esn_module_init(void)
516{
517	return crypto_register_template(&crypto_authenc_esn_tmpl);
518}
519
520static void __exit crypto_authenc_esn_module_exit(void)
521{
522	crypto_unregister_template(&crypto_authenc_esn_tmpl);
523}
524
525module_init(crypto_authenc_esn_module_init);
526module_exit(crypto_authenc_esn_module_exit);
527
528MODULE_LICENSE("GPL");
529MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
530MODULE_DESCRIPTION("AEAD wrapper for IPsec with extended sequence numbers");
531MODULE_ALIAS_CRYPTO("authencesn");