v6.13.7
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_simd);

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (static_branch_likely(&have_simd))
		return crypto_simd_usable();
#endif
	return false;
}

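/*
 * Update the cipher state: each block becomes the AES round of the
 * block before it, keyed with its own old value; block 0 takes the
 * saved last block as input. Callers absorb a message block by
 * XORing it into block 0 afterwards.
 */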
static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

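/*
 * Absorb one message block into the state; update_a() requires @msg
 * aligned to AEGIS_BLOCK_SIZE, update_u() accepts unaligned input.
 * Both take the SIMD path when the caller requests it.
 */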
static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg,
				     bool do_simd)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) && do_simd) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

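/*
 * Load the initial state from the key, the nonce and the two
 * Fibonacci-based constants, then mix it with ten update rounds,
 * alternately absorbing the key and key ^ IV, as the AEGIS-128 spec
 * prescribes.
 */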
static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key, false);
		crypto_aegis128_update_a(state, &key_iv, false);
	}
}

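/*
 * Absorb full blocks of associated data, using the aligned fast path
 * when the buffer allows it; any partial tail block is left for the
 * caller to handle.
 */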
static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size,
			       bool do_simd)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src, do_simd);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

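/*
 * Signature-compatible with the crypt_chunk callbacks so it can be
 * passed to crypto_aegis128_process_crypt() to overwrite decrypted
 * plaintext with zeroes after a failed tag check.
 */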
static void crypto_aegis128_wipe_chunk(struct aegis_state *state, u8 *dst,
				       const u8 *src, unsigned int size)
{
	memzero_explicit(dst, size);
}

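/*
 * Encrypt a chunk: the keystream block is S1 ^ S4 ^ (S2 & S3); each
 * ciphertext block is the plaintext XORed with the keystream, and the
 * state absorbs the plaintext block.
 */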
static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg, false);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

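/*
 * Decrypt a chunk: same keystream as encryption, but the state absorbs
 * the recovered plaintext, and a partial final block is zero-padded
 * before being absorbed.
 */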
static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp, false);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp, false);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg, false);

		memcpy(dst, msg.bytes, size);
	}
}

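/*
 * Walk the associated-data scatterlist, buffering partial blocks in
 * @buf so that crypto_aegis128_ad() only ever sees whole blocks; the
 * final partial block is zero-padded before it is absorbed.
 */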
static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen,
				       bool do_simd)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf, do_simd);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left, do_simd);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf, do_simd);
	}
}

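/*
 * Drive the skcipher walk over the request's src/dst buffers, rounding
 * each non-final step down to the walk stride, and hand every run to
 * the supplied crypt callback.
 */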
static __always_inline
int crypto_aegis128_process_crypt(struct aegis_state *state,
				  struct skcipher_walk *walk,
				  void (*crypt)(struct aegis_state *state,
						u8 *dst,
						const u8 *src,
						unsigned int size))
{
	int err = 0;

	while (walk->nbytes) {
		unsigned int nbytes = walk->nbytes;

		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		crypt(state, walk->dst.virt.addr, walk->src.virt.addr, nbytes);

		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}

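/*
 * Finalize: absorb the associated-data and message lengths (in bits,
 * XORed with state block 3) over seven update rounds, then fold all
 * five state blocks into the tag.
 */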
static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp, false);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE)
		return -EINVAL;

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

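/*
 * Generic (scalar) entry points. The SIMD entry points below fall
 * back to these when SIMD is not available or not usable in the
 * current context.
 */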
static int crypto_aegis128_encrypt_generic(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_generic(struct aead_request *req)
{
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, false);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk);
	crypto_aegis128_final(&state, &tag, req->assoclen, cryptlen);

	if (unlikely(crypto_memneq(tag.bytes, zeros, authsize))) {
		/*
		 * From Chapter 4. 'Security Analysis' of the AEGIS spec [0]
		 *
		 * "3. If verification fails, the decrypted plaintext and the
		 *     wrong authentication tag should not be given as output."
		 *
		 * [0] https://competitions.cr.yp.to/round3/aegisv11.pdf
		 */
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		memzero_explicit(&tag, sizeof(tag));
		return -EBADMSG;
	}
	return 0;
}

static int crypto_aegis128_encrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int cryptlen = req->cryptlen;
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_encrypt_generic(req);

	skcipher_walk_aead_encrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_encrypt_chunk_simd);
	crypto_aegis128_final_simd(&state, &tag, req->assoclen, cryptlen, 0);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt_simd(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct skcipher_walk walk;
	struct aegis_state state;

	if (!aegis128_do_simd())
		return crypto_aegis128_decrypt_generic(req);

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	skcipher_walk_aead_decrypt(&walk, req, false);
	crypto_aegis128_init_simd(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen, true);
	crypto_aegis128_process_crypt(&state, &walk,
				      crypto_aegis128_decrypt_chunk_simd);

	if (unlikely(crypto_aegis128_final_simd(&state, &tag, req->assoclen,
						cryptlen, authsize))) {
		skcipher_walk_aead_decrypt(&walk, req, false);
		crypto_aegis128_process_crypt(NULL, &walk,
					      crypto_aegis128_wipe_chunk);
		return -EBADMSG;
	}
	return 0;
}

static struct aead_alg crypto_aegis128_alg_generic = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_generic,
	.decrypt		= crypto_aegis128_decrypt_generic,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 100,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-generic",
	.base.cra_module	= THIS_MODULE,
};

static struct aead_alg crypto_aegis128_alg_simd = {
	.setkey			= crypto_aegis128_setkey,
	.setauthsize		= crypto_aegis128_setauthsize,
	.encrypt		= crypto_aegis128_encrypt_simd,
	.decrypt		= crypto_aegis128_decrypt_simd,

	.ivsize			= AEGIS128_NONCE_SIZE,
	.maxauthsize		= AEGIS128_MAX_AUTH_SIZE,
	.chunksize		= AEGIS_BLOCK_SIZE,

	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aegis_ctx),
	.base.cra_alignmask	= 0,
	.base.cra_priority	= 200,
	.base.cra_name		= "aegis128",
	.base.cra_driver_name	= "aegis128-simd",
	.base.cra_module	= THIS_MODULE,
};

static int __init crypto_aegis128_module_init(void)
{
	int ret;

	ret = crypto_register_aead(&crypto_aegis128_alg_generic);
	if (ret)
		return ret;

	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd()) {
		ret = crypto_register_aead(&crypto_aegis128_alg_simd);
		if (ret) {
			crypto_unregister_aead(&crypto_aegis128_alg_generic);
			return ret;
		}
		static_branch_enable(&have_simd);
	}
	return 0;
}

static void __exit crypto_aegis128_module_exit(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD) &&
	    crypto_aegis128_have_simd())
		crypto_unregister_aead(&crypto_aegis128_alg_simd);

	crypto_unregister_aead(&crypto_aegis128_alg_generic);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");
MODULE_ALIAS_CRYPTO("aegis128-simd");
v5.4
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * The AEGIS-128 Authenticated-Encryption Algorithm
 *
 * Copyright (c) 2017-2018 Ondrej Mosnacek <omosnacek@gmail.com>
 * Copyright (C) 2017-2018 Red Hat, Inc. All rights reserved.
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>

#include <asm/simd.h>

#include "aegis.h"

#define AEGIS128_NONCE_SIZE 16
#define AEGIS128_STATE_BLOCKS 5
#define AEGIS128_KEY_SIZE 16
#define AEGIS128_MIN_AUTH_SIZE 8
#define AEGIS128_MAX_AUTH_SIZE 16

struct aegis_state {
	union aegis_block blocks[AEGIS128_STATE_BLOCKS];
};

struct aegis_ctx {
	union aegis_block key;
};

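/*
 * In this version a single registered algorithm switches between the
 * generic and SIMD bulk paths at request time via this ops table.
 */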
struct aegis128_ops {
	int (*skcipher_walk_init)(struct skcipher_walk *walk,
				  struct aead_request *req, bool atomic);

	void (*crypt_chunk)(struct aegis_state *state, u8 *dst,
			    const u8 *src, unsigned int size);
};

static bool have_simd;

static const union aegis_block crypto_aegis_const[2] = {
	{ .words64 = {
		cpu_to_le64(U64_C(0x0d08050302010100)),
		cpu_to_le64(U64_C(0x6279e99059372215)),
	} },
	{ .words64 = {
		cpu_to_le64(U64_C(0xf12fc26d55183ddb)),
		cpu_to_le64(U64_C(0xdd28b57342311120)),
	} },
};

static bool aegis128_do_simd(void)
{
#ifdef CONFIG_CRYPTO_AEGIS128_SIMD
	if (have_simd)
		return crypto_simd_usable();
#endif
	return false;
}

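/* Provided by the optional arch-specific SIMD glue code. */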
bool crypto_aegis128_have_simd(void);
void crypto_aegis128_update_simd(struct aegis_state *state, const void *msg);
void crypto_aegis128_encrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);
void crypto_aegis128_decrypt_chunk_simd(struct aegis_state *state, u8 *dst,
					const u8 *src, unsigned int size);

static void crypto_aegis128_update(struct aegis_state *state)
{
	union aegis_block tmp;
	unsigned int i;

	tmp = state->blocks[AEGIS128_STATE_BLOCKS - 1];
	for (i = AEGIS128_STATE_BLOCKS - 1; i > 0; i--)
		crypto_aegis_aesenc(&state->blocks[i], &state->blocks[i - 1],
				    &state->blocks[i]);
	crypto_aegis_aesenc(&state->blocks[0], &tmp, &state->blocks[0]);
}

static void crypto_aegis128_update_a(struct aegis_state *state,
				     const union aegis_block *msg)
{
	if (aegis128_do_simd()) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_aegis_block_xor(&state->blocks[0], msg);
}

static void crypto_aegis128_update_u(struct aegis_state *state, const void *msg)
{
	if (aegis128_do_simd()) {
		crypto_aegis128_update_simd(state, msg);
		return;
	}

	crypto_aegis128_update(state);
	crypto_xor(state->blocks[0].bytes, msg, AEGIS_BLOCK_SIZE);
}

static void crypto_aegis128_init(struct aegis_state *state,
				 const union aegis_block *key,
				 const u8 *iv)
{
	union aegis_block key_iv;
	unsigned int i;

	key_iv = *key;
	crypto_xor(key_iv.bytes, iv, AEGIS_BLOCK_SIZE);

	state->blocks[0] = key_iv;
	state->blocks[1] = crypto_aegis_const[1];
	state->blocks[2] = crypto_aegis_const[0];
	state->blocks[3] = *key;
	state->blocks[4] = *key;

	crypto_aegis_block_xor(&state->blocks[3], &crypto_aegis_const[0]);
	crypto_aegis_block_xor(&state->blocks[4], &crypto_aegis_const[1]);

	for (i = 0; i < 5; i++) {
		crypto_aegis128_update_a(state, key);
		crypto_aegis128_update_a(state, &key_iv);
	}
}

static void crypto_aegis128_ad(struct aegis_state *state,
			       const u8 *src, unsigned int size)
{
	if (AEGIS_ALIGNED(src)) {
		const union aegis_block *src_blk =
				(const union aegis_block *)src;

		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_a(state, src_blk);

			size -= AEGIS_BLOCK_SIZE;
			src_blk++;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			crypto_aegis128_update_u(state, src);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
		}
	}
}

static void crypto_aegis128_encrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, src_blk);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_u(state, src);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);

		crypto_aegis128_update_a(state, &msg);

		crypto_aegis_block_xor(&msg, &tmp);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_decrypt_chunk(struct aegis_state *state, u8 *dst,
					  const u8 *src, unsigned int size)
{
	union aegis_block tmp;

	if (AEGIS_ALIGNED(src) && AEGIS_ALIGNED(dst)) {
		while (size >= AEGIS_BLOCK_SIZE) {
			union aegis_block *dst_blk =
					(union aegis_block *)dst;
			const union aegis_block *src_blk =
					(const union aegis_block *)src;

			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_aegis_block_xor(&tmp, src_blk);

			crypto_aegis128_update_a(state, &tmp);

			*dst_blk = tmp;

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	} else {
		while (size >= AEGIS_BLOCK_SIZE) {
			tmp = state->blocks[2];
			crypto_aegis_block_and(&tmp, &state->blocks[3]);
			crypto_aegis_block_xor(&tmp, &state->blocks[4]);
			crypto_aegis_block_xor(&tmp, &state->blocks[1]);
			crypto_xor(tmp.bytes, src, AEGIS_BLOCK_SIZE);

			crypto_aegis128_update_a(state, &tmp);

			memcpy(dst, tmp.bytes, AEGIS_BLOCK_SIZE);

			size -= AEGIS_BLOCK_SIZE;
			src += AEGIS_BLOCK_SIZE;
			dst += AEGIS_BLOCK_SIZE;
		}
	}

	if (size > 0) {
		union aegis_block msg = {};
		memcpy(msg.bytes, src, size);

		tmp = state->blocks[2];
		crypto_aegis_block_and(&tmp, &state->blocks[3]);
		crypto_aegis_block_xor(&tmp, &state->blocks[4]);
		crypto_aegis_block_xor(&tmp, &state->blocks[1]);
		crypto_aegis_block_xor(&msg, &tmp);

		memset(msg.bytes + size, 0, AEGIS_BLOCK_SIZE - size);

		crypto_aegis128_update_a(state, &msg);

		memcpy(dst, msg.bytes, size);
	}
}

static void crypto_aegis128_process_ad(struct aegis_state *state,
				       struct scatterlist *sg_src,
				       unsigned int assoclen)
{
	struct scatter_walk walk;
	union aegis_block buf;
	unsigned int pos = 0;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		unsigned int left = size;
		void *mapped = scatterwalk_map(&walk);
		const u8 *src = (const u8 *)mapped;

		if (pos + size >= AEGIS_BLOCK_SIZE) {
			if (pos > 0) {
				unsigned int fill = AEGIS_BLOCK_SIZE - pos;
				memcpy(buf.bytes + pos, src, fill);
				crypto_aegis128_update_a(state, &buf);
				pos = 0;
				left -= fill;
				src += fill;
			}

			crypto_aegis128_ad(state, src, left);
			src += left & ~(AEGIS_BLOCK_SIZE - 1);
			left &= AEGIS_BLOCK_SIZE - 1;
		}

		memcpy(buf.bytes + pos, src, left);

		pos += left;
		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}

	if (pos > 0) {
		memset(buf.bytes + pos, 0, AEGIS_BLOCK_SIZE - pos);
		crypto_aegis128_update_a(state, &buf);
	}
}

static void crypto_aegis128_process_crypt(struct aegis_state *state,
					  struct aead_request *req,
					  const struct aegis128_ops *ops)
{
	struct skcipher_walk walk;

	ops->skcipher_walk_init(&walk, req, false);

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
				 nbytes);

		skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
}

static void crypto_aegis128_final(struct aegis_state *state,
				  union aegis_block *tag_xor,
				  u64 assoclen, u64 cryptlen)
{
	u64 assocbits = assoclen * 8;
	u64 cryptbits = cryptlen * 8;

	union aegis_block tmp;
	unsigned int i;

	tmp.words64[0] = cpu_to_le64(assocbits);
	tmp.words64[1] = cpu_to_le64(cryptbits);

	crypto_aegis_block_xor(&tmp, &state->blocks[3]);

	for (i = 0; i < 7; i++)
		crypto_aegis128_update_a(state, &tmp);

	for (i = 0; i < AEGIS128_STATE_BLOCKS; i++)
		crypto_aegis_block_xor(tag_xor, &state->blocks[i]);
}

static int crypto_aegis128_setkey(struct crypto_aead *aead, const u8 *key,
				  unsigned int keylen)
{
	struct aegis_ctx *ctx = crypto_aead_ctx(aead);

	if (keylen != AEGIS128_KEY_SIZE) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key.bytes, key, AEGIS128_KEY_SIZE);
	return 0;
}

static int crypto_aegis128_setauthsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	if (authsize > AEGIS128_MAX_AUTH_SIZE)
		return -EINVAL;
	if (authsize < AEGIS128_MIN_AUTH_SIZE)
		return -EINVAL;
	return 0;
}

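/*
 * Common body of encrypt and decrypt: initialize the state, absorb the
 * associated data, run the selected crypt_chunk over the request, then
 * compute the tag.
 */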
static void crypto_aegis128_crypt(struct aead_request *req,
				  union aegis_block *tag_xor,
				  unsigned int cryptlen,
				  const struct aegis128_ops *ops)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aegis_ctx *ctx = crypto_aead_ctx(tfm);
	struct aegis_state state;

	crypto_aegis128_init(&state, &ctx->key, req->iv);
	crypto_aegis128_process_ad(&state, req->src, req->assoclen);
	crypto_aegis128_process_crypt(&state, req, ops);
	crypto_aegis128_final(&state, tag_xor, req->assoclen, cryptlen);
}

static int crypto_aegis128_encrypt(struct aead_request *req)
{
	const struct aegis128_ops *ops = &(struct aegis128_ops){
		.skcipher_walk_init = skcipher_walk_aead_encrypt,
		.crypt_chunk = crypto_aegis128_encrypt_chunk,
	};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag = {};
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen;

	if (aegis128_do_simd())
		ops = &(struct aegis128_ops){
			.skcipher_walk_init = skcipher_walk_aead_encrypt,
			.crypt_chunk = crypto_aegis128_encrypt_chunk_simd };

	crypto_aegis128_crypt(req, &tag, cryptlen, ops);

	scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
				 authsize, 1);
	return 0;
}

static int crypto_aegis128_decrypt(struct aead_request *req)
{
	const struct aegis128_ops *ops = &(struct aegis128_ops){
		.skcipher_walk_init = skcipher_walk_aead_decrypt,
		.crypt_chunk = crypto_aegis128_decrypt_chunk,
	};
	static const u8 zeros[AEGIS128_MAX_AUTH_SIZE] = {};

	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	union aegis_block tag;
	unsigned int authsize = crypto_aead_authsize(tfm);
	unsigned int cryptlen = req->cryptlen - authsize;

	scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
				 authsize, 0);

	if (aegis128_do_simd())
		ops = &(struct aegis128_ops){
			.skcipher_walk_init = skcipher_walk_aead_decrypt,
			.crypt_chunk = crypto_aegis128_decrypt_chunk_simd };

	crypto_aegis128_crypt(req, &tag, cryptlen, ops);

	return crypto_memneq(tag.bytes, zeros, authsize) ? -EBADMSG : 0;
}

static struct aead_alg crypto_aegis128_alg = {
	.setkey = crypto_aegis128_setkey,
	.setauthsize = crypto_aegis128_setauthsize,
	.encrypt = crypto_aegis128_encrypt,
	.decrypt = crypto_aegis128_decrypt,

	.ivsize = AEGIS128_NONCE_SIZE,
	.maxauthsize = AEGIS128_MAX_AUTH_SIZE,
	.chunksize = AEGIS_BLOCK_SIZE,

	.base = {
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct aegis_ctx),
		.cra_alignmask = 0,

		.cra_priority = 100,

		.cra_name = "aegis128",
		.cra_driver_name = "aegis128-generic",

		.cra_module = THIS_MODULE,
	}
};

static int __init crypto_aegis128_module_init(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_AEGIS128_SIMD))
		have_simd = crypto_aegis128_have_simd();

	return crypto_register_aead(&crypto_aegis128_alg);
}

static void __exit crypto_aegis128_module_exit(void)
{
	crypto_unregister_aead(&crypto_aegis128_alg);
}

subsys_initcall(crypto_aegis128_module_init);
module_exit(crypto_aegis128_module_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Ondrej Mosnacek <omosnacek@gmail.com>");
MODULE_DESCRIPTION("AEGIS-128 AEAD algorithm");
MODULE_ALIAS_CRYPTO("aegis128");
MODULE_ALIAS_CRYPTO("aegis128-generic");