crypto/tea.c (Linux v5.9)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it.  XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9

struct tea_ctx {
	u32 KEY[4];
};

struct xtea_ctx {
	u32 KEY[4];
};

static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum = 0;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	n = TEA_ROUNDS;

	while (n-- > 0) {
		sum += TEA_DELTA;
		y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	sum = TEA_DELTA << 5;

	n = TEA_ROUNDS;

	while (n-- > 0) {
		z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
		y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		sum -= TEA_DELTA;
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static struct crypto_alg tea_algs[3] = { {
	.cra_name		=	"tea",
	.cra_driver_name	=	"tea-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	TEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct tea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	TEA_KEY_SIZE,
	.cia_max_keysize	=	TEA_KEY_SIZE,
	.cia_setkey		=	tea_setkey,
	.cia_encrypt		=	tea_encrypt,
	.cia_decrypt		=	tea_decrypt } }
}, {
	.cra_name		=	"xtea",
	.cra_driver_name	=	"xtea-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xtea_encrypt,
	.cia_decrypt		=	xtea_decrypt } }
}, {
	.cra_name		=	"xeta",
	.cra_driver_name	=	"xeta-generic",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xeta_encrypt,
	.cia_decrypt		=	xeta_decrypt } }
} };

static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

MODULE_ALIAS_CRYPTO("tea");
MODULE_ALIAS_CRYPTO("xtea");
MODULE_ALIAS_CRYPTO("xeta");

subsys_initcall(tea_mod_init);
module_exit(tea_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
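
The header comment explains why both "xtea" and "xeta" are registered: the only difference is the order of evaluation inside the round function, visible above in xtea_encrypt() versus xeta_encrypt(). As a minimal user-space sketch (not part of this module; the key and block values below are made-up examples), one round of each variant can be compared side by side:

/*
 * Standalone illustration of the XTEA vs. XETA round ordering, mirroring
 * xtea_encrypt() and xeta_encrypt() above.  Key/block values are arbitrary.
 */
#include <stdint.h>
#include <stdio.h>

#define DELTA 0x9e3779b9u

static void xtea_round(uint32_t v[2], const uint32_t k[4], uint32_t *sum)
{
	/* Real XTEA: the whole (sum + key) term is XORed in. */
	v[0] += ((v[1] << 4 ^ v[1] >> 5) + v[1]) ^ (*sum + k[*sum & 3]);
	*sum += DELTA;
	v[1] += ((v[0] << 4 ^ v[0] >> 5) + v[0]) ^ (*sum + k[*sum >> 11 & 3]);
}

static void xeta_round(uint32_t v[2], const uint32_t k[4], uint32_t *sum)
{
	/* XETA ("XTEA in the wrong order"): sum is XORed into the data word,
	 * and the key word is added separately. */
	v[0] += (v[1] << 4 ^ v[1] >> 5) + (v[1] ^ *sum) + k[*sum & 3];
	*sum += DELTA;
	v[1] += (v[0] << 4 ^ v[0] >> 5) + (v[0] ^ *sum) + k[*sum >> 11 & 3];
}

int main(void)
{
	const uint32_t key[4] = { 0x01234567, 0x89abcdef, 0xfedcba98, 0x76543210 };
	uint32_t a[2] = { 0xdeadbeef, 0x01020304 };
	uint32_t b[2] = { 0xdeadbeef, 0x01020304 };
	uint32_t sa = 0, sb = 0;

	xtea_round(a, key, &sa);
	xeta_round(b, key, &sb);

	/* The two variants diverge after a single round. */
	printf("xtea: %08x %08x\n", a[0], a[1]);
	printf("xeta: %08x %08x\n", b[0], b[1]);
	return 0;
}

The full ciphers simply run 32 such rounds, with sum advancing by DELTA each round, which is why tea_algs registers both names for compatibility instead of silently changing the behaviour of "xtea".
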
crypto/tea.c (Linux v4.17)
 
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA many people have incorrectly
 * implemented it.  XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9

struct tea_ctx {
	u32 KEY[4];
};

struct xtea_ctx {
	u32 KEY[4];
};

static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum = 0;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	n = TEA_ROUNDS;

	while (n-- > 0) {
		sum += TEA_DELTA;
		y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	sum = TEA_DELTA << 5;

	n = TEA_ROUNDS;

	while (n-- > 0) {
		z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
		y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		sum -= TEA_DELTA;
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static struct crypto_alg tea_algs[3] = { {
	.cra_name		=	"tea",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	TEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct tea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	TEA_KEY_SIZE,
	.cia_max_keysize	=	TEA_KEY_SIZE,
	.cia_setkey		=	tea_setkey,
	.cia_encrypt		=	tea_encrypt,
	.cia_decrypt		=	tea_decrypt } }
}, {
	.cra_name		=	"xtea",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xtea_encrypt,
	.cia_decrypt		=	xtea_decrypt } }
}, {
	.cra_name		=	"xeta",
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		=	XTEA_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof (struct xtea_ctx),
	.cra_alignmask		=	3,
	.cra_module		=	THIS_MODULE,
	.cra_u			=	{ .cipher = {
	.cia_min_keysize	=	XTEA_KEY_SIZE,
	.cia_max_keysize	=	XTEA_KEY_SIZE,
	.cia_setkey		=	xtea_setkey,
	.cia_encrypt		=	xeta_encrypt,
	.cia_decrypt		=	xeta_decrypt } }
} };

static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

MODULE_ALIAS_CRYPTO("tea");
MODULE_ALIAS_CRYPTO("xtea");
MODULE_ALIAS_CRYPTO("xeta");

module_init(tea_mod_init);
module_exit(tea_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");
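
Because all three entries are registered as CRYPTO_ALG_TYPE_CIPHER single-block ciphers, an in-kernel caller would normally reach them by name through the crypto_cipher API (or wrap them in a block-cipher mode via a template) rather than call tea_encrypt() and friends directly. The following is only a sketch of such a caller, assuming kernel-module context; the key and plaintext bytes are placeholders and error handling is abbreviated:

/*
 * Hypothetical in-kernel consumer of the "xtea" cipher registered above.
 * Illustrative only: key and plaintext are placeholder values.
 */
#include <linux/crypto.h>
#include <linux/err.h>

static int xtea_one_block_demo(void)
{
	static const u8 key[16] = {
		0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
		0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
	};
	u8 in[8] = "ABCDEFG";	/* one 8-byte block (7 chars + NUL) */
	u8 out[8];
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("xtea", 0, 0);	/* matches .cra_name above */
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));	/* 128-bit key */
	if (!err)
		crypto_cipher_encrypt_one(tfm, out, in);	/* one block only */

	crypto_free_cipher(tfm);
	return err;
}

Requesting "tea" or "xeta" instead selects the other two algorithms; the MODULE_ALIAS_CRYPTO() lines above are what allow such a request to auto-load this module.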