v3.5.6 (arch/x86/crypto/sha1_ssse3_glue.c)
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <asm/byteorder.h>
#include <asm/i387.h>
#include <asm/xcr.h>
#include <asm/xsave.h>


asmlinkage void sha1_transform_ssse3(u32 *digest, const char *data,
				     unsigned int rounds);
#ifdef SHA1_ENABLE_AVX_SUPPORT
asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
				   unsigned int rounds);
#endif

static asmlinkage void (*sha1_transform_asm)(u32 *, const char *, unsigned int);


static int sha1_ssse3_init(struct shash_desc *desc)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha1_state){
		.state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
	};

	return 0;
}

static int __sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len, unsigned int partial)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int done = 0;

	sctx->count += len;

	if (partial) {
		done = SHA1_BLOCK_SIZE - partial;
		memcpy(sctx->buffer + partial, data, done);
		sha1_transform_asm(sctx->state, sctx->buffer, 1);
	}

	if (len - done >= SHA1_BLOCK_SIZE) {
		const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE;

		sha1_transform_asm(sctx->state, data + done, rounds);
		done += rounds * SHA1_BLOCK_SIZE;
	}

	memcpy(sctx->buffer, data + done, len - done);

	return 0;
}

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
	int res;

	/* Handle the fast case right here */
	if (partial + len < SHA1_BLOCK_SIZE) {
		sctx->count += len;
		memcpy(sctx->buffer + partial, data, len);

		return 0;
	}

	if (!irq_fpu_usable()) {
		res = crypto_sha1_update(desc, data, len);
	} else {
		kernel_fpu_begin();
		res = __sha1_ssse3_update(desc, data, len, partial);
		kernel_fpu_end();
	}

	return res;
}


/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, };

	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64 and append length */
	index = sctx->count % SHA1_BLOCK_SIZE;
	padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index);
	if (!irq_fpu_usable()) {
		crypto_sha1_update(desc, padding, padlen);
		crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits));
	} else {
		kernel_fpu_begin();
		/* We need to fill a whole block for __sha1_ssse3_update() */
		if (padlen <= 56) {
			sctx->count += padlen;
			memcpy(sctx->buffer + index, padding, padlen);
		} else {
			__sha1_ssse3_update(desc, padding, padlen, index);
		}
		__sha1_ssse3_update(desc, (const u8 *)&bits, sizeof(bits), 56);
		kernel_fpu_end();
	}

	/* Store state in digest */
	for (i = 0; i < 5; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}

static int sha1_ssse3_export(struct shash_desc *desc, void *out)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));

	return 0;
}

static int sha1_ssse3_import(struct shash_desc *desc, const void *in)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));

	return 0;
}

static struct shash_alg alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_ssse3_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.export		=	sha1_ssse3_export,
	.import		=	sha1_ssse3_import,
	.descsize	=	sizeof(struct sha1_state),
	.statesize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name=	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

#ifdef SHA1_ENABLE_AVX_SUPPORT
static bool __init avx_usable(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave)
		return false;

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");

		return false;
	}

	return true;
}
#endif

static int __init sha1_ssse3_mod_init(void)
{
	/* test for SSSE3 first */
	if (cpu_has_ssse3)
		sha1_transform_asm = sha1_transform_ssse3;

#ifdef SHA1_ENABLE_AVX_SUPPORT
	/* allow AVX to override SSSE3, it's a little faster */
	if (avx_usable())
		sha1_transform_asm = sha1_transform_avx;
#endif

	if (sha1_transform_asm) {
		pr_info("Using %s optimized SHA-1 implementation\n",
		        sha1_transform_asm == sha1_transform_ssse3 ? "SSSE3"
		                                                   : "AVX");
		return crypto_register_shash(&alg);
	}
	pr_info("Neither AVX nor SSSE3 is available/usable.\n");

	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS("sha1");
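The v3.5.6 code registers its implementation under the generic algorithm name "sha1", so in-kernel callers never reference the SSSE3 or AVX routines directly; they just ask for "sha1" and the crypto core picks the highest-priority provider. Below is a minimal, illustrative sketch (not part of sha1_ssse3_glue.c; the module name is made up) of such a caller. It assumes <crypto/hash.h> with SHASH_DESC_ON_STACK and omits most error reporting; pre-5.2 kernels additionally require desc->flags to be initialised.

/*
 * Illustrative only: a trivial consumer of whatever "sha1" shash is
 * registered (sha1-ssse3, sha1-avx, or the generic fallback).
 */
#include <linux/module.h>
#include <crypto/hash.h>

static int __init sha1_demo_init(void)
{
	static const u8 msg[] = "abc";
	u8 digest[20];			/* SHA1_DIGEST_SIZE */
	struct crypto_shash *tfm;
	int err;

	/* Resolves to the highest-priority "sha1" driver. */
	tfm = crypto_alloc_shash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, msg, sizeof(msg) - 1, digest);
	}

	crypto_free_shash(tfm);
	if (!err)
		pr_info("sha1_demo: digest[0] = %02x\n", digest[0]);
	return err;
}

static void __exit sha1_demo_exit(void) { }

module_init(sha1_demo_init);
module_exit(sha1_demo_exit);
MODULE_LICENSE("GPL");

The v4.17 rewrite below keeps the same registration model, so a caller like this works unchanged; only the set of registered drivers and their priorities differs.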
v4.17 (arch/x86/crypto/sha1_ssse3_glue.c)
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha1_base.h>
#include <asm/fpu/api.h>

typedef void (sha1_transform_fn)(u32 *digest, const char *data,
				unsigned int rounds);

static int sha1_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len, sha1_transform_fn *sha1_xform)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	if (!irq_fpu_usable() ||
	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
		return crypto_sha1_update(desc, data, len);

	/* make sure casting to sha1_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

	kernel_fpu_begin();
	sha1_base_do_update(desc, data, len,
			    (sha1_block_fn *)sha1_xform);
	kernel_fpu_end();

	return 0;
}

static int sha1_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out, sha1_transform_fn *sha1_xform)
{
	if (!irq_fpu_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len,
				    (sha1_block_fn *)sha1_xform);
	sha1_base_do_finalize(desc, (sha1_block_fn *)sha1_xform);
	kernel_fpu_end();

	return sha1_base_finish(desc, out);
}

asmlinkage void sha1_transform_ssse3(u32 *digest, const char *data,
				     unsigned int rounds);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len,
			(sha1_transform_fn *) sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out,
			(sha1_transform_fn *) sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ssse3_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.finup		=	sha1_ssse3_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shash(&sha1_ssse3_alg);
	return 0;
}

static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}

#ifdef CONFIG_AS_AVX
asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
				   unsigned int rounds);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len,
			(sha1_transform_fn *) sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out,
			(sha1_transform_fn *) sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx_update,
	.final		=	sha1_avx_final,
	.finup		=	sha1_avx_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx",
		.cra_priority	=	160,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha1_avx(void)
{
	if (avx_usable())
		return crypto_register_shash(&sha1_avx_alg);
	return 0;
}

static void unregister_sha1_avx(void)
{
	if (avx_usable())
		crypto_unregister_shash(&sha1_avx_alg);
}

#else  /* CONFIG_AS_AVX */
static inline int register_sha1_avx(void) { return 0; }
static inline void unregister_sha1_avx(void) { }
#endif /* CONFIG_AS_AVX */


#if defined(CONFIG_AS_AVX2) && (CONFIG_AS_AVX)
#define SHA1_AVX2_BLOCK_OPTSIZE	4	/* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(u32 *digest, const char *data,
				    unsigned int rounds);

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
		&& boot_cpu_has(X86_FEATURE_BMI1)
		&& boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static void sha1_apply_transform_avx2(u32 *digest, const char *data,
				unsigned int rounds)
{
	/* Select the optimal transform based on data block size */
	if (rounds >= SHA1_AVX2_BLOCK_OPTSIZE)
		sha1_transform_avx2(digest, data, rounds);
	else
		sha1_transform_avx(digest, data, rounds);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len,
		(sha1_transform_fn *) sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out,
		(sha1_transform_fn *) sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx2_update,
	.final		=	sha1_avx2_final,
	.finup		=	sha1_avx2_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx2",
		.cra_priority	=	170,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shash(&sha1_avx2_alg);
	return 0;
}

static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}

#else
static inline int register_sha1_avx2(void) { return 0; }
static inline void unregister_sha1_avx2(void) { }
#endif

#ifdef CONFIG_AS_SHA1_NI
asmlinkage void sha1_ni_transform(u32 *digest, const char *data,
				   unsigned int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len,
		(sha1_transform_fn *) sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out,
		(sha1_transform_fn *) sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ni_update,
	.final		=	sha1_ni_final,
	.finup		=	sha1_ni_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ni",
		.cra_priority	=	250,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif

static int __init sha1_ssse3_mod_init(void)
{
	if (register_sha1_ssse3())
		goto fail;

	if (register_sha1_avx()) {
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_avx2()) {
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_ni()) {
		unregister_sha1_avx2();
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif
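For completeness, a hedged userspace sketch: on a kernel built with CONFIG_CRYPTO_USER_API_HASH, the AF_ALG socket interface reaches whichever registered "sha1" driver has the highest cra_priority (sha1-ni at 250, then sha1-avx2, sha1-avx, sha1-ssse3, or the generic fallback). The program below is illustrative only, not taken from the kernel sources; the message "abc" and the output formatting are arbitrary choices.

/* Illustrative only: one-shot SHA-1 over AF_ALG. */
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <linux/if_alg.h>

int main(void)
{
	struct sockaddr_alg sa = {
		.salg_family = AF_ALG,
		.salg_type   = "hash",
		.salg_name   = "sha1",		/* generic name; kernel picks the driver */
	};
	unsigned char digest[20];
	const char msg[] = "abc";
	int tfmfd, opfd;

	tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
	if (tfmfd < 0 || bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa)) < 0)
		return 1;

	opfd = accept(tfmfd, NULL, 0);
	if (opfd < 0)
		return 1;

	/* Write the message, then read back the 20-byte digest. */
	if (write(opfd, msg, strlen(msg)) != (ssize_t)strlen(msg))
		return 1;
	if (read(opfd, digest, sizeof(digest)) != (ssize_t)sizeof(digest))
		return 1;

	for (size_t i = 0; i < sizeof(digest); i++)
		printf("%02x", digest[i]);
	putchar('\n');

	close(opfd);
	close(tfmfd);
	return 0;
}

Binding to a specific driver name instead of the generic one (for example salg_name = "sha1-avx2") works the same way; the MODULE_ALIAS_CRYPTO entries above are what let such requests auto-load this module on demand.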