v3.5.6
 
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/cryptohash.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <asm/byteorder.h>
#include <asm/i387.h>
#include <asm/xcr.h>
#include <asm/xsave.h>

asmlinkage void sha1_transform_ssse3(u32 *digest, const char *data,
				     unsigned int rounds);
#ifdef SHA1_ENABLE_AVX_SUPPORT
asmlinkage void sha1_transform_avx(u32 *digest, const char *data,
				   unsigned int rounds);
#endif
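
/*
 * Set once at module init to the fastest transform the CPU supports;
 * both assembler routines share this calling convention.
 */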
static asmlinkage void (*sha1_transform_asm)(u32 *, const char *, unsigned int);

static int sha1_ssse3_init(struct shash_desc *desc)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha1_state){
		.state = { SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4 },
	};

	return 0;
}
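
/*
 * Core update step. Must run between kernel_fpu_begin()/kernel_fpu_end();
 * @partial is the number of bytes already buffered in sctx->buffer. Any
 * complete 64-byte blocks are fed to the assembler transform, the tail is
 * buffered for the next call.
 */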
static int __sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			       unsigned int len, unsigned int partial)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int done = 0;

	sctx->count += len;

	if (partial) {
		done = SHA1_BLOCK_SIZE - partial;
		memcpy(sctx->buffer + partial, data, done);
		sha1_transform_asm(sctx->state, sctx->buffer, 1);
	}

	if (len - done >= SHA1_BLOCK_SIZE) {
		const unsigned int rounds = (len - done) / SHA1_BLOCK_SIZE;

		sha1_transform_asm(sctx->state, data + done, rounds);
		done += rounds * SHA1_BLOCK_SIZE;
	}

	memcpy(sctx->buffer, data + done, len - done);

	return 0;
}
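
/*
 * The FPU may be unavailable here (e.g. when running in a context that
 * interrupted another FPU user), in which case we fall back to the generic
 * C implementation rather than touching the SSE registers.
 */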
static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SHA1_BLOCK_SIZE;
	int res;

	/* Handle the fast case right here */
	if (partial + len < SHA1_BLOCK_SIZE) {
		sctx->count += len;
		memcpy(sctx->buffer + partial, data, len);

		return 0;
	}

	if (!irq_fpu_usable()) {
		res = crypto_sha1_update(desc, data, len);
	} else {
		kernel_fpu_begin();
		res = __sha1_ssse3_update(desc, data, len, partial);
		kernel_fpu_end();
	}

	return res;
}
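
/*
 * SHA-1 padding: a 0x80 byte, zeros up to 56 mod 64, then the message
 * length in bits as a 64-bit big-endian value, so the padded stream is a
 * whole number of 64-byte blocks.
 */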
/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	static const u8 padding[SHA1_BLOCK_SIZE] = { 0x80, };

	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64 and append length */
	index = sctx->count % SHA1_BLOCK_SIZE;
	padlen = (index < 56) ? (56 - index) : ((SHA1_BLOCK_SIZE+56) - index);
	if (!irq_fpu_usable()) {
		crypto_sha1_update(desc, padding, padlen);
		crypto_sha1_update(desc, (const u8 *)&bits, sizeof(bits));
	} else {
		kernel_fpu_begin();
		/* We need to fill a whole block for __sha1_ssse3_update() */
		if (padlen <= 56) {
			sctx->count += padlen;
			memcpy(sctx->buffer + index, padding, padlen);
		} else {
			__sha1_ssse3_update(desc, padding, padlen, index);
		}
		__sha1_ssse3_update(desc, (const u8 *)&bits, sizeof(bits), 56);
		kernel_fpu_end();
	}

	/* Store state in digest */
	for (i = 0; i < 5; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}

static int sha1_ssse3_export(struct shash_desc *desc, void *out)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	memcpy(out, sctx, sizeof(*sctx));

	return 0;
}

static int sha1_ssse3_import(struct shash_desc *desc, const void *in)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	memcpy(sctx, in, sizeof(*sctx));

	return 0;
}
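
/*
 * cra_priority 150 outranks the generic C "sha1" driver, so callers who
 * allocate "sha1" get this implementation whenever it is registered.
 */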
static struct shash_alg alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_ssse3_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.export		=	sha1_ssse3_export,
	.import		=	sha1_ssse3_import,
	.descsize	=	sizeof(struct sha1_state),
	.statesize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name=	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

#ifdef SHA1_ENABLE_AVX_SUPPORT
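/*
 * AVX is only safe if the CPU advertises it *and* the OS has enabled
 * saving of the SSE and YMM register state in XCR0 (OSXSAVE).
 */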
static bool __init avx_usable(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave)
		return false;

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");

		return false;
	}

	return true;
}
#endif

static int __init sha1_ssse3_mod_init(void)
{
	/* test for SSSE3 first */
	if (cpu_has_ssse3)
		sha1_transform_asm = sha1_transform_ssse3;

#ifdef SHA1_ENABLE_AVX_SUPPORT
	/* allow AVX to override SSSE3, it's a little faster */
	if (avx_usable())
		sha1_transform_asm = sha1_transform_avx;
#endif

	if (sha1_transform_asm) {
		pr_info("Using %s optimized SHA-1 implementation\n",
		        sha1_transform_asm == sha1_transform_ssse3 ? "SSSE3"
		                                                   : "AVX");
		return crypto_register_shash(&alg);
	}
	pr_info("Neither AVX nor SSSE3 is available/usable.\n");

	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS("sha1");

v5.14.15
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Glue code for the SHA1 Secure Hash Algorithm assembler implementation using
 * Supplemental SSE3 instructions.
 *
 * This file is based on sha1_generic.c
 *
 * Copyright (c) Alan Smithee.
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) Mathias Krause <minipli@googlemail.com>
 * Copyright (c) Chandramouli Narayanan <mouli@linux.intel.com>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha1.h>
#include <crypto/sha1_base.h>
#include <asm/simd.h>
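
/*
 * Shared update path for every variant below: fall back to the generic C
 * code when SIMD is not usable in this context or the input still fits in
 * the partial-block buffer; otherwise run the given block function with
 * the FPU section held open.
 */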
static int sha1_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len, sha1_block_fn *sha1_xform)
{
	struct sha1_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable() ||
	    (sctx->count % SHA1_BLOCK_SIZE) + len < SHA1_BLOCK_SIZE)
		return crypto_sha1_update(desc, data, len);

	/*
	 * Make sure struct sha1_state begins directly with the SHA1
	 * 160-bit internal state, as this is what the asm functions expect.
	 */
	BUILD_BUG_ON(offsetof(struct sha1_state, state) != 0);

	kernel_fpu_begin();
	sha1_base_do_update(desc, data, len, sha1_xform);
	kernel_fpu_end();

	return 0;
}

static int sha1_finup(struct shash_desc *desc, const u8 *data,
		      unsigned int len, u8 *out, sha1_block_fn *sha1_xform)
{
	if (!crypto_simd_usable())
		return crypto_sha1_finup(desc, data, len, out);

	kernel_fpu_begin();
	if (len)
		sha1_base_do_update(desc, data, len, sha1_xform);
	sha1_base_do_finalize(desc, sha1_xform);
	kernel_fpu_end();

	return sha1_base_finish(desc, out);
}
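
/*
 * Each ISA variant below is just the pair of helpers above bound to its
 * assembler block function, plus a shash_alg with a distinct driver name
 * and priority.
 */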
asmlinkage void sha1_transform_ssse3(struct sha1_state *state,
				     const u8 *data, int blocks);

static int sha1_ssse3_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_ssse3);
}

static int sha1_ssse3_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_ssse3);
}

/* Add padding and return the message digest. */
static int sha1_ssse3_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ssse3_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ssse3_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ssse3_update,
	.final		=	sha1_ssse3_final,
	.finup		=	sha1_ssse3_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ssse3",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
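
/*
 * Priorities form a ladder (ssse3 150 < avx 160 < avx2 170 < sha-ni 250);
 * every usable variant is registered and the crypto core hands out the
 * highest-priority "sha1" when a caller allocates one.
 */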

static int register_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		return crypto_register_shash(&sha1_ssse3_alg);
	return 0;
}

static void unregister_sha1_ssse3(void)
{
	if (boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_shash(&sha1_ssse3_alg);
}

asmlinkage void sha1_transform_avx(struct sha1_state *state,
				   const u8 *data, int blocks);

static int sha1_avx_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_transform_avx);
}

static int sha1_avx_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_transform_avx);
}

static int sha1_avx_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx_update,
	.final		=	sha1_avx_final,
	.finup		=	sha1_avx_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx",
		.cra_priority	=	160,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
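
/*
 * cpu_has_xfeatures() asks the kernel's xstate code whether the OS
 * saves/restores the SSE and YMM register state, replacing the
 * open-coded XGETBV check used by older kernels.
 */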
static bool avx_usable(void)
{
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		if (boot_cpu_has(X86_FEATURE_AVX))
			pr_info("AVX detected but unusable.\n");
		return false;
	}

	return true;
}

static int register_sha1_avx(void)
{
	if (avx_usable())
		return crypto_register_shash(&sha1_avx_alg);
	return 0;
}

static void unregister_sha1_avx(void)
{
	if (avx_usable())
		crypto_unregister_shash(&sha1_avx_alg);
}
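
/*
 * The AVX2 routine is tuned for multi-block inputs and only pays off from
 * SHA1_AVX2_BLOCK_OPTSIZE blocks upward; shorter inputs are routed to the
 * plain AVX transform by sha1_apply_transform_avx2() below.
 */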
#define SHA1_AVX2_BLOCK_OPTSIZE	4	/* optimal 4*64 bytes of SHA1 blocks */

asmlinkage void sha1_transform_avx2(struct sha1_state *state,
				    const u8 *data, int blocks);

static bool avx2_usable(void)
{
	if (avx_usable() && boot_cpu_has(X86_FEATURE_AVX2)
		&& boot_cpu_has(X86_FEATURE_BMI1)
		&& boot_cpu_has(X86_FEATURE_BMI2))
		return true;

	return false;
}

static void sha1_apply_transform_avx2(struct sha1_state *state,
				      const u8 *data, int blocks)
{
	/* Select the optimal transform based on data block size */
	if (blocks >= SHA1_AVX2_BLOCK_OPTSIZE)
		sha1_transform_avx2(state, data, blocks);
	else
		sha1_transform_avx(state, data, blocks);
}

static int sha1_avx2_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_apply_transform_avx2);
}

static int sha1_avx2_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_apply_transform_avx2);
}

static int sha1_avx2_final(struct shash_desc *desc, u8 *out)
{
	return sha1_avx2_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_avx2_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_avx2_update,
	.final		=	sha1_avx2_final,
	.finup		=	sha1_avx2_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-avx2",
		.cra_priority	=	170,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_avx2(void)
{
	if (avx2_usable())
		return crypto_register_shash(&sha1_avx2_alg);
	return 0;
}

static void unregister_sha1_avx2(void)
{
	if (avx2_usable())
		crypto_unregister_shash(&sha1_avx2_alg);
}

#ifdef CONFIG_AS_SHA1_NI
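/*
 * SHA-NI is the dedicated x86 SHA instruction set; this block is only
 * compiled when the assembler understands those instructions
 * (CONFIG_AS_SHA1_NI).
 */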
asmlinkage void sha1_ni_transform(struct sha1_state *digest, const u8 *data,
				  int rounds);

static int sha1_ni_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	return sha1_update(desc, data, len, sha1_ni_transform);
}

static int sha1_ni_finup(struct shash_desc *desc, const u8 *data,
			      unsigned int len, u8 *out)
{
	return sha1_finup(desc, data, len, out, sha1_ni_transform);
}

static int sha1_ni_final(struct shash_desc *desc, u8 *out)
{
	return sha1_ni_finup(desc, NULL, 0, out);
}

static struct shash_alg sha1_ni_alg = {
	.digestsize	=	SHA1_DIGEST_SIZE,
	.init		=	sha1_base_init,
	.update		=	sha1_ni_update,
	.final		=	sha1_ni_final,
	.finup		=	sha1_ni_finup,
	.descsize	=	sizeof(struct sha1_state),
	.base		=	{
		.cra_name	=	"sha1",
		.cra_driver_name =	"sha1-ni",
		.cra_priority	=	250,
		.cra_blocksize	=	SHA1_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};

static int register_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		return crypto_register_shash(&sha1_ni_alg);
	return 0;
}

static void unregister_sha1_ni(void)
{
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		crypto_unregister_shash(&sha1_ni_alg);
}

#else
static inline int register_sha1_ni(void) { return 0; }
static inline void unregister_sha1_ni(void) { }
#endif
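
/*
 * Register every variant the CPU supports. Each register_* helper is a
 * no-op returning 0 when its feature is absent, so on failure we only
 * unwind the registrations that actually happened.
 */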
static int __init sha1_ssse3_mod_init(void)
{
	if (register_sha1_ssse3())
		goto fail;

	if (register_sha1_avx()) {
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_avx2()) {
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	if (register_sha1_ni()) {
		unregister_sha1_avx2();
		unregister_sha1_avx();
		unregister_sha1_ssse3();
		goto fail;
	}

	return 0;
fail:
	return -ENODEV;
}

static void __exit sha1_ssse3_mod_fini(void)
{
	unregister_sha1_ni();
	unregister_sha1_avx2();
	unregister_sha1_avx();
	unregister_sha1_ssse3();
}

module_init(sha1_ssse3_mod_init);
module_exit(sha1_ssse3_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA1 Secure Hash Algorithm, Supplemental SSE3 accelerated");

MODULE_ALIAS_CRYPTO("sha1");
MODULE_ALIAS_CRYPTO("sha1-ssse3");
MODULE_ALIAS_CRYPTO("sha1-avx");
MODULE_ALIAS_CRYPTO("sha1-avx2");
#ifdef CONFIG_AS_SHA1_NI
MODULE_ALIAS_CRYPTO("sha1-ni");
#endif
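
/*
 * Usage sketch (not part of either listing above): a minimal example of
 * how kernel code would reach whichever "sha1" driver won on priority.
 * The function name sha1_digest_example is hypothetical; the calls
 * (crypto_alloc_shash, crypto_shash_digest) are the standard shash API.
 */
#include <crypto/hash.h>
#include <crypto/sha1.h>
#include <linux/err.h>

static int sha1_digest_example(const u8 *data, unsigned int len,
			       u8 out[SHA1_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int err;

	/* The core returns the highest-priority registered "sha1". */
	tfm = crypto_alloc_shash("sha1", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return err;
}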