arch/arm/crypto/sha256_glue.c (v6.13.7)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
 * using optimized ARM assembler and NEON instructions.
 *
 * Copyright © 2015 Google Inc.
 *
 * This file is based on sha256_ssse3_glue.c:
 *   Copyright (C) 2013 Intel Corporation
 *   Author: Tim Chen <tim.c.chen@linux.intel.com>
 */

#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <linux/string.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <asm/simd.h>
#include <asm/neon.h>

#include "sha256_glue.h"

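/*
 * Scalar ARM assembly core: processes num_blks complete 64-byte input
 * blocks, updating the digest words in @state in place.
 */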
asmlinkage void sha256_block_data_order(struct sha256_state *state,
					const u8 *data, int num_blks);

int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	return sha256_base_do_update(desc, data, len, sha256_block_data_order);
}
EXPORT_SYMBOL(crypto_sha256_arm_update);

static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
{
	sha256_base_do_finalize(desc, sha256_block_data_order);
	return sha256_base_finish(desc, out);
}

int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	sha256_base_do_update(desc, data, len, sha256_block_data_order);
	return crypto_sha256_arm_final(desc, out);
}
EXPORT_SYMBOL(crypto_sha256_arm_finup);

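/*
 * Scalar assembly variants of sha256 and sha224.  Priority 150 makes them
 * preferred over the generic C implementation; the NEON variants declared
 * in sha256_glue.h are registered separately in sha256_mod_init().
 */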
static struct shash_alg algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

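/*
 * Register the scalar algorithms first, then the NEON variants when the
 * CPU supports them.  If NEON registration fails, the scalar algorithms
 * are unregistered again so the module load fails cleanly.
 */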
static int __init sha256_mod_init(void)
{
	int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (res < 0)
		return res;

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
		res = crypto_register_shashes(sha256_neon_algs,
					      ARRAY_SIZE(sha256_neon_algs));

		if (res < 0)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}

	return res;
}

static void __exit sha256_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
		crypto_unregister_shashes(sha256_neon_algs,
					  ARRAY_SIZE(sha256_neon_algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");

MODULE_ALIAS_CRYPTO("sha256");
arch/arm/crypto/sha256_glue.c (v6.8)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
 * using optimized ARM assembler and NEON instructions.
 *
 * Copyright © 2015 Google Inc.
 *
 * This file is based on sha256_ssse3_glue.c:
 *   Copyright (C) 2013 Intel Corporation
 *   Author: Tim Chen <tim.c.chen@linux.intel.com>
 */

#include <crypto/internal/hash.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <linux/string.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <asm/simd.h>
#include <asm/neon.h>

#include "sha256_glue.h"

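/*
 * In this version the assembly core takes a bare digest pointer, so the
 * callers below cast it to sha256_block_fn.  The BUILD_BUG_ON() in
 * crypto_sha256_arm_update() makes that cast safe: the digest words are
 * the first member of struct sha256_state.
 */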
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);

int crypto_sha256_arm_update(struct shash_desc *desc, const u8 *data,
			     unsigned int len)
{
	/* make sure casting to sha256_block_fn() is safe */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);

	return sha256_base_do_update(desc, data, len,
				(sha256_block_fn *)sha256_block_data_order);
}
EXPORT_SYMBOL(crypto_sha256_arm_update);

static int crypto_sha256_arm_final(struct shash_desc *desc, u8 *out)
{
	sha256_base_do_finalize(desc,
				(sha256_block_fn *)sha256_block_data_order);
	return sha256_base_finish(desc, out);
}

int crypto_sha256_arm_finup(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	sha256_base_do_update(desc, data, len,
			      (sha256_block_fn *)sha256_block_data_order);
	return crypto_sha256_arm_final(desc, out);
}
EXPORT_SYMBOL(crypto_sha256_arm_finup);

static struct shash_alg algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_base_init,
	.update		=	crypto_sha256_arm_update,
	.final		=	crypto_sha256_arm_final,
	.finup		=	crypto_sha256_arm_finup,
	.descsize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-asm",
		.cra_priority	=	150,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };

static int __init sha256_mod_init(void)
{
	int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (res < 0)
		return res;

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
		res = crypto_register_shashes(sha256_neon_algs,
					      ARRAY_SIZE(sha256_neon_algs));

		if (res < 0)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}

	return res;
}

static void __exit sha256_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
		crypto_unregister_shashes(sha256_neon_algs,
					  ARRAY_SIZE(sha256_neon_algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");

MODULE_ALIAS_CRYPTO("sha256");
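
For reference, a caller inside the kernel normally reaches these implementations through the synchronous hash (shash) API by name: requesting "sha256" resolves to the highest-priority registered driver, so the assembly and NEON variants above (priority 150) win over the generic C code. The sketch below is illustrative only; sha256_digest_example() is a made-up helper name, while crypto_alloc_shash(), SHASH_DESC_ON_STACK() and crypto_shash_digest() are the standard kernel crypto API.

#include <crypto/hash.h>
#include <crypto/sha2.h>
#include <linux/err.h>

/* Hypothetical helper: hash @len bytes at @data into @out (32 bytes). */
static int sha256_digest_example(const u8 *data, unsigned int len,
				 u8 out[SHA256_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	int err;

	/*
	 * "sha256" picks the highest-priority implementation available,
	 * e.g. the NEON or scalar asm drivers registered by this module.
	 */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		err = crypto_shash_digest(desc, data, len, out);
	}

	crypto_free_shash(tfm);
	return err;
}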