/* arch/s390/crypto/crc32-vx.c — listing as shipped in Linux v6.8 */
  1// SPDX-License-Identifier: GPL-2.0
  2/*
  3 * Crypto-API module for CRC-32 algorithms implemented with the
  4 * z/Architecture Vector Extension Facility.
  5 *
  6 * Copyright IBM Corp. 2015
  7 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
  8 */
  9#define KMSG_COMPONENT	"crc32-vx"
 10#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt
 11
 12#include <linux/module.h>
 13#include <linux/cpufeature.h>
 14#include <linux/crc32.h>
 15#include <crypto/internal/hash.h>
 16#include <asm/fpu/api.h>
 17
 18
/* Block/digest sizes reported to the crypto API: CRC-32 consumes input
 * byte-wise and produces a 32-bit checksum.
 */
#define CRC32_BLOCK_SIZE	1
#define CRC32_DIGEST_SIZE	4

/* Buffers shorter than roughly VX_MIN_LEN bytes are not worth the vector
 * register save/restore; the vector helpers require 16-byte alignment.
 */
#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)

/* Per-tfm context: the (optional) CRC seed installed by setkey()/cra_init. */
struct crc_ctx {
	u32 key;
};

/* Per-request context: the running CRC value. */
struct crc_desc_ctx {
	u32 crc;
};

/* Prototypes for functions in assembly files */
u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
 38
/*
 * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
 *
 * Creates a function to perform a particular CRC-32 computation. Depending
 * on the message buffer, the hardware-accelerated or software implementation
 * is used.   Note that the message buffer is aligned to improve fetch
 * operations of VECTOR LOAD MULTIPLE instructions.
 *
 * @___fname:    name of the generated function
 * @___crc32_vx: vector-accelerated backend (assembly, 16-byte blocks)
 * @___crc32_sw: software fallback for short, unaligned and trailing parts
 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw)		    \
	static u32 __pure ___fname(u32 crc,				    \
				unsigned char const *data, size_t datalen)  \
	{								    \
		struct kernel_fpu vxstate;				    \
		unsigned long prealign, aligned, remaining;		    \
									    \
		/* Saving/restoring the vector registers does not pay	    \
		 * off for short messages: stay in software.		    \
		 */							    \
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK)		    \
			return ___crc32_sw(crc, data, datalen);		    \
									    \
		/* Fold a leading unaligned chunk in software so the	    \
		 * vector routine sees a 16-byte aligned buffer.	    \
		 */							    \
		if ((unsigned long)data & VX_ALIGN_MASK) {		    \
			prealign = VX_ALIGNMENT -			    \
				  ((unsigned long)data & VX_ALIGN_MASK);    \
			datalen -= prealign;				    \
			crc = ___crc32_sw(crc, data, prealign);		    \
			data = (void *)((unsigned long)data + prealign);    \
		}							    \
									    \
		aligned = datalen & ~VX_ALIGN_MASK;			    \
		remaining = datalen & VX_ALIGN_MASK;			    \
									    \
		/* The asm helper clobbers the low vector registers;	    \
		 * bracket it with kernel_fpu_begin/end.		    \
		 */							    \
		kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW);		    \
		crc = ___crc32_vx(crc, data, aligned);			    \
		kernel_fpu_end(&vxstate, KERNEL_VXR_LOW);		    \
									    \
		/* Trailing bytes (fewer than 16) finish in software. */    \
		if (remaining)						    \
			crc = ___crc32_sw(crc, data + aligned, remaining);  \
									    \
		return crc;						    \
	}

/* Instantiate the little-endian, big-endian and Castagnoli variants. */
DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)
 82
 83
 84static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
 85{
 86	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);
 87
 88	mctx->key = 0;
 89	return 0;
 90}
 91
 92static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
 93{
 94	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);
 95
 96	mctx->key = ~0;
 97	return 0;
 98}
 99
100static int crc32_vx_init(struct shash_desc *desc)
101{
102	struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
103	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
104
105	ctx->crc = mctx->key;
106	return 0;
107}
108
109static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
110			   unsigned int newkeylen)
111{
112	struct crc_ctx *mctx = crypto_shash_ctx(tfm);
113
114	if (newkeylen != sizeof(mctx->key))
 
115		return -EINVAL;
 
116	mctx->key = le32_to_cpu(*(__le32 *)newkey);
117	return 0;
118}
119
120static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
121			     unsigned int newkeylen)
122{
123	struct crc_ctx *mctx = crypto_shash_ctx(tfm);
124
125	if (newkeylen != sizeof(mctx->key))
 
126		return -EINVAL;
 
127	mctx->key = be32_to_cpu(*(__be32 *)newkey);
128	return 0;
129}
130
131static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
132{
133	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
134
135	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
136	return 0;
137}
138
139static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
140{
141	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
142
143	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
144	return 0;
145}
146
147static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
148{
149	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);
150
151	/*
152	 * Perform a final XOR with 0xFFFFFFFF to be in sync
153	 * with the generic crc32c shash implementation.
154	 */
155	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
156	return 0;
157}
158
159static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
160			      u8 *out)
161{
162	*(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
163	return 0;
164}
165
166static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
167			      u8 *out)
168{
169	*(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
170	return 0;
171}
172
173static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
174			     u8 *out)
175{
176	/*
177	 * Perform a final XOR with 0xFFFFFFFF to be in sync
178	 * with the generic crc32c shash implementation.
179	 */
180	*(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
181	return 0;
182}
183
184
/*
 * CRC32_VX_FINUP() - generate a shash .finup handler named <alg>_vx_finup
 *
 * The handler forwards to __<alg>_vx_finup(). The request context
 * (struct crc_desc_ctx) starts with its u32 crc member, so the
 * shash_desc_ctx() pointer can be handed over directly as a u32 *.
 * The @func argument is not expanded; it only documents which CRC
 * routine backs the handler.
 */
#define CRC32_VX_FINUP(alg, func)					      \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data,  \
				   unsigned int datalen, u8 *out)	      \
	{								      \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc),	      \
					      data, datalen, out);	      \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)
196
/*
 * CRC32_VX_DIGEST() - generate a shash .digest handler named <alg>_vx_digest
 *
 * One-shot digest: unlike .finup, it starts from the tfm-wide seed
 * (struct crc_ctx also begins with a u32, so crypto_shash_ctx() can be
 * passed as a u32 *). The @func argument is not expanded; it only
 * documents which CRC routine backs the handler.
 */
#define CRC32_VX_DIGEST(alg, func)					      \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out)		      \
	{								      \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm),    \
					      data, len, out);		      \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)
208
/*
 * CRC32_VX_UPDATE() - generate a shash .update handler named <alg>_vx_update
 *
 * Folds @datalen bytes of @data into the running CRC kept in the request
 * context, using the (possibly vector-accelerated) routine @func.
 */
#define CRC32_VX_UPDATE(alg, func)					      \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen)		      \
	{								      \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
		ctx->crc = func(ctx->crc, data, datalen);		      \
		return 0;						      \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)
221
222
/*
 * Shash algorithm descriptors registered by this module. All three share
 * the same tfm/request context layout; they differ in byte order, the
 * final-XOR convention (crc32c) and the default seed set by cra_init.
 * CRYPTO_ALG_OPTIONAL_KEY marks setkey() as optional.
 */
static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32le_vx_update,
		.final		=	crc32le_vx_final,
		.finup		=	crc32le_vx_finup,
		.digest		=	crc32le_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32be_vx_setkey,
		.update		=	crc32be_vx_update,
		.final		=	crc32be_vx_final,
		.finup		=	crc32be_vx_finup,
		.digest		=	crc32be_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32c_vx_update,
		.final		=	crc32c_vx_final,
		.finup		=	crc32c_vx_finup,
		.digest		=	crc32c_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			/* crc32c seeds with ~0 instead of 0 */
			.cra_init	 = crc32_vx_cra_init_invert,
		},
	},
};
288
289
290static int __init crc_vx_mod_init(void)
291{
292	return crypto_register_shashes(crc32_vx_algs,
293				       ARRAY_SIZE(crc32_vx_algs));
294}
295
/* Undo crc_vx_mod_init(): unregister all algorithm descriptors. */
static void __exit crc_vx_mod_exit(void)
{
	crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
}
300
/* Auto-load only on machines providing the vector extension facility. */
module_cpu_feature_match(S390_CPU_FEATURE_VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_LICENSE("GPL");

/* Generic and driver-specific names so userspace requests can load us. */
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");
/* Below: the same file as shipped in Linux v5.4 (older duplicate listing). */
  1// SPDX-License-Identifier: GPL-2.0
  2/*
  3 * Crypto-API module for CRC-32 algorithms implemented with the
  4 * z/Architecture Vector Extension Facility.
  5 *
  6 * Copyright IBM Corp. 2015
  7 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
  8 */
  9#define KMSG_COMPONENT	"crc32-vx"
 10#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt
 11
 12#include <linux/module.h>
 13#include <linux/cpufeature.h>
 14#include <linux/crc32.h>
 15#include <crypto/internal/hash.h>
 16#include <asm/fpu/api.h>
 17
 18
/* CRC-32 is byte-wise with a 32-bit digest. */
#define CRC32_BLOCK_SIZE	1
#define CRC32_DIGEST_SIZE	4

/* Vector path threshold and the 16-byte alignment it requires. */
#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)

/* Per-tfm CRC seed. */
struct crc_ctx {
	u32 key;
};

/* Per-request running CRC. */
struct crc_desc_ctx {
	u32 crc;
};

/* Prototypes for functions in assembly files */
u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
 38
 39/*
 40 * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
 41 *
 42 * Creates a function to perform a particular CRC-32 computation. Depending
 43 * on the message buffer, the hardware-accelerated or software implementation
 44 * is used.   Note that the message buffer is aligned to improve fetch
 45 * operations of VECTOR LOAD MULTIPLE instructions.
 46 *
 47 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw)		    \
	static u32 __pure ___fname(u32 crc,				    \
				unsigned char const *data, size_t datalen)  \
	{								    \
		struct kernel_fpu vxstate;				    \
		unsigned long prealign, aligned, remaining;		    \
									    \
		/* Short messages: software only (FPU switch too costly) */ \
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK)		    \
			return ___crc32_sw(crc, data, datalen);		    \
									    \
		/* Software-process a head chunk to reach 16B alignment */  \
		if ((unsigned long)data & VX_ALIGN_MASK) {		    \
			prealign = VX_ALIGNMENT -			    \
				  ((unsigned long)data & VX_ALIGN_MASK);    \
			datalen -= prealign;				    \
			crc = ___crc32_sw(crc, data, prealign);		    \
			data = (void *)((unsigned long)data + prealign);    \
		}							    \
									    \
		aligned = datalen & ~VX_ALIGN_MASK;			    \
		remaining = datalen & VX_ALIGN_MASK;			    \
									    \
		/* Vector part under kernel_fpu protection */		    \
		kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW);		    \
		crc = ___crc32_vx(crc, data, aligned);			    \
		kernel_fpu_end(&vxstate, KERNEL_VXR_LOW);		    \
									    \
		/* Tail bytes (< 16) in software */			    \
		if (remaining)						    \
			crc = ___crc32_sw(crc, data + aligned, remaining);  \
									    \
		return crc;						    \
	}

DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)
 82
 83
/* Default seed 0: the crc32(le)/crc32be convention. */
static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = 0;
	return 0;
}

/* Default seed ~0: the crc32c convention. */
static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = ~0;
	return 0;
}

/* Start a new request from the tfm-wide seed. */
static int crc32_vx_init(struct shash_desc *desc)
{
	struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->crc = mctx->key;
	return 0;
}
108
/*
 * Install a 4-byte little-endian seed. A wrong key length sets the
 * legacy CRYPTO_TFM_RES_BAD_KEY_LEN result flag and returns -EINVAL.
 */
static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			   unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	mctx->key = le32_to_cpu(*(__le32 *)newkey);
	return 0;
}

/* Big-endian variant of crc32_vx_setkey(). */
static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			     unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	mctx->key = be32_to_cpu(*(__be32 *)newkey);
	return 0;
}
134
/* Emit the running CRC as a little-endian digest. */
static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
	return 0;
}

/* Emit the running CRC as a big-endian digest. */
static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
	return 0;
}

/* Emit the crc32c digest (little-endian, final XOR applied). */
static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
	return 0;
}
162
/* Fold the trailing bytes into *@crc and write the LE digest to @out. */
static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
	return 0;
}

/* Fold the trailing bytes into *@crc and write the BE digest to @out. */
static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
	return 0;
}

/* Fold the trailing bytes into *@crc and write the crc32c digest to @out. */
static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			     u8 *out)
{
	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
	return 0;
}
187
188
/*
 * Generate the .finup handlers. struct crc_desc_ctx starts with its u32
 * crc member, so shash_desc_ctx() can be passed directly as a u32 *.
 * @func is not expanded; it only documents the backing routine.
 */
#define CRC32_VX_FINUP(alg, func)					      \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data,  \
				   unsigned int datalen, u8 *out)	      \
	{								      \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc),	      \
					      data, datalen, out);	      \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)

/*
 * Generate the one-shot .digest handlers: like .finup but seeded from the
 * tfm context (struct crc_ctx also starts with a u32). @func is unused.
 */
#define CRC32_VX_DIGEST(alg, func)					      \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out)		      \
	{								      \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm),    \
					      data, len, out);		      \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)

/*
 * Generate the .update handlers: fold @datalen bytes into the running
 * CRC using @func.
 */
#define CRC32_VX_UPDATE(alg, func)					      \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen)		      \
	{								      \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc);	      \
		ctx->crc = func(ctx->crc, data, datalen);		      \
		return 0;						      \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)
225
226
/*
 * Registered algorithm descriptors: LE, BE and Castagnoli (crc32c)
 * variants sharing the same context layout.
 */
static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32le_vx_update,
		.final		=	crc32le_vx_final,
		.finup		=	crc32le_vx_finup,
		.digest		=	crc32le_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32be_vx_setkey,
		.update		=	crc32be_vx_update,
		.final		=	crc32be_vx_final,
		.finup		=	crc32be_vx_finup,
		.digest		=	crc32be_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE */
	{
		.init		=	crc32_vx_init,
		.setkey		=	crc32_vx_setkey,
		.update		=	crc32c_vx_update,
		.final		=	crc32c_vx_final,
		.finup		=	crc32c_vx_finup,
		.digest		=	crc32c_vx_digest,
		.descsize	=	sizeof(struct crc_desc_ctx),
		.digestsize	=	CRC32_DIGEST_SIZE,
		.base		=	{
			.cra_name	 = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			/* crc32c seeds with ~0 instead of 0 */
			.cra_init	 = crc32_vx_cra_init_invert,
		},
	},
};
292
293
/* Register all vector-accelerated CRC-32 variants with the crypto API. */
static int __init crc_vx_mod_init(void)
{
	return crypto_register_shashes(crc32_vx_algs,
				       ARRAY_SIZE(crc32_vx_algs));
}

/* Undo crc_vx_mod_init(). */
static void __exit crc_vx_mod_exit(void)
{
	crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
}

/* Auto-load only on machines providing the vector extension facility. */
module_cpu_feature_match(VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_LICENSE("GPL");

/* Generic and driver-specific names so userspace requests can load us. */
MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");