// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/crc32c-intel.c
 *
 * Copyright (C) 2008 Intel Corporation
 * Authors: Austin Zhang <austin_zhang@linux.intel.com>
 *          Kent Liu <kent.liu@intel.com>
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/kernel.h>
#include <linux/crc32.h>

#include <crypto/internal/hash.h>

#include <asm/pstate.h>
#include <asm/elf.h>
#include <asm/unaligned.h>

#include "opcodes.h"

27/*
28 * Setting the seed allows arbitrary accumulators and flexible XOR policy
29 * If your algorithm starts with ~0, then XOR with ~0 before you set
30 * the seed.
31 */
static int crc32c_sparc64_setkey(struct crypto_shash *hash, const u8 *key,
				 unsigned int keylen)
{
	u32 *mctx = crypto_shash_ctx(hash);

	if (keylen != sizeof(u32))
		return -EINVAL;
	*mctx = get_unaligned_le32(key);
	return 0;
}
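
/*
 * Illustrative sketch (comment only, not compiled): resuming a checksum
 * from a previously returned digest.  Because finup()/final() below emit
 * the bit-inverted CRC state, a hypothetical caller (tfm and prev_digest
 * are assumed) undoes the inversion before seeding:
 *
 *	u32 state = ~get_unaligned_le32(prev_digest);
 *	__le32 key = cpu_to_le32(state);
 *
 *	crypto_shash_setkey(tfm, (const u8 *)&key, sizeof(key));
 */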

static int crc32c_sparc64_init(struct shash_desc *desc)
{
	u32 *mctx = crypto_shash_ctx(desc->tfm);
	u32 *crcp = shash_desc_ctx(desc);

	*crcp = *mctx;

	return 0;
}

/* In assembly; expects 8-byte-aligned data and a multiple-of-8 length. */
extern void crc32c_sparc64(u32 *crcp, const u64 *data, unsigned int len);

static u32 crc32c_compute(u32 crc, const u8 *data, unsigned int len)
{
	unsigned int n = -(uintptr_t)data & 7;	/* bytes to 8-byte alignment */

	if (n) {
		/* Data isn't 8-byte aligned.  Align it. */
		n = min(n, len);
		crc = __crc32c_le(crc, data, n);
		data += n;
		len -= n;
	}
	/* Run the crc32c opcode over the aligned 8-byte words. */
	n = len & ~7U;
	if (n) {
		crc32c_sparc64(&crc, (const u64 *)data, n);
		data += n;
		len -= n;
	}
	/* Fold in any tail bytes with the generic byte-at-a-time helper. */
	if (len)
		crc = __crc32c_le(crc, data, len);
	return crc;
}
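
/*
 * Worked example of the alignment arithmetic above: for a buffer at
 * address 0x1005, -(uintptr_t)data & 7 == 3, so three bytes are folded
 * in bytewise, the opcode loop then runs from the 8-byte boundary at
 * 0x1008, and any remainder shorter than eight bytes is handled at the
 * end.
 */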

static int crc32c_sparc64_update(struct shash_desc *desc, const u8 *data,
				 unsigned int len)
{
	u32 *crcp = shash_desc_ctx(desc);

	*crcp = crc32c_compute(*crcp, data, len);
	return 0;
}

static int __crc32c_sparc64_finup(const u32 *crcp, const u8 *data,
				  unsigned int len, u8 *out)
{
	/* CRC32C's final step inverts the state before emitting it. */
	put_unaligned_le32(~crc32c_compute(*crcp, data, len), out);
	return 0;
}

static int crc32c_sparc64_finup(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	return __crc32c_sparc64_finup(shash_desc_ctx(desc), data, len, out);
}

static int crc32c_sparc64_final(struct shash_desc *desc, u8 *out)
{
	u32 *crcp = shash_desc_ctx(desc);

	put_unaligned_le32(~*crcp, out);
	return 0;
}

/* One-shot digest: seed straight from the tfm context, no desc state. */
static int crc32c_sparc64_digest(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return __crc32c_sparc64_finup(crypto_shash_ctx(desc->tfm), data, len,
				      out);
}

static int crc32c_sparc64_cra_init(struct crypto_tfm *tfm)
{
	u32 *key = crypto_tfm_ctx(tfm);

	/* Default seed: ~0, the standard CRC32C initial value. */
	*key = ~0;

	return 0;
}

#define CHKSUM_BLOCK_SIZE	1
#define CHKSUM_DIGEST_SIZE	4

static struct shash_alg alg = {
	.setkey			= crc32c_sparc64_setkey,
	.init			= crc32c_sparc64_init,
	.update			= crc32c_sparc64_update,
	.final			= crc32c_sparc64_final,
	.finup			= crc32c_sparc64_finup,
	.digest			= crc32c_sparc64_digest,
	.descsize		= sizeof(u32),
	.digestsize		= CHKSUM_DIGEST_SIZE,
	.base			= {
		.cra_name		= "crc32c",
		.cra_driver_name	= "crc32c-sparc64",
		.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
		.cra_flags		= CRYPTO_ALG_OPTIONAL_KEY,
		.cra_blocksize		= CHKSUM_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(u32),
		.cra_module		= THIS_MODULE,
		.cra_init		= crc32c_sparc64_cra_init,
	}
};
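
/*
 * Illustrative sketch (comment only, not compiled): a consumer reaches
 * this driver transparently through the generic shash API.  Error
 * handling is omitted and buf/buf_len are assumed:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("crc32c", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 digest[CHKSUM_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_digest(desc, buf, buf_len, digest);
 *	crypto_free_shash(tfm);
 */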

static bool __init sparc64_has_crc32c_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	/* %asr26 is the CFR; check that the crc32c opcode is implemented. */
	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_CRC32C))
		return false;

	return true;
}

static int __init crc32c_sparc64_mod_init(void)
{
	if (sparc64_has_crc32c_opcode()) {
		pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
		return crypto_register_shash(&alg);
	}
	pr_info("sparc64 crc32c opcode not available.\n");
	return -ENODEV;
}

static void __exit crc32c_sparc64_mod_fini(void)
{
	crypto_unregister_shash(&alg);
}

module_init(crc32c_sparc64_mod_init);
module_exit(crc32c_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");

MODULE_ALIAS_CRYPTO("crc32c");

/* Pulls in the shared OF device ID table so the module can autoload. */
#include "crop_devid.c"