/*
 * Crypto-API module for CRC-32 algorithms implemented with the
 * z/Architecture Vector Extension Facility.
 *
 * Copyright IBM Corp. 2015
 * Author(s): Hendrik Brueckner <brueckner@linux.vnet.ibm.com>
 */
#define KMSG_COMPONENT	"crc32-vx"
#define pr_fmt(fmt)	KMSG_COMPONENT ": " fmt

#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/crc32.h>
#include <crypto/internal/hash.h>
#include <asm/fpu/api.h>


#define CRC32_BLOCK_SIZE	1
#define CRC32_DIGEST_SIZE	4

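/*
 * Tuning constants for the vector implementation: buffers shorter than
 * VX_MIN_LEN + VX_ALIGN_MASK bytes are handled entirely by the software
 * CRC-32 routines; longer buffers are processed in VX_ALIGNMENT-byte
 * (16-byte) aligned chunks by the vector routines below.
 */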
#define VX_MIN_LEN		64
#define VX_ALIGNMENT		16L
#define VX_ALIGN_MASK		(VX_ALIGNMENT - 1)

struct crc_ctx {
	u32 key;
};

struct crc_desc_ctx {
	u32 crc;
};

/* Prototypes for functions in assembly files */
u32 crc32_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32_be_vgfm_16(u32 crc, unsigned char const *buf, size_t size);
u32 crc32c_le_vgfm_16(u32 crc, unsigned char const *buf, size_t size);

/*
 * DEFINE_CRC32_VX() - Define a CRC-32 function using the vector extension
 *
 * Creates a function to perform a particular CRC-32 computation. Depending
 * on the length and alignment of the message buffer, either the
 * hardware-accelerated or the software implementation is used.  Note that
 * the message buffer is aligned to improve fetch operations of VECTOR LOAD
 * MULTIPLE instructions.
 */
#define DEFINE_CRC32_VX(___fname, ___crc32_vx, ___crc32_sw) \
	static u32 __pure ___fname(u32 crc, \
			unsigned char const *data, size_t datalen) \
	{ \
		struct kernel_fpu vxstate; \
		unsigned long prealign, aligned, remaining; \
		\
		if (datalen < VX_MIN_LEN + VX_ALIGN_MASK) \
			return ___crc32_sw(crc, data, datalen); \
		\
		if ((unsigned long)data & VX_ALIGN_MASK) { \
			prealign = VX_ALIGNMENT - \
				   ((unsigned long)data & VX_ALIGN_MASK); \
			datalen -= prealign; \
			crc = ___crc32_sw(crc, data, prealign); \
			data = (void *)((unsigned long)data + prealign); \
		} \
		\
		aligned = datalen & ~VX_ALIGN_MASK; \
		remaining = datalen & VX_ALIGN_MASK; \
		\
		kernel_fpu_begin(&vxstate, KERNEL_VXR_LOW); \
		crc = ___crc32_vx(crc, data, aligned); \
		kernel_fpu_end(&vxstate, KERNEL_VXR_LOW); \
		\
		if (remaining) \
			crc = ___crc32_sw(crc, data + aligned, remaining); \
		\
		return crc; \
	}
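
/*
 * Illustrative example of the split performed above (values are
 * hypothetical): for a 100-byte buffer starting 5 bytes past a 16-byte
 * boundary, the 11-byte prealign part is handled by the software CRC,
 * the next 80 bytes (five aligned 16-byte chunks) by the vector routine,
 * and the remaining 9 bytes again by the software CRC.
 */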

DEFINE_CRC32_VX(crc32_le_vx, crc32_le_vgfm_16, crc32_le)
DEFINE_CRC32_VX(crc32_be_vx, crc32_be_vgfm_16, crc32_be)
DEFINE_CRC32_VX(crc32c_le_vx, crc32c_le_vgfm_16, __crc32c_le)


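/*
 * Transform initialization: set the default CRC seed, 0 for the CRC-32
 * variants and all ones (~0) for CRC-32C, matching the generic software
 * implementations.
 */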
static int crc32_vx_cra_init_zero(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = 0;
	return 0;
}

static int crc32_vx_cra_init_invert(struct crypto_tfm *tfm)
{
	struct crc_ctx *mctx = crypto_tfm_ctx(tfm);

	mctx->key = ~0;
	return 0;
}

static int crc32_vx_init(struct shash_desc *desc)
{
	struct crc_ctx *mctx = crypto_shash_ctx(desc->tfm);
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	ctx->crc = mctx->key;
	return 0;
}

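/*
 * The optional 4-byte key is interpreted as the initial CRC value (seed)
 * and replaces the default set by the cra_init callback; crc32 and crc32c
 * expect it in little-endian, crc32be in big-endian byte order.
 */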
static int crc32_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			   unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	mctx->key = le32_to_cpu(*(__le32 *)newkey);
	return 0;
}

static int crc32be_vx_setkey(struct crypto_shash *tfm, const u8 *newkey,
			     unsigned int newkeylen)
{
	struct crc_ctx *mctx = crypto_shash_ctx(tfm);

	if (newkeylen != sizeof(mctx->key)) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	mctx->key = be32_to_cpu(*(__be32 *)newkey);
	return 0;
}

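/*
 * Final and finup callbacks: emit the CRC in the byte order of the
 * respective algorithm, little-endian for crc32 and crc32c, big-endian
 * for crc32be.
 */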
static int crc32le_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__le32 *)out = cpu_to_le32p(&ctx->crc);
	return 0;
}

static int crc32be_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	*(__be32 *)out = cpu_to_be32p(&ctx->crc);
	return 0;
}

static int crc32c_vx_final(struct shash_desc *desc, u8 *out)
{
	struct crc_desc_ctx *ctx = shash_desc_ctx(desc);

	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32p(&ctx->crc);
	return 0;
}

static int __crc32le_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__le32 *)out = cpu_to_le32(crc32_le_vx(*crc, data, len));
	return 0;
}

static int __crc32be_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			      u8 *out)
{
	*(__be32 *)out = cpu_to_be32(crc32_be_vx(*crc, data, len));
	return 0;
}

static int __crc32c_vx_finup(u32 *crc, const u8 *data, unsigned int len,
			     u8 *out)
{
	/*
	 * Perform a final XOR with 0xFFFFFFFF to be in sync
	 * with the generic crc32c shash implementation.
	 */
	*(__le32 *)out = ~cpu_to_le32(crc32c_le_vx(*crc, data, len));
	return 0;
}


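/*
 * The update and finup callbacks below operate on the running CRC kept in
 * the request descriptor, whereas the digest callbacks seed the computation
 * directly from the transform-wide key set by cra_init/setkey.
 */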
#define CRC32_VX_FINUP(alg, func) \
	static int alg ## _vx_finup(struct shash_desc *desc, const u8 *data, \
				    unsigned int datalen, u8 *out) \
	{ \
		return __ ## alg ## _vx_finup(shash_desc_ctx(desc), \
					      data, datalen, out); \
	}

CRC32_VX_FINUP(crc32le, crc32_le_vx)
CRC32_VX_FINUP(crc32be, crc32_be_vx)
CRC32_VX_FINUP(crc32c, crc32c_le_vx)

#define CRC32_VX_DIGEST(alg, func) \
	static int alg ## _vx_digest(struct shash_desc *desc, const u8 *data, \
				     unsigned int len, u8 *out) \
	{ \
		return __ ## alg ## _vx_finup(crypto_shash_ctx(desc->tfm), \
					      data, len, out); \
	}

CRC32_VX_DIGEST(crc32le, crc32_le_vx)
CRC32_VX_DIGEST(crc32be, crc32_be_vx)
CRC32_VX_DIGEST(crc32c, crc32c_le_vx)

#define CRC32_VX_UPDATE(alg, func) \
	static int alg ## _vx_update(struct shash_desc *desc, const u8 *data, \
				     unsigned int datalen) \
	{ \
		struct crc_desc_ctx *ctx = shash_desc_ctx(desc); \
		ctx->crc = func(ctx->crc, data, datalen); \
		return 0; \
	}

CRC32_VX_UPDATE(crc32le, crc32_le_vx)
CRC32_VX_UPDATE(crc32be, crc32_be_vx)
CRC32_VX_UPDATE(crc32c, crc32c_le_vx)


static struct shash_alg crc32_vx_algs[] = {
	/* CRC-32 LE */
	{
		.init		= crc32_vx_init,
		.setkey		= crc32_vx_setkey,
		.update		= crc32le_vx_update,
		.final		= crc32le_vx_final,
		.finup		= crc32le_vx_finup,
		.digest		= crc32le_vx_digest,
		.descsize	= sizeof(struct crc_desc_ctx),
		.digestsize	= CRC32_DIGEST_SIZE,
		.base		= {
			.cra_name	 = "crc32",
			.cra_driver_name = "crc32-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32 BE */
	{
		.init		= crc32_vx_init,
		.setkey		= crc32be_vx_setkey,
		.update		= crc32be_vx_update,
		.final		= crc32be_vx_final,
		.finup		= crc32be_vx_finup,
		.digest		= crc32be_vx_digest,
		.descsize	= sizeof(struct crc_desc_ctx),
		.digestsize	= CRC32_DIGEST_SIZE,
		.base		= {
			.cra_name	 = "crc32be",
			.cra_driver_name = "crc32be-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_zero,
		},
	},
	/* CRC-32C LE */
	{
		.init		= crc32_vx_init,
		.setkey		= crc32_vx_setkey,
		.update		= crc32c_vx_update,
		.final		= crc32c_vx_final,
		.finup		= crc32c_vx_finup,
		.digest		= crc32c_vx_digest,
		.descsize	= sizeof(struct crc_desc_ctx),
		.digestsize	= CRC32_DIGEST_SIZE,
		.base		= {
			.cra_name	 = "crc32c",
			.cra_driver_name = "crc32c-vx",
			.cra_priority	 = 200,
			.cra_flags	 = CRYPTO_ALG_OPTIONAL_KEY,
			.cra_blocksize	 = CRC32_BLOCK_SIZE,
			.cra_ctxsize	 = sizeof(struct crc_ctx),
			.cra_module	 = THIS_MODULE,
			.cra_init	 = crc32_vx_cra_init_invert,
		},
	},
};
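
/*
 * Usage sketch (illustrative only, not part of this module): an in-kernel
 * caller can reach these algorithms through the generic shash API; "data"
 * and "len" stand for the caller's buffer, and error handling is omitted
 * for brevity.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("crc32", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	u8 out[CRC32_DIGEST_SIZE];
 *
 *	desc->tfm = tfm;
 *	crypto_shash_init(desc);		// seed from mctx->key
 *	crypto_shash_update(desc, data, len);
 *	crypto_shash_final(desc, out);		// little-endian CRC-32
 *	crypto_free_shash(tfm);
 */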


static int __init crc_vx_mod_init(void)
{
	return crypto_register_shashes(crc32_vx_algs,
				       ARRAY_SIZE(crc32_vx_algs));
}

static void __exit crc_vx_mod_exit(void)
{
	crypto_unregister_shashes(crc32_vx_algs, ARRAY_SIZE(crc32_vx_algs));
}

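/*
 * Bind module initialization to the vector extension facility (VXRS) CPU
 * feature, so the module is auto-loaded only on machines that provide it.
 */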
module_cpu_feature_match(VXRS, crc_vx_mod_init);
module_exit(crc_vx_mod_exit);

MODULE_AUTHOR("Hendrik Brueckner <brueckner@linux.vnet.ibm.com>");
MODULE_LICENSE("GPL");

MODULE_ALIAS_CRYPTO("crc32");
MODULE_ALIAS_CRYPTO("crc32-vx");
MODULE_ALIAS_CRYPTO("crc32c");
MODULE_ALIAS_CRYPTO("crc32c-vx");