/*
 * Cryptographic API.
 *
 * SHA-3, as specified in
 * http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
 *
 * SHA-3 code by Jeff Garzik <jeff@garzik.org>
 *               Ard Biesheuvel <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/sha3.h>
#include <asm/unaligned.h>

/*
 * On some 32-bit architectures (mn10300 and h8300), GCC ends up using
 * over 1 KB of stack if we inline the round calculation into the loop
 * in keccakf(). On the other hand, on 64-bit architectures with plenty
 * of [64-bit wide] general purpose registers, not inlining it severely
 * hurts performance. So let's use 64-bitness as a heuristic to decide
 * whether to inline or not.
 */
#ifdef CONFIG_64BIT
#define SHA3_INLINE inline
#else
#define SHA3_INLINE noinline
#endif

#define KECCAK_ROUNDS 24

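/*
 * Round constants for the Iota step, one per round of Keccak-f[1600]
 * (FIPS 202, section 3.2.5).
 */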
static const u64 keccakf_rndc[24] = {
        0x0000000000000001ULL, 0x0000000000008082ULL, 0x800000000000808aULL,
        0x8000000080008000ULL, 0x000000000000808bULL, 0x0000000080000001ULL,
        0x8000000080008081ULL, 0x8000000000008009ULL, 0x000000000000008aULL,
        0x0000000000000088ULL, 0x0000000080008009ULL, 0x000000008000000aULL,
        0x000000008000808bULL, 0x800000000000008bULL, 0x8000000000008089ULL,
        0x8000000000008003ULL, 0x8000000000008002ULL, 0x8000000000000080ULL,
        0x000000000000800aULL, 0x800000008000000aULL, 0x8000000080008081ULL,
        0x8000000000008080ULL, 0x0000000080000001ULL, 0x8000000080008008ULL
};

/*
 * One round of the Keccak-f[1600] permutation (Theta, Rho, Pi and Chi);
 * the Iota step is applied by the caller, keccakf().
 */

static SHA3_INLINE void keccakf_round(u64 st[25])
{
        u64 t[5], tt, bc[5];

        /* Theta */
        bc[0] = st[0] ^ st[5] ^ st[10] ^ st[15] ^ st[20];
        bc[1] = st[1] ^ st[6] ^ st[11] ^ st[16] ^ st[21];
        bc[2] = st[2] ^ st[7] ^ st[12] ^ st[17] ^ st[22];
        bc[3] = st[3] ^ st[8] ^ st[13] ^ st[18] ^ st[23];
        bc[4] = st[4] ^ st[9] ^ st[14] ^ st[19] ^ st[24];

        t[0] = bc[4] ^ rol64(bc[1], 1);
        t[1] = bc[0] ^ rol64(bc[2], 1);
        t[2] = bc[1] ^ rol64(bc[3], 1);
        t[3] = bc[2] ^ rol64(bc[4], 1);
        t[4] = bc[3] ^ rol64(bc[0], 1);

        st[0] ^= t[0];

        /* Rho Pi */
        tt = st[1];
        st[ 1] = rol64(st[ 6] ^ t[1], 44);
        st[ 6] = rol64(st[ 9] ^ t[4], 20);
        st[ 9] = rol64(st[22] ^ t[2], 61);
        st[22] = rol64(st[14] ^ t[4], 39);
        st[14] = rol64(st[20] ^ t[0], 18);
        st[20] = rol64(st[ 2] ^ t[2], 62);
        st[ 2] = rol64(st[12] ^ t[2], 43);
        st[12] = rol64(st[13] ^ t[3], 25);
        st[13] = rol64(st[19] ^ t[4], 8);
        st[19] = rol64(st[23] ^ t[3], 56);
        st[23] = rol64(st[15] ^ t[0], 41);
        st[15] = rol64(st[ 4] ^ t[4], 27);
        st[ 4] = rol64(st[24] ^ t[4], 14);
        st[24] = rol64(st[21] ^ t[1], 2);
        st[21] = rol64(st[ 8] ^ t[3], 55);
        st[ 8] = rol64(st[16] ^ t[1], 45);
        st[16] = rol64(st[ 5] ^ t[0], 36);
        st[ 5] = rol64(st[ 3] ^ t[3], 28);
        st[ 3] = rol64(st[18] ^ t[3], 21);
        st[18] = rol64(st[17] ^ t[2], 15);
        st[17] = rol64(st[11] ^ t[1], 10);
        st[11] = rol64(st[ 7] ^ t[2], 6);
        st[ 7] = rol64(st[10] ^ t[0], 3);
        st[10] = rol64( tt ^ t[1], 1);

        /* Chi */
        bc[ 0] = ~st[ 1] & st[ 2];
        bc[ 1] = ~st[ 2] & st[ 3];
        bc[ 2] = ~st[ 3] & st[ 4];
        bc[ 3] = ~st[ 4] & st[ 0];
        bc[ 4] = ~st[ 0] & st[ 1];
        st[ 0] ^= bc[ 0];
        st[ 1] ^= bc[ 1];
        st[ 2] ^= bc[ 2];
        st[ 3] ^= bc[ 3];
        st[ 4] ^= bc[ 4];

        bc[ 0] = ~st[ 6] & st[ 7];
        bc[ 1] = ~st[ 7] & st[ 8];
        bc[ 2] = ~st[ 8] & st[ 9];
        bc[ 3] = ~st[ 9] & st[ 5];
        bc[ 4] = ~st[ 5] & st[ 6];
        st[ 5] ^= bc[ 0];
        st[ 6] ^= bc[ 1];
        st[ 7] ^= bc[ 2];
        st[ 8] ^= bc[ 3];
        st[ 9] ^= bc[ 4];

        bc[ 0] = ~st[11] & st[12];
        bc[ 1] = ~st[12] & st[13];
        bc[ 2] = ~st[13] & st[14];
        bc[ 3] = ~st[14] & st[10];
        bc[ 4] = ~st[10] & st[11];
        st[10] ^= bc[ 0];
        st[11] ^= bc[ 1];
        st[12] ^= bc[ 2];
        st[13] ^= bc[ 3];
        st[14] ^= bc[ 4];

        bc[ 0] = ~st[16] & st[17];
        bc[ 1] = ~st[17] & st[18];
        bc[ 2] = ~st[18] & st[19];
        bc[ 3] = ~st[19] & st[15];
        bc[ 4] = ~st[15] & st[16];
        st[15] ^= bc[ 0];
        st[16] ^= bc[ 1];
        st[17] ^= bc[ 2];
        st[18] ^= bc[ 3];
        st[19] ^= bc[ 4];

        bc[ 0] = ~st[21] & st[22];
        bc[ 1] = ~st[22] & st[23];
        bc[ 2] = ~st[23] & st[24];
        bc[ 3] = ~st[24] & st[20];
        bc[ 4] = ~st[20] & st[21];
        st[20] ^= bc[ 0];
        st[21] ^= bc[ 1];
        st[22] ^= bc[ 2];
        st[23] ^= bc[ 3];
        st[24] ^= bc[ 4];
}

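/*
 * Apply the full Keccak-f[1600] permutation: KECCAK_ROUNDS rounds of
 * keccakf_round(), each followed by the Iota step, i.e. XORing the
 * per-round constant into lane (0, 0).  The __optimize("O3") annotation
 * compiles this function at -O3 regardless of the kernel's global
 * optimization flags.
 */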
static void __optimize("O3") keccakf(u64 st[25])
{
        int round;

        for (round = 0; round < KECCAK_ROUNDS; round++) {
                keccakf_round(st);
                /* Iota */
                st[0] ^= keccakf_rndc[round];
        }
}

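/*
 * The Keccak state is 1600 bits (25 64-bit lanes, i.e. 200 bytes).  For a
 * digest of d bytes, SHA-3 uses a capacity of 2*d bytes, so the sponge
 * rate is rsiz = 200 - 2*d bytes per block; rsizw is the same rate
 * counted in 64-bit words.
 */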
int crypto_sha3_init(struct shash_desc *desc)
{
        struct sha3_state *sctx = shash_desc_ctx(desc);
        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);

        sctx->rsiz = 200 - 2 * digest_size;
        sctx->rsizw = sctx->rsiz / 8;
        sctx->partial = 0;

        memset(sctx->st, 0, sizeof(sctx->st));
        return 0;
}
EXPORT_SYMBOL(crypto_sha3_init);

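/*
 * Absorb phase: buffer input until a full rate-sized block is available,
 * XOR each complete block into the first rsizw lanes of the state
 * (interpreting the bytes as little-endian 64-bit words) and run the
 * permutation; any leftover bytes are kept in sctx->buf for the next
 * update or for finalization.
 */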
int crypto_sha3_update(struct shash_desc *desc, const u8 *data,
                       unsigned int len)
{
        struct sha3_state *sctx = shash_desc_ctx(desc);
        unsigned int done;
        const u8 *src;

        done = 0;
        src = data;

        if ((sctx->partial + len) > (sctx->rsiz - 1)) {
                if (sctx->partial) {
                        done = -sctx->partial;
                        memcpy(sctx->buf + sctx->partial, data,
                               done + sctx->rsiz);
                        src = sctx->buf;
                }

                do {
                        unsigned int i;

                        for (i = 0; i < sctx->rsizw; i++)
                                sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
                        keccakf(sctx->st);

                        done += sctx->rsiz;
                        src = data + done;
                } while (done + (sctx->rsiz - 1) < len);

                sctx->partial = 0;
        }
        memcpy(sctx->buf + sctx->partial, src, len - done);
        sctx->partial += (len - done);

        return 0;
}
EXPORT_SYMBOL(crypto_sha3_update);

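/*
 * Finalization: append the SHA-3 domain-separation suffix "01" together
 * with the first bit of the pad10*1 padding (0x06 in byte form), set the
 * final padding bit in the last byte of the rate (0x80), absorb the last
 * block and read the digest from the state.  The 32-bit tail copy covers
 * SHA3-224, whose 28-byte digest is not a multiple of 8 bytes.
 */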
int crypto_sha3_final(struct shash_desc *desc, u8 *out)
{
        struct sha3_state *sctx = shash_desc_ctx(desc);
        unsigned int i, inlen = sctx->partial;
        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
        __le64 *digest = (__le64 *)out;

        sctx->buf[inlen++] = 0x06;
        memset(sctx->buf + inlen, 0, sctx->rsiz - inlen);
        sctx->buf[sctx->rsiz - 1] |= 0x80;

        for (i = 0; i < sctx->rsizw; i++)
                sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);

        keccakf(sctx->st);

        for (i = 0; i < digest_size / 8; i++)
                put_unaligned_le64(sctx->st[i], digest++);

        if (digest_size & 4)
                put_unaligned_le32(sctx->st[i], (__le32 *)digest);

        memset(sctx, 0, sizeof(*sctx));
        return 0;
}
EXPORT_SYMBOL(crypto_sha3_final);

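/*
 * The four fixed-length SHA-3 variants share the same init/update/final
 * implementation; they differ only in digest size (which determines the
 * sponge rate chosen in crypto_sha3_init()) and in block size.
 */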
static struct shash_alg algs[] = { {
        .digestsize = SHA3_224_DIGEST_SIZE,
        .init = crypto_sha3_init,
        .update = crypto_sha3_update,
        .final = crypto_sha3_final,
        .descsize = sizeof(struct sha3_state),
        .base.cra_name = "sha3-224",
        .base.cra_driver_name = "sha3-224-generic",
        .base.cra_flags = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize = SHA3_224_BLOCK_SIZE,
        .base.cra_module = THIS_MODULE,
}, {
        .digestsize = SHA3_256_DIGEST_SIZE,
        .init = crypto_sha3_init,
        .update = crypto_sha3_update,
        .final = crypto_sha3_final,
        .descsize = sizeof(struct sha3_state),
        .base.cra_name = "sha3-256",
        .base.cra_driver_name = "sha3-256-generic",
        .base.cra_flags = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize = SHA3_256_BLOCK_SIZE,
        .base.cra_module = THIS_MODULE,
}, {
        .digestsize = SHA3_384_DIGEST_SIZE,
        .init = crypto_sha3_init,
        .update = crypto_sha3_update,
        .final = crypto_sha3_final,
        .descsize = sizeof(struct sha3_state),
        .base.cra_name = "sha3-384",
        .base.cra_driver_name = "sha3-384-generic",
        .base.cra_flags = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize = SHA3_384_BLOCK_SIZE,
        .base.cra_module = THIS_MODULE,
}, {
        .digestsize = SHA3_512_DIGEST_SIZE,
        .init = crypto_sha3_init,
        .update = crypto_sha3_update,
        .final = crypto_sha3_final,
        .descsize = sizeof(struct sha3_state),
        .base.cra_name = "sha3-512",
        .base.cra_driver_name = "sha3-512-generic",
        .base.cra_flags = CRYPTO_ALG_TYPE_SHASH,
        .base.cra_blocksize = SHA3_512_BLOCK_SIZE,
        .base.cra_module = THIS_MODULE,
} };

static int __init sha3_generic_mod_init(void)
{
        return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha3_generic_mod_fini(void)
{
        crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha3_generic_mod_init);
module_exit(sha3_generic_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-3 Secure Hash Algorithm");

MODULE_ALIAS_CRYPTO("sha3-224");
MODULE_ALIAS_CRYPTO("sha3-224-generic");
MODULE_ALIAS_CRYPTO("sha3-256");
MODULE_ALIAS_CRYPTO("sha3-256-generic");
MODULE_ALIAS_CRYPTO("sha3-384");
MODULE_ALIAS_CRYPTO("sha3-384-generic");
MODULE_ALIAS_CRYPTO("sha3-512");
MODULE_ALIAS_CRYPTO("sha3-512-generic");
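
/*
 * Usage sketch (illustrative only, error handling elided): kernel callers
 * normally reach these algorithms through the shash API rather than the
 * exported helpers above.  Assuming a caller-provided buffer data of len
 * bytes:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha3-256", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *		u8 digest[SHA3_256_DIGEST_SIZE];
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *		crypto_free_shash(tfm);
 *	}
 */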