/*
 * Cryptographic API.
 *
 * SHA-256, as specified in
 * http://csrc.nist.gov/groups/STM/cavp/documents/shs/sha256-384-512.pdf
 *
 * SHA-256 code by Jean-Luc Cooke <jlcooke@certainkey.com>.
 *
 * Copyright (c) Jean-Luc Cooke <jlcooke@certainkey.com>
 * Copyright (c) Andrew McDonald <andrew@mcdonald.org.uk>
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * SHA224 Support Copyright 2007 Intel Corporation <jonathan.lynch@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <asm/byteorder.h>
#include <asm/unaligned.h>

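/*
 * Ch and Maj are the two bitwise selection functions from FIPS 180-2,
 * section 4.1.2, written in a reduced-operation form: Ch(x,y,z) picks
 * bits from y where x is set and from z where it is clear, and
 * Maj(x,y,z) takes the majority vote of the three inputs.
 */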
static inline u32 Ch(u32 x, u32 y, u32 z)
{
        return z ^ (x & (y ^ z));
}

static inline u32 Maj(u32 x, u32 y, u32 z)
{
        return (x & y) | (z & (x | y));
}

#define e0(x)       (ror32(x, 2) ^ ror32(x,13) ^ ror32(x,22))
#define e1(x)       (ror32(x, 6) ^ ror32(x,11) ^ ror32(x,25))
#define s0(x)       (ror32(x, 7) ^ ror32(x,18) ^ (x >> 3))
#define s1(x)       (ror32(x,17) ^ ror32(x,19) ^ (x >> 10))

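/* Load one 32-bit message word, interpreting the input as big-endian. */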
static inline void LOAD_OP(int I, u32 *W, const u8 *input)
{
        W[I] = get_unaligned_be32((__u32 *)input + I);
}

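/*
 * Message schedule expansion:
 * W[t] = s1(W[t-2]) + W[t-7] + s0(W[t-15]) + W[t-16] for 16 <= t < 64.
 */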
static inline void BLEND_OP(int I, u32 *W)
{
        W[I] = s1(W[I-2]) + W[I-7] + s0(W[I-15]) + W[I-16];
}

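/*
 * The SHA-256 compression function: mixes one 64-byte block into the
 * eight-word state. The 64 rounds are fully unrolled below; instead of
 * rotating the working variables a..h after every round, each round in
 * a group of eight simply renames them in the source. The 32-bit hex
 * literals are the round constants K[0..63] from FIPS 180-2.
 */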
static void sha256_transform(u32 *state, const u8 *input)
{
        u32 a, b, c, d, e, f, g, h, t1, t2;
        u32 W[64];
        int i;

        /* load the input */
        for (i = 0; i < 16; i++)
                LOAD_OP(i, W, input);

        /* now blend */
        for (i = 16; i < 64; i++)
                BLEND_OP(i, W);

        /* load the state into our registers */
        a=state[0]; b=state[1]; c=state[2]; d=state[3];
        e=state[4]; f=state[5]; g=state[6]; h=state[7];

        /* now iterate */
        t1 = h + e1(e) + Ch(e,f,g) + 0x428a2f98 + W[ 0];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x71374491 + W[ 1];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0xb5c0fbcf + W[ 2];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0xe9b5dba5 + W[ 3];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x3956c25b + W[ 4];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x59f111f1 + W[ 5];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x923f82a4 + W[ 6];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0xab1c5ed5 + W[ 7];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0xd807aa98 + W[ 8];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x12835b01 + W[ 9];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x243185be + W[10];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x550c7dc3 + W[11];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x72be5d74 + W[12];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x80deb1fe + W[13];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x9bdc06a7 + W[14];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0xc19bf174 + W[15];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0xe49b69c1 + W[16];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0xefbe4786 + W[17];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x0fc19dc6 + W[18];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x240ca1cc + W[19];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x2de92c6f + W[20];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x4a7484aa + W[21];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x5cb0a9dc + W[22];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x76f988da + W[23];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x983e5152 + W[24];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0xa831c66d + W[25];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0xb00327c8 + W[26];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0xbf597fc7 + W[27];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0xc6e00bf3 + W[28];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0xd5a79147 + W[29];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x06ca6351 + W[30];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x14292967 + W[31];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x27b70a85 + W[32];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x2e1b2138 + W[33];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x4d2c6dfc + W[34];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x53380d13 + W[35];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x650a7354 + W[36];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x766a0abb + W[37];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x81c2c92e + W[38];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x92722c85 + W[39];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0xa2bfe8a1 + W[40];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0xa81a664b + W[41];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0xc24b8b70 + W[42];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0xc76c51a3 + W[43];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0xd192e819 + W[44];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0xd6990624 + W[45];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0xf40e3585 + W[46];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x106aa070 + W[47];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x19a4c116 + W[48];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x1e376c08 + W[49];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x2748774c + W[50];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x34b0bcb5 + W[51];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x391c0cb3 + W[52];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0x4ed8aa4a + W[53];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0x5b9cca4f + W[54];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0x682e6ff3 + W[55];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        t1 = h + e1(e) + Ch(e,f,g) + 0x748f82ee + W[56];
        t2 = e0(a) + Maj(a,b,c); d+=t1; h=t1+t2;
        t1 = g + e1(d) + Ch(d,e,f) + 0x78a5636f + W[57];
        t2 = e0(h) + Maj(h,a,b); c+=t1; g=t1+t2;
        t1 = f + e1(c) + Ch(c,d,e) + 0x84c87814 + W[58];
        t2 = e0(g) + Maj(g,h,a); b+=t1; f=t1+t2;
        t1 = e + e1(b) + Ch(b,c,d) + 0x8cc70208 + W[59];
        t2 = e0(f) + Maj(f,g,h); a+=t1; e=t1+t2;
        t1 = d + e1(a) + Ch(a,b,c) + 0x90befffa + W[60];
        t2 = e0(e) + Maj(e,f,g); h+=t1; d=t1+t2;
        t1 = c + e1(h) + Ch(h,a,b) + 0xa4506ceb + W[61];
        t2 = e0(d) + Maj(d,e,f); g+=t1; c=t1+t2;
        t1 = b + e1(g) + Ch(g,h,a) + 0xbef9a3f7 + W[62];
        t2 = e0(c) + Maj(c,d,e); f+=t1; b=t1+t2;
        t1 = a + e1(f) + Ch(f,g,h) + 0xc67178f2 + W[63];
        t2 = e0(b) + Maj(b,c,d); e+=t1; a=t1+t2;

        state[0] += a; state[1] += b; state[2] += c; state[3] += d;
        state[4] += e; state[5] += f; state[6] += g; state[7] += h;

        /* clear any sensitive info... */
        a = b = c = d = e = f = g = h = t1 = t2 = 0;
        memzero_explicit(W, 64 * sizeof(u32));
}

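/*
 * Block-processing callback for the sha256_base helpers: consumes
 * `blocks` consecutive SHA256_BLOCK_SIZE-byte blocks starting at src.
 */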
static void sha256_generic_block_fn(struct sha256_state *sst, u8 const *src,
                                    int blocks)
{
        while (blocks--) {
                sha256_transform(sst->state, src);
                src += SHA256_BLOCK_SIZE;
        }
}

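/*
 * Partial-block buffering and the running byte count are handled by the
 * sha256_base_do_update() helper; only complete blocks reach
 * sha256_generic_block_fn().
 */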
int crypto_sha256_update(struct shash_desc *desc, const u8 *data,
                         unsigned int len)
{
        return sha256_base_do_update(desc, data, len, sha256_generic_block_fn);
}
EXPORT_SYMBOL(crypto_sha256_update);

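/* Apply the final padding and length block, then emit the digest. */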
static int sha256_final(struct shash_desc *desc, u8 *out)
{
        sha256_base_do_finalize(desc, sha256_generic_block_fn);
        return sha256_base_finish(desc, out);
}

int crypto_sha256_finup(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *hash)
{
        sha256_base_do_update(desc, data, len, sha256_generic_block_fn);
        return sha256_final(desc, hash);
}
EXPORT_SYMBOL(crypto_sha256_finup);

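/*
 * SHA-224 shares the update/final/finup paths with SHA-256: the two
 * algorithms differ only in their initial state (sha224_base_init() vs.
 * sha256_base_init()) and in how many state words are copied out, which
 * sha256_base_finish() derives from the digest size.
 */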
static struct shash_alg sha256_algs[2] = { {
        .digestsize     = SHA256_DIGEST_SIZE,
        .init           = sha256_base_init,
        .update         = crypto_sha256_update,
        .final          = sha256_final,
        .finup          = crypto_sha256_finup,
        .descsize       = sizeof(struct sha256_state),
        .base           = {
                .cra_name        = "sha256",
                .cra_driver_name = "sha256-generic",
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA256_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
}, {
        .digestsize     = SHA224_DIGEST_SIZE,
        .init           = sha224_base_init,
        .update         = crypto_sha256_update,
        .final          = sha256_final,
        .finup          = crypto_sha256_finup,
        .descsize       = sizeof(struct sha256_state),
        .base           = {
                .cra_name        = "sha224",
                .cra_driver_name = "sha224-generic",
                .cra_flags       = CRYPTO_ALG_TYPE_SHASH,
                .cra_blocksize   = SHA224_BLOCK_SIZE,
                .cra_module      = THIS_MODULE,
        }
} };

static int __init sha256_generic_mod_init(void)
{
        return crypto_register_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}

static void __exit sha256_generic_mod_fini(void)
{
        crypto_unregister_shashes(sha256_algs, ARRAY_SIZE(sha256_algs));
}

module_init(sha256_generic_mod_init);
module_exit(sha256_generic_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-224 and SHA-256 Secure Hash Algorithm");

MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha224-generic");
MODULE_ALIAS_CRYPTO("sha256");
MODULE_ALIAS_CRYPTO("sha256-generic");
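
/*
 * Illustrative sketch (not part of the original driver): one way a kernel
 * caller might hash a buffer through the "sha256" shash registered above,
 * using the synchronous hash API. The function name sha256_example_digest()
 * is made up for this example; it assumes <linux/slab.h> for kmalloc()/kfree()
 * and abbreviates error handling to the essentials.
 */
#if 0
static int sha256_example_digest(const u8 *data, unsigned int len,
                                 u8 out[SHA256_DIGEST_SIZE])
{
        struct crypto_shash *tfm;
        struct shash_desc *desc;
        int err;

        tfm = crypto_alloc_shash("sha256", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* A shash_desc is followed by the algorithm's private state. */
        desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
        if (!desc) {
                crypto_free_shash(tfm);
                return -ENOMEM;
        }
        desc->tfm = tfm;

        /* init + update + final in one call. */
        err = crypto_shash_digest(desc, data, len, out);

        kfree(desc);
        crypto_free_shash(tfm);
        return err;
}
#endif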