/* Copyright (c) 2017, Google Inc.
2 *
3 * Permission to use, copy, modify, and/or distribute this software for any
4 * purpose with or without fee is hereby granted, provided that the above
5 * copyright notice and this permission notice appear in all copies.
6 *
7 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
8 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
9 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
10 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
11 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
12 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
13 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */
14
15#include <openssl/aead.h>
16#include <openssl/cipher.h>
17#include <openssl/cpu.h>
18#include <openssl/crypto.h>
19#include <openssl/err.h>
20
21#include "../fipsmodule/cipher/internal.h"
22
23
24#define EVP_AEAD_AES_GCM_SIV_NONCE_LEN 12
25#define EVP_AEAD_AES_GCM_SIV_TAG_LEN 16
26
27#if defined(OPENSSL_X86_64) && !defined(OPENSSL_NO_ASM)
28
29/* Optimised AES-GCM-SIV */
30
/* aead_aes_gcm_siv_asm_ctx is the per-AEAD state for the assembly
 * implementation of AES-GCM-SIV. */
struct aead_aes_gcm_siv_asm_ctx {
  /* key holds the expanded AES key schedule: 15 round keys of 16 bytes.
   * 64-byte aligned for the assembly routines. */
  alignas(64) uint8_t key[16*15];
  /* is_128_bit is one if |key| is an AES-128 key schedule and zero if it is
   * an AES-256 key schedule. */
  int is_128_bit;
};
35
36/* aes128gcmsiv_aes_ks writes an AES-128 key schedule for |key| to
37 * |out_expanded_key|. */
38extern void aes128gcmsiv_aes_ks(
39 const uint8_t key[16], uint8_t out_expanded_key[16*15]);
40
/* aes256gcmsiv_aes_ks writes an AES-256 key schedule for |key| to
 * |out_expanded_key|. */
43extern void aes256gcmsiv_aes_ks(
44 const uint8_t key[16], uint8_t out_expanded_key[16*15]);
45
46static int aead_aes_gcm_siv_asm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
47 size_t key_len, size_t tag_len) {
48 const size_t key_bits = key_len * 8;
49
50 if (key_bits != 128 && key_bits != 256) {
51 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
52 return 0; /* EVP_AEAD_CTX_init should catch this. */
53 }
54
55 if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
56 tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;
57 }
58
59 if (tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
60 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
61 return 0;
62 }
63
64 struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx =
65 OPENSSL_malloc(sizeof(struct aead_aes_gcm_siv_asm_ctx));
66 if (gcm_siv_ctx == NULL) {
67 return 0;
68 }
69
70 if (key_bits == 128) {
71 aes128gcmsiv_aes_ks(key, &gcm_siv_ctx->key[0]);
72 gcm_siv_ctx->is_128_bit = 1;
73 } else {
74 aes256gcmsiv_aes_ks(key, &gcm_siv_ctx->key[0]);
75 gcm_siv_ctx->is_128_bit = 0;
76 }
77 ctx->aead_state = gcm_siv_ctx;
78 ctx->tag_len = tag_len;
79
80 return 1;
81}
82
83static void aead_aes_gcm_siv_asm_cleanup(EVP_AEAD_CTX *ctx) {
84 struct aead_aes_gcm_siv_asm_ctx *gcm_siv_asm_ctx = ctx->aead_state;
85 OPENSSL_cleanse(gcm_siv_asm_ctx, sizeof(struct aead_aes_gcm_siv_asm_ctx));
86 OPENSSL_free(gcm_siv_asm_ctx);
87}
88
89/* aesgcmsiv_polyval_horner updates the POLYVAL value in |in_out_poly| to
90 * include a number (|in_blocks|) of 16-byte blocks of data from |in|, given
91 * the POLYVAL key in |key|. */
92extern void aesgcmsiv_polyval_horner(const uint8_t in_out_poly[16],
93 const uint8_t key[16], const uint8_t *in,
94 size_t in_blocks);
95
96/* aesgcmsiv_htable_init writes powers 1..8 of |auth_key| to |out_htable|. */
97extern void aesgcmsiv_htable_init(uint8_t out_htable[16 * 8],
98 const uint8_t auth_key[16]);
99
100/* aesgcmsiv_htable6_init writes powers 1..6 of |auth_key| to |out_htable|. */
101extern void aesgcmsiv_htable6_init(uint8_t out_htable[16 * 6],
102 const uint8_t auth_key[16]);
103
104/* aesgcmsiv_htable_polyval updates the POLYVAL value in |in_out_poly| to
105 * include |in_len| bytes of data from |in|. (Where |in_len| must be a multiple
106 * of 16.) It uses the precomputed powers of the key given in |htable|. */
107extern void aesgcmsiv_htable_polyval(const uint8_t htable[16 * 8],
108 const uint8_t *in, size_t in_len,
109 uint8_t in_out_poly[16]);
110
/* aes128gcmsiv_dec decrypts |in_len| & ~15 bytes from |in| and writes them to
 * |out|. (The full value of |in_len| is still used to find the authentication
 * tag appended to the ciphertext, however, so must not be pre-masked.)
 *
 * |in| and |out| may be equal, but must not otherwise overlap.
 *
 * While decrypting, it updates the POLYVAL value found at the beginning of
 * |in_out_calculated_tag_and_scratch| and writes the updated value back before
 * return. During execution, it may use the whole of this space for other
 * purposes. In order to decrypt and update the POLYVAL value, it uses the
 * expanded key from |key| and the table of powers in |htable|. */
122extern void aes128gcmsiv_dec(const uint8_t *in, uint8_t *out,
123 uint8_t in_out_calculated_tag_and_scratch[16 * 8],
124 const uint8_t htable[16 * 6],
125 const struct aead_aes_gcm_siv_asm_ctx *key,
126 size_t in_len);
127
128/* aes256gcmsiv_dec acts like |aes128gcmsiv_dec|, but for AES-256. */
129extern void aes256gcmsiv_dec(const uint8_t *in, uint8_t *out,
130 uint8_t in_out_calculated_tag_and_scratch[16 * 8],
131 const uint8_t htable[16 * 6],
132 const struct aead_aes_gcm_siv_asm_ctx *key,
133 size_t in_len);
134
135/* aes128gcmsiv_kdf performs the AES-GCM-SIV KDF given the expanded key from
136 * |key_schedule| and the nonce in |nonce|. Note that, while only 12 bytes of
137 * the nonce are used, 16 bytes are read and so the value must be
138 * right-padded. */
139extern void aes128gcmsiv_kdf(const uint8_t nonce[16],
140 uint64_t out_key_material[8],
141 const uint8_t *key_schedule);
142
143/* aes256gcmsiv_kdf acts like |aes128gcmsiv_kdf|, but for AES-256. */
144extern void aes256gcmsiv_kdf(const uint8_t nonce[16],
145 uint64_t out_key_material[12],
146 const uint8_t *key_schedule);
147
148/* aes128gcmsiv_aes_ks_enc_x1 performs a key expansion of the AES-128 key in
149 * |key|, writes the expanded key to |out_expanded_key| and encrypts a single
150 * block from |in| to |out|. */
151extern void aes128gcmsiv_aes_ks_enc_x1(const uint8_t in[16], uint8_t out[16],
152 uint8_t out_expanded_key[16 * 15],
153 const uint64_t key[2]);
154
155/* aes256gcmsiv_aes_ks_enc_x1 acts like |aes128gcmsiv_aes_ks_enc_x1|, but for
156 * AES-256. */
157extern void aes256gcmsiv_aes_ks_enc_x1(const uint8_t in[16], uint8_t out[16],
158 uint8_t out_expanded_key[16 * 15],
159 const uint64_t key[4]);
160
161/* aes128gcmsiv_ecb_enc_block encrypts a single block from |in| to |out| using
162 * the expanded key in |expanded_key|. */
163extern void aes128gcmsiv_ecb_enc_block(
164 const uint8_t in[16], uint8_t out[16],
165 const struct aead_aes_gcm_siv_asm_ctx *expanded_key);
166
167/* aes256gcmsiv_ecb_enc_block acts like |aes128gcmsiv_ecb_enc_block|, but for
168 * AES-256. */
169extern void aes256gcmsiv_ecb_enc_block(
170 const uint8_t in[16], uint8_t out[16],
171 const struct aead_aes_gcm_siv_asm_ctx *expanded_key);
172
173/* aes128gcmsiv_enc_msg_x4 encrypts |in_len| bytes from |in| to |out| using the
174 * expanded key from |key|. (The value of |in_len| must be a multiple of 16.)
175 * The |in| and |out| buffers may be equal but must not otherwise overlap. The
176 * initial counter is constructed from the given |tag| as required by
177 * AES-GCM-SIV. */
178extern void aes128gcmsiv_enc_msg_x4(const uint8_t *in, uint8_t *out,
179 const uint8_t *tag,
180 const struct aead_aes_gcm_siv_asm_ctx *key,
181 size_t in_len);
182
183/* aes256gcmsiv_enc_msg_x4 acts like |aes128gcmsiv_enc_msg_x4|, but for
184 * AES-256. */
185extern void aes256gcmsiv_enc_msg_x4(const uint8_t *in, uint8_t *out,
186 const uint8_t *tag,
187 const struct aead_aes_gcm_siv_asm_ctx *key,
188 size_t in_len);
189
190/* aes128gcmsiv_enc_msg_x8 acts like |aes128gcmsiv_enc_msg_x4|, but is
191 * optimised for longer messages. */
192extern void aes128gcmsiv_enc_msg_x8(const uint8_t *in, uint8_t *out,
193 const uint8_t *tag,
194 const struct aead_aes_gcm_siv_asm_ctx *key,
195 size_t in_len);
196
197/* aes256gcmsiv_enc_msg_x8 acts like |aes256gcmsiv_enc_msg_x4|, but is
198 * optimised for longer messages. */
199extern void aes256gcmsiv_enc_msg_x8(const uint8_t *in, uint8_t *out,
200 const uint8_t *tag,
201 const struct aead_aes_gcm_siv_asm_ctx *key,
202 size_t in_len);
203
/* gcm_siv_asm_polyval evaluates POLYVAL at |auth_key| on the given plaintext
 * and AD. The result is written to |out_tag|, with the nonce XORed into the
 * first twelve bytes and the top bit of the final byte cleared, ready for
 * tag encryption. */
static void gcm_siv_asm_polyval(uint8_t out_tag[16], const uint8_t *in,
                                size_t in_len, const uint8_t *ad, size_t ad_len,
                                const uint8_t auth_key[16],
                                const uint8_t nonce[12]) {
  OPENSSL_memset(out_tag, 0, 16);
  const size_t ad_blocks = ad_len / 16;
  const size_t in_blocks = in_len / 16;
  int htable_init = 0;
  alignas(16) uint8_t htable[16*8];

  /* Building the table of powers of |auth_key| only pays off for longer
   * inputs; shorter inputs use plain Horner evaluation. */
  if (ad_blocks > 8 || in_blocks > 8) {
    htable_init = 1;
    aesgcmsiv_htable_init(htable, auth_key);
  }

  if (htable_init) {
    aesgcmsiv_htable_polyval(htable, ad, ad_len & ~15, out_tag);
  } else {
    aesgcmsiv_polyval_horner(out_tag, auth_key, ad, ad_blocks);
  }

  /* Any trailing, partial block of AD is zero-padded to 16 bytes. */
  uint8_t scratch[16];
  if (ad_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
    aesgcmsiv_polyval_horner(out_tag, auth_key, scratch, 1);
  }

  if (htable_init) {
    aesgcmsiv_htable_polyval(htable, in, in_len & ~15, out_tag);
  } else {
    aesgcmsiv_polyval_horner(out_tag, auth_key, in, in_blocks);
  }

  /* Any trailing, partial block of plaintext is likewise zero-padded. */
  if (in_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &in[in_len & ~15], in_len & 15);
    aesgcmsiv_polyval_horner(out_tag, auth_key, scratch, 1);
  }

  /* The final block contains the bit lengths of the AD and plaintext as
   * 64-bit values. This code is x86-64 only, so the union's in-memory layout
   * is the little-endian one that POLYVAL expects. */
  union {
    uint8_t c[16];
    struct {
      uint64_t ad;
      uint64_t in;
    } bitlens;
  } length_block;

  length_block.bitlens.ad = ad_len * 8;
  length_block.bitlens.in = in_len * 8;
  aesgcmsiv_polyval_horner(out_tag, auth_key, length_block.c, 1);

  /* Fold in the nonce and clear the top bit of the final byte. */
  for (size_t i = 0; i < 12; i++) {
    out_tag[i] ^= nonce[i];
  }

  out_tag[15] &= 0x7f;
}
264
/* aead_aes_gcm_siv_asm_crypt_last_block handles the encryption/decryption
 * (same thing in CTR mode) of the final, partial block of a
 * plaintext/ciphertext. It writes the last |in_len| & 15 bytes, at offset
 * |in_len| & ~15 of |out|, based on an initial counter derived from |tag|. */
static void aead_aes_gcm_siv_asm_crypt_last_block(
    int is_128_bit, uint8_t *out, const uint8_t *in, size_t in_len,
    const uint8_t tag[16],
    const struct aead_aes_gcm_siv_asm_ctx *enc_key_expanded) {
  alignas(16) union {
    uint8_t c[16];
    uint32_t u32[4];
  } counter;
  OPENSSL_memcpy(&counter, tag, sizeof(counter));
  /* The initial counter is the tag with the top bit of the last byte set. */
  counter.c[15] |= 0x80;
  /* Skip over the whole blocks already processed by the bulk routines. This
   * code is x86-64 only, so |u32[0]| is the little-endian low word of the
   * counter, as AES-GCM-SIV requires. */
  counter.u32[0] += in_len / 16;

  /* Generate one keystream block by encrypting the counter in place. */
  if (is_128_bit) {
    aes128gcmsiv_ecb_enc_block(&counter.c[0], &counter.c[0], enc_key_expanded);
  } else {
    aes256gcmsiv_ecb_enc_block(&counter.c[0], &counter.c[0], enc_key_expanded);
  }

  /* XOR the remaining |in_len| & 15 bytes with the keystream. */
  const size_t last_bytes_offset = in_len & ~15;
  const size_t last_bytes_len = in_len & 15;
  uint8_t *last_bytes_out = &out[last_bytes_offset];
  const uint8_t *last_bytes_in = &in[last_bytes_offset];
  for (size_t i = 0; i < last_bytes_len; i++) {
    last_bytes_out[i] = last_bytes_in[i] ^ counter.c[i];
  }
}
295
/* aead_aes_gcm_siv_kdf calculates the record encryption and authentication
 * keys given the |nonce|. Only the first two (AES-128) or all four (AES-256)
 * words of |out_record_enc_key| are written, matching the derived key
 * size. */
static void aead_aes_gcm_siv_kdf(
    int is_128_bit, const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx,
    uint64_t out_record_auth_key[2], uint64_t out_record_enc_key[4],
    const uint8_t nonce[12]) {
  /* The assembly KDF reads 16 bytes of nonce. Per the comment on
   * |aes128gcmsiv_kdf|, only the first twelve bytes are used, so the final
   * four bytes are deliberately left unset here — NOTE(review): their value
   * is presumed not to affect the output. */
  alignas(16) uint8_t padded_nonce[16];
  OPENSSL_memcpy(padded_nonce, nonce, 12);

  /* Only even-indexed words of |key_material| are consumed below: the KDF
   * keeps just the first eight bytes of each 16-byte AES output block, so
   * each block contributes one uint64_t (see RFC 8452's key derivation). */
  alignas(16) uint64_t key_material[12];
  if (is_128_bit) {
    aes128gcmsiv_kdf(padded_nonce, key_material, &gcm_siv_ctx->key[0]);
    out_record_enc_key[0] = key_material[4];
    out_record_enc_key[1] = key_material[6];
  } else {
    aes256gcmsiv_kdf(padded_nonce, key_material, &gcm_siv_ctx->key[0]);
    out_record_enc_key[0] = key_material[4];
    out_record_enc_key[1] = key_material[6];
    out_record_enc_key[2] = key_material[8];
    out_record_enc_key[3] = key_material[10];
  }

  out_record_auth_key[0] = key_material[0];
  out_record_auth_key[1] = key_material[2];
}
321
/* aead_aes_gcm_siv_asm_seal_scatter encrypts |in_len| bytes from |in| to
 * |out| and writes the 16-byte tag to |out_tag|, using the assembly
 * AES-GCM-SIV implementation. Returns one on success and zero on error. */
static int aead_aes_gcm_siv_asm_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *ad,
    size_t ad_len) {
  const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;
  const uint64_t ad_len_64 = ad_len;

  /* AES-GCM-SIV bounds plaintexts at 2^36 bytes and AD below 2^61 bytes. */
  if (in_len_64 > (UINT64_C(1) << 36) ||
      ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_tag_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  /* Derive the per-nonce authentication and encryption keys. */
  alignas(16) uint64_t record_auth_key[2];
  alignas(16) uint64_t record_enc_key[4];
  aead_aes_gcm_siv_kdf(gcm_siv_ctx->is_128_bit, gcm_siv_ctx, record_auth_key,
                       record_enc_key, nonce);

  /* The tag starts as the POLYVAL of AD and plaintext (with the nonce folded
   * in); it is then encrypted below. */
  alignas(16) uint8_t tag[16] = {0};
  gcm_siv_asm_polyval(tag, in, in_len, ad, ad_len,
                      (const uint8_t *)record_auth_key, nonce);

  struct aead_aes_gcm_siv_asm_ctx enc_key_expanded;

  if (gcm_siv_ctx->is_128_bit) {
    /* ks_enc_x1 both expands the record encryption key and encrypts |tag| in
     * place. The encrypted tag also seeds the CTR counter (the SIV
     * construction). */
    aes128gcmsiv_aes_ks_enc_x1(tag, tag, &enc_key_expanded.key[0],
                               record_enc_key);

    /* The x8 variant is only worthwhile for longer messages. */
    if (in_len < 128) {
      aes128gcmsiv_enc_msg_x4(in, out, tag, &enc_key_expanded, in_len & ~15);
    } else {
      aes128gcmsiv_enc_msg_x8(in, out, tag, &enc_key_expanded, in_len & ~15);
    }
  } else {
    aes256gcmsiv_aes_ks_enc_x1(tag, tag, &enc_key_expanded.key[0],
                               record_enc_key);

    if (in_len < 128) {
      aes256gcmsiv_enc_msg_x4(in, out, tag, &enc_key_expanded, in_len & ~15);
    } else {
      aes256gcmsiv_enc_msg_x8(in, out, tag, &enc_key_expanded, in_len & ~15);
    }
  }

  /* The bulk routines only process whole blocks; any trailing partial block
   * is encrypted separately. */
  if (in_len & 15) {
    aead_aes_gcm_siv_asm_crypt_last_block(gcm_siv_ctx->is_128_bit, out, in,
                                          in_len, tag, &enc_key_expanded);
  }

  OPENSSL_memcpy(out_tag, tag, sizeof(tag));
  *out_tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;

  return 1;
}
388
389// TODO(martinkr): Add aead_aes_gcm_siv_asm_open_gather. N.B. aes128gcmsiv_dec
390// expects ciphertext and tag in a contiguous buffer.
391
/* aead_aes_gcm_siv_asm_open authenticates and decrypts |in| (ciphertext
 * followed by a 16-byte tag) into |out|. Note that plaintext is written to
 * |out| before the tag is verified; on failure it returns zero and the
 * contents of |out| must not be used. */
static int aead_aes_gcm_siv_asm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     size_t *out_len, size_t max_out_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *ad, size_t ad_len) {
  const uint64_t ad_len_64 = ad_len;
  if (ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  /* The input must contain at least the tag and may not exceed the
   * 2^36-byte plaintext limit plus one block of slack. */
  const uint64_t in_len_64 = in_len;
  if (in_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN ||
      in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const struct aead_aes_gcm_siv_asm_ctx *gcm_siv_ctx = ctx->aead_state;
  const size_t plaintext_len = in_len - EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  const uint8_t *const given_tag = in + plaintext_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  /* Derive the per-nonce authentication and encryption keys. */
  alignas(16) uint64_t record_auth_key[2];
  alignas(16) uint64_t record_enc_key[4];
  aead_aes_gcm_siv_kdf(gcm_siv_ctx->is_128_bit, gcm_siv_ctx, record_auth_key,
                       record_enc_key, nonce);

  struct aead_aes_gcm_siv_asm_ctx expanded_key;
  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_aes_ks((const uint8_t *) record_enc_key, &expanded_key.key[0]);
  } else {
    aes256gcmsiv_aes_ks((const uint8_t *) record_enc_key, &expanded_key.key[0]);
  }
  /* calculated_tag is 16*8 bytes, rather than 16 bytes, because
   * aes[128|256]gcmsiv_dec uses the extra as scratch space. */
  alignas(16) uint8_t calculated_tag[16 * 8] = {0};

  /* POLYVAL the AD first; the plaintext is folded in by the decryption
   * routines below as it is produced. */
  OPENSSL_memset(calculated_tag, 0, EVP_AEAD_AES_GCM_SIV_TAG_LEN);
  const size_t ad_blocks = ad_len / 16;
  aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key, ad,
                           ad_blocks);

  /* Any trailing, partial block of AD is zero-padded to 16 bytes. */
  uint8_t scratch[16];
  if (ad_len & 15) {
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
    aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key,
                             scratch, 1);
  }

  alignas(16) uint8_t htable[16 * 6];
  aesgcmsiv_htable6_init(htable, (const uint8_t *)record_auth_key);

  /* Decrypt the whole blocks, updating |calculated_tag| with the POLYVAL of
   * the resulting plaintext. */
  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_dec(in, out, calculated_tag, htable, &expanded_key,
                     plaintext_len);
  } else {
    aes256gcmsiv_dec(in, out, calculated_tag, htable, &expanded_key,
                     plaintext_len);
  }

  /* Any trailing, partial block is decrypted with a counter derived from the
   * given tag, then zero-padded and folded into the POLYVAL value. */
  if (plaintext_len & 15) {
    aead_aes_gcm_siv_asm_crypt_last_block(gcm_siv_ctx->is_128_bit, out, in,
                                          plaintext_len, given_tag,
                                          &expanded_key);
    OPENSSL_memset(scratch, 0, sizeof(scratch));
    OPENSSL_memcpy(scratch, out + (plaintext_len & ~15), plaintext_len & 15);
    aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key,
                             scratch, 1);
  }

  /* The final POLYVAL block holds the bit lengths of the AD and plaintext.
   * (This code is x86-64 only, so the union's layout is little-endian.) */
  union {
    uint8_t c[16];
    struct {
      uint64_t ad;
      uint64_t in;
    } bitlens;
  } length_block;

  length_block.bitlens.ad = ad_len * 8;
  length_block.bitlens.in = plaintext_len * 8;
  aesgcmsiv_polyval_horner(calculated_tag, (const uint8_t *)record_auth_key,
                           length_block.c, 1);

  /* Mix in the nonce, clear the top bit and encrypt, giving the expected
   * tag. */
  for (size_t i = 0; i < 12; i++) {
    calculated_tag[i] ^= nonce[i];
  }

  calculated_tag[15] &= 0x7f;

  if (gcm_siv_ctx->is_128_bit) {
    aes128gcmsiv_ecb_enc_block(calculated_tag, calculated_tag, &expanded_key);
  } else {
    aes256gcmsiv_ecb_enc_block(calculated_tag, calculated_tag, &expanded_key);
  }

  /* Compare tags in constant time. */
  if (CRYPTO_memcmp(calculated_tag, given_tag, EVP_AEAD_AES_GCM_SIV_TAG_LEN) !=
      0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = in_len - EVP_AEAD_AES_GCM_SIV_TAG_LEN;
  return 1;
}
502
/* aead_aes_128_gcm_siv_asm is the EVP_AEAD vtable for the assembly
 * AES-128-GCM-SIV implementation. */
static const EVP_AEAD aead_aes_128_gcm_siv_asm = {
    16,                             /* key length */
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN, /* nonce length */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* overhead */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* max tag length */

    aead_aes_gcm_siv_asm_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_asm_cleanup,
    aead_aes_gcm_siv_asm_open,
    aead_aes_gcm_siv_asm_seal_scatter,
    NULL /* open_gather */,
    NULL /* get_iv */,
};
517
/* aead_aes_256_gcm_siv_asm is the EVP_AEAD vtable for the assembly
 * AES-256-GCM-SIV implementation. */
static const EVP_AEAD aead_aes_256_gcm_siv_asm = {
    32,                             /* key length */
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN, /* nonce length */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* overhead */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* max tag length */

    aead_aes_gcm_siv_asm_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_asm_cleanup,
    aead_aes_gcm_siv_asm_open,
    aead_aes_gcm_siv_asm_seal_scatter,
    NULL /* open_gather */,
    NULL /* get_iv */,
};
532
533#endif /* X86_64 && !NO_ASM */
534
/* aead_aes_gcm_siv_ctx is the per-AEAD state for the generic (non-assembly)
 * AES-GCM-SIV implementation. */
struct aead_aes_gcm_siv_ctx {
  /* ks holds the expanded key-generating key; the union forces suitable
   * alignment for AES_KEY. */
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* kgk_block is the AES block function for the key-generating key. */
  block128_f kgk_block;
  /* is_256 is one if the key-generating key is an AES-256 key. */
  unsigned is_256:1;
};
543
544static int aead_aes_gcm_siv_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
545 size_t key_len, size_t tag_len) {
546 const size_t key_bits = key_len * 8;
547
548 if (key_bits != 128 && key_bits != 256) {
549 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
550 return 0; /* EVP_AEAD_CTX_init should catch this. */
551 }
552
553 if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
554 tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;
555 }
556 if (tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
557 OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
558 return 0;
559 }
560
561 struct aead_aes_gcm_siv_ctx *gcm_siv_ctx =
562 OPENSSL_malloc(sizeof(struct aead_aes_gcm_siv_ctx));
563 if (gcm_siv_ctx == NULL) {
564 return 0;
565 }
566 OPENSSL_memset(gcm_siv_ctx, 0, sizeof(struct aead_aes_gcm_siv_ctx));
567
568 aes_ctr_set_key(&gcm_siv_ctx->ks.ks, NULL, &gcm_siv_ctx->kgk_block, key,
569 key_len);
570 gcm_siv_ctx->is_256 = (key_len == 32);
571 ctx->aead_state = gcm_siv_ctx;
572 ctx->tag_len = tag_len;
573
574 return 1;
575}
576
577static void aead_aes_gcm_siv_cleanup(EVP_AEAD_CTX *ctx) {
578 struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;
579 OPENSSL_cleanse(gcm_siv_ctx, sizeof(struct aead_aes_gcm_siv_ctx));
580 OPENSSL_free(gcm_siv_ctx);
581}
582
583/* gcm_siv_crypt encrypts (or decrypts—it's the same thing) |in_len| bytes from
584 * |in| to |out|, using the block function |enc_block| with |key| in counter
585 * mode, starting at |initial_counter|. This differs from the traditional
586 * counter mode code in that the counter is handled little-endian, only the
587 * first four bytes are used and the GCM-SIV tweak to the final byte is
588 * applied. The |in| and |out| pointers may be equal but otherwise must not
589 * alias. */
590static void gcm_siv_crypt(uint8_t *out, const uint8_t *in, size_t in_len,
591 const uint8_t initial_counter[AES_BLOCK_SIZE],
592 block128_f enc_block, const AES_KEY *key) {
593 union {
594 uint32_t w[4];
595 uint8_t c[16];
596 } counter;
597
598 OPENSSL_memcpy(counter.c, initial_counter, AES_BLOCK_SIZE);
599 counter.c[15] |= 0x80;
600
601 for (size_t done = 0; done < in_len;) {
602 uint8_t keystream[AES_BLOCK_SIZE];
603 enc_block(counter.c, keystream, key);
604 counter.w[0]++;
605
606 size_t todo = AES_BLOCK_SIZE;
607 if (in_len - done < todo) {
608 todo = in_len - done;
609 }
610
611 for (size_t i = 0; i < todo; i++) {
612 out[done + i] = keystream[i] ^ in[done + i];
613 }
614
615 done += todo;
616 }
617}
618
619/* gcm_siv_polyval evaluates POLYVAL at |auth_key| on the given plaintext and
620 * AD. The result is written to |out_tag|. */
621static void gcm_siv_polyval(
622 uint8_t out_tag[16], const uint8_t *in, size_t in_len, const uint8_t *ad,
623 size_t ad_len, const uint8_t auth_key[16],
624 const uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN]) {
625 struct polyval_ctx polyval_ctx;
626 CRYPTO_POLYVAL_init(&polyval_ctx, auth_key);
627
628 CRYPTO_POLYVAL_update_blocks(&polyval_ctx, ad, ad_len & ~15);
629
630 uint8_t scratch[16];
631 if (ad_len & 15) {
632 OPENSSL_memset(scratch, 0, sizeof(scratch));
633 OPENSSL_memcpy(scratch, &ad[ad_len & ~15], ad_len & 15);
634 CRYPTO_POLYVAL_update_blocks(&polyval_ctx, scratch, sizeof(scratch));
635 }
636
637 CRYPTO_POLYVAL_update_blocks(&polyval_ctx, in, in_len & ~15);
638 if (in_len & 15) {
639 OPENSSL_memset(scratch, 0, sizeof(scratch));
640 OPENSSL_memcpy(scratch, &in[in_len & ~15], in_len & 15);
641 CRYPTO_POLYVAL_update_blocks(&polyval_ctx, scratch, sizeof(scratch));
642 }
643
644 union {
645 uint8_t c[16];
646 struct {
647 uint64_t ad;
648 uint64_t in;
649 } bitlens;
650 } length_block;
651
652 length_block.bitlens.ad = ad_len * 8;
653 length_block.bitlens.in = in_len * 8;
654 CRYPTO_POLYVAL_update_blocks(&polyval_ctx, length_block.c,
655 sizeof(length_block));
656
657 CRYPTO_POLYVAL_finish(&polyval_ctx, out_tag);
658 for (size_t i = 0; i < EVP_AEAD_AES_GCM_SIV_NONCE_LEN; i++) {
659 out_tag[i] ^= nonce[i];
660 }
661 out_tag[15] &= 0x7f;
662}
663
/* gcm_siv_record_keys contains the keys used for a specific GCM-SIV record. */
struct gcm_siv_record_keys {
  /* auth_key is the POLYVAL authentication key. */
  uint8_t auth_key[16];
  /* enc_key holds the record encryption key; the union forces suitable
   * alignment for AES_KEY. */
  union {
    double align;
    AES_KEY ks;
  } enc_key;
  /* enc_block is the AES block function for |enc_key|. */
  block128_f enc_block;
};
673
/* gcm_siv_keys calculates the keys for a specific GCM-SIV record with the
 * given nonce and writes them to |*out_keys|. */
static void gcm_siv_keys(
    const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx,
    struct gcm_siv_record_keys *out_keys,
    const uint8_t nonce[EVP_AEAD_AES_GCM_SIV_NONCE_LEN]) {
  const AES_KEY *const key = &gcm_siv_ctx->ks.ks;
  /* 16 bytes of POLYVAL key, followed by 16 (AES-128) or 32 (AES-256) bytes
   * of record encryption key. */
  uint8_t key_material[(128 /* POLYVAL key */ + 256 /* max AES key */) / 8];
  const size_t blocks_needed = gcm_siv_ctx->is_256 ? 6 : 4;

  /* Each KDF input block is a little-endian counter followed by the nonce. */
  uint8_t counter[AES_BLOCK_SIZE];
  OPENSSL_memset(counter, 0, AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN);
  OPENSSL_memcpy(counter + AES_BLOCK_SIZE - EVP_AEAD_AES_GCM_SIV_NONCE_LEN,
                 nonce, EVP_AEAD_AES_GCM_SIV_NONCE_LEN);
  for (size_t i = 0; i < blocks_needed; i++) {
    /* |blocks_needed| <= 6, so only the least-significant counter byte is
     * ever non-zero. */
    counter[0] = i;

    /* Only the first eight bytes of each AES output block are kept, per the
     * AES-GCM-SIV key derivation. */
    uint8_t ciphertext[AES_BLOCK_SIZE];
    gcm_siv_ctx->kgk_block(counter, ciphertext, key);
    OPENSSL_memcpy(&key_material[i * 8], ciphertext, 8);
  }

  OPENSSL_memcpy(out_keys->auth_key, key_material, 16);
  aes_ctr_set_key(&out_keys->enc_key.ks, NULL, &out_keys->enc_block,
                  key_material + 16, gcm_siv_ctx->is_256 ? 32 : 16);
}
700
/* aead_aes_gcm_siv_seal_scatter encrypts |in_len| bytes from |in| to |out|
 * and writes the 16-byte tag to |out_tag|, using the generic (non-assembly)
 * implementation. Returns one on success and zero on error. */
static int aead_aes_gcm_siv_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         uint8_t *out_tag, size_t *out_tag_len,
                                         size_t max_out_tag_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;
  const uint64_t ad_len_64 = ad_len;

  /* The first test guards against |in_len| plus the tag overflowing size_t;
   * the others enforce the AES-GCM-SIV limits of 2^36 plaintext bytes and
   * less than 2^61 AD bytes. */
  if (in_len + EVP_AEAD_AES_GCM_SIV_TAG_LEN < in_len ||
      in_len_64 > (UINT64_C(1) << 36) ||
      ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_tag_len < EVP_AEAD_AES_GCM_SIV_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  /* Derive the per-nonce record keys. */
  struct gcm_siv_record_keys keys;
  gcm_siv_keys(gcm_siv_ctx, &keys, nonce);

  /* The tag is the encrypted POLYVAL of the AD and plaintext; it also seeds
   * the CTR-mode counter (the SIV construction). */
  uint8_t tag[16];
  gcm_siv_polyval(tag, in, in_len, ad, ad_len, keys.auth_key, nonce);
  keys.enc_block(tag, tag, &keys.enc_key.ks);

  gcm_siv_crypt(out, in, in_len, tag, keys.enc_block, &keys.enc_key.ks);

  OPENSSL_memcpy(out_tag, tag, EVP_AEAD_AES_GCM_SIV_TAG_LEN);
  *out_tag_len = EVP_AEAD_AES_GCM_SIV_TAG_LEN;

  return 1;
}
742
/* aead_aes_gcm_siv_open_gather authenticates |in| against |in_tag| and
 * decrypts it into |out|. The plaintext is written to |out| before the tag
 * check; on failure the function returns zero and the contents of |out| must
 * not be used. */
static int aead_aes_gcm_siv_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                        const uint8_t *nonce, size_t nonce_len,
                                        const uint8_t *in, size_t in_len,
                                        const uint8_t *in_tag,
                                        size_t in_tag_len, const uint8_t *ad,
                                        size_t ad_len) {
  const uint64_t ad_len_64 = ad_len;
  if (ad_len_64 >= (UINT64_C(1) << 61)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }

  const uint64_t in_len_64 = in_len;
  if (in_tag_len != EVP_AEAD_AES_GCM_SIV_TAG_LEN ||
      in_len_64 > (UINT64_C(1) << 36) + AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_GCM_SIV_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  const struct aead_aes_gcm_siv_ctx *gcm_siv_ctx = ctx->aead_state;

  /* Derive the per-nonce record keys. */
  struct gcm_siv_record_keys keys;
  gcm_siv_keys(gcm_siv_ctx, &keys, nonce);

  /* In the SIV construction, the tag doubles as the CTR-mode IV, so
   * decryption runs before authentication. */
  gcm_siv_crypt(out, in, in_len, in_tag, keys.enc_block, &keys.enc_key.ks);

  /* Recompute the tag over the decrypted plaintext and compare in constant
   * time. */
  uint8_t expected_tag[EVP_AEAD_AES_GCM_SIV_TAG_LEN];
  gcm_siv_polyval(expected_tag, out, in_len, ad, ad_len, keys.auth_key, nonce);
  keys.enc_block(expected_tag, expected_tag, &keys.enc_key.ks);

  if (CRYPTO_memcmp(expected_tag, in_tag, sizeof(expected_tag)) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}
785
/* aead_aes_128_gcm_siv is the EVP_AEAD vtable for the generic
 * AES-128-GCM-SIV implementation. */
static const EVP_AEAD aead_aes_128_gcm_siv = {
    16,                             /* key length */
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN, /* nonce length */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* overhead */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* max tag length */

    aead_aes_gcm_siv_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_cleanup,
    NULL /* open */,
    aead_aes_gcm_siv_seal_scatter,
    aead_aes_gcm_siv_open_gather,
    NULL /* get_iv */,
};
800
/* aead_aes_256_gcm_siv is the EVP_AEAD vtable for the generic
 * AES-256-GCM-SIV implementation. */
static const EVP_AEAD aead_aes_256_gcm_siv = {
    32,                             /* key length */
    EVP_AEAD_AES_GCM_SIV_NONCE_LEN, /* nonce length */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* overhead */
    EVP_AEAD_AES_GCM_SIV_TAG_LEN,   /* max tag length */

    aead_aes_gcm_siv_init,
    NULL /* init_with_direction */,
    aead_aes_gcm_siv_cleanup,
    NULL /* open */,
    aead_aes_gcm_siv_seal_scatter,
    aead_aes_gcm_siv_open_gather,
    NULL /* get_iv */,
};
815
816#if defined(OPENSSL_X86_64) && !defined(OPENSSL_NO_ASM)
817
818static char avx_aesni_capable(void) {
819 const uint32_t ecx = OPENSSL_ia32cap_P[1];
820
821 return (ecx & (1 << (57 - 32))) != 0 /* AESNI */ &&
822 (ecx & (1 << 28)) != 0 /* AVX */;
823}
824
825const EVP_AEAD *EVP_aead_aes_128_gcm_siv(void) {
826 if (avx_aesni_capable()) {
827 return &aead_aes_128_gcm_siv_asm;
828 }
829 return &aead_aes_128_gcm_siv;
830}
831
832const EVP_AEAD *EVP_aead_aes_256_gcm_siv(void) {
833 if (avx_aesni_capable()) {
834 return &aead_aes_256_gcm_siv_asm;
835 }
836 return &aead_aes_256_gcm_siv;
837}
838
839#else
840
/* Without x86-64 assembly support, only the generic implementation is
 * available. */
const EVP_AEAD *EVP_aead_aes_128_gcm_siv(void) {
  return &aead_aes_128_gcm_siv;
}
844
/* Without x86-64 assembly support, only the generic implementation is
 * available. */
const EVP_AEAD *EVP_aead_aes_256_gcm_siv(void) {
  return &aead_aes_256_gcm_siv;
}
848
849#endif /* X86_64 && !NO_ASM */