/*
 * aes-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

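/*
 * The ce_aes_ccm_* routines below are implemented in the accompanying
 * NEON/Crypto Extensions assembler (aes-ce-ccm-core.S); __aes_arm64_encrypt
 * is the generic scalar AES encryption routine, used as a fallback when the
 * SIMD unit is not available.
 */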
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

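/*
 * CCM only permits even tag lengths of 4 to 16 bytes; the upper bound is
 * enforced by .maxauthsize in the aead_alg definition below.
 */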
static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}

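/*
 * Build the B0 block that is CBC-MACed first. Illustrative example: with
 * iv[0] == 3 (so L == 4), an 8 byte tag, non-empty AAD and a 256 byte
 * payload, maciv ends up as
 *
 *	0x5b || <11 byte nonce from iv[1..11]> || 0x00 0x00 0x01 0x00
 *
 * where 0x5b = 0x40 (AAD present) | (8 - 2) << 2 (tag size) | 3 (L - 1).
 */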
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

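/*
 * Note on the non-SIMD path below: the CBC-MAC state is updated lazily.
 * Incoming bytes are XORed into 'mac' as they arrive, and the block cipher
 * is only applied once a completed block has to make room for more data
 * (or when the MAC is finalized by the caller); *macp tracks how much of
 * the current block has been absorbed.
 */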
static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp, bool use_neon)
{
	if (likely(use_neon)) {
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
	} else {
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes > AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		} else {
			*macp = 0;
		}
	}
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
				   bool use_neon)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

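	/*
	 * Per RFC 3610 2.2, an AAD length below 0xff00 is encoded as a
	 * 2 byte big-endian value; larger lengths get a 0xff 0xfe marker
	 * followed by a 4 byte big-endian value. E.g. assoclen == 24
	 * prepends the two bytes 0x00 0x18 to the authenticated data.
	 */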
	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

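/*
 * Scalar CTR + CBC-MAC fallback: each keystream block is derived with
 * __aes_arm64_encrypt() while the same scalar cipher advances the MAC over
 * the plaintext. The final tag is the CBC-MAC XORed with E(K, A0), where
 * A0 is the counter block with its counter field set to zero (preserved in
 * iv0 by the caller before the walk increments the IV).
 */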
static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

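/*
 * Encrypt path: build B0, CBC-MAC the (length-tagged) AAD, CTR-encrypt the
 * payload while folding the plaintext into the MAC, then append the
 * (possibly truncated) tag to the end of the destination scatterlist.
 */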
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

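/*
 * Decrypt path: same flow as above, but the MAC is computed over the
 * decrypted payload and compared against the stored tag using
 * crypto_memneq(), so the comparison does not leak timing information.
 */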
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};

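/*
 * Illustrative use via the kernel crypto API (a sketch, not part of this
 * module; error handling omitted). iv[0] holds L - 1, so iv[0] = 3 selects
 * a 4 byte length field and an 11 byte nonce in iv[1..11]:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
 *	struct aead_request *req;
 *	u8 iv[AES_BLOCK_SIZE] = { 3 };
 *
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_aead_setauthsize(tfm, 8);
 *
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	err = crypto_aead_encrypt(req);
 */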
static int __init aes_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");