/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"

Ard Biesheuvel49788fe2014-03-21 10:19:17 +010023#ifdef USE_V8_CRYPTO_EXTENSIONS
24#define MODE "ce"
25#define PRIO 300
Ard Biesheuvel12ac3ef2014-11-03 16:50:01 +000026#define aes_setkey ce_aes_setkey
27#define aes_expandkey ce_aes_expandkey
Ard Biesheuvel49788fe2014-03-21 10:19:17 +010028#define aes_ecb_encrypt ce_aes_ecb_encrypt
29#define aes_ecb_decrypt ce_aes_ecb_decrypt
30#define aes_cbc_encrypt ce_aes_cbc_encrypt
31#define aes_cbc_decrypt ce_aes_cbc_decrypt
32#define aes_ctr_encrypt ce_aes_ctr_encrypt
33#define aes_xts_encrypt ce_aes_xts_encrypt
34#define aes_xts_decrypt ce_aes_xts_decrypt
Ard Biesheuvel48606202017-02-03 14:49:37 +000035#define aes_mac_update ce_aes_mac_update
Ard Biesheuvel49788fe2014-03-21 10:19:17 +010036MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);

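/*
 * XTS uses two independent AES keys: key1 encrypts the data blocks,
 * while key2 encrypts the IV to produce the initial tweak.
 */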
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

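/*
 * Shared context layout for the MAC transforms: the expanded AES key,
 * followed (for cmac/xcbc only) by two precomputed constant blocks.
 * The per-request state is just the running digest and a byte count.
 */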
struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};

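/*
 * aes_setkey()/aes_expandkey() resolve to either the Crypto Extensions
 * or the generic C implementations, depending on which flavour of this
 * file is being built (see the #ifdef block at the top).
 */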
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

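/*
 * The block mode routines below all follow the same pattern: walk the
 * request in virtually mapped chunks, process every complete block of
 * each chunk in one NEON call, and return any partial remainder to the
 * walk via skcipher_walk_done().
 */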
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

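/*
 * CTR is a stream cipher, so a request need not be a multiple of the
 * block size: any trailing partial block is handled by generating one
 * final block of keystream on the stack and XOR-copying just the bytes
 * that remain.
 */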
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
		first = 0;
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_neon_end();

	return err;
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };

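/*
 * Any of the non-internal algorithms above is reachable through the
 * regular skcipher API. A minimal sketch (not part of this driver;
 * error handling elided, and "key", "iv", "sg" and "nbytes" are assumed
 * to be supplied by the caller):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
 *	SKCIPHER_REQUEST_ON_STACK(req, tfm);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_tfm(req, tfm);
 *	skcipher_request_set_callback(req, 0, NULL, NULL);
 *	skcipher_request_set_crypt(req, sg, sg, nbytes, iv);
 *	crypto_skcipher_encrypt(req);
 *	skcipher_request_zero(req);
 *	crypto_free_skcipher(tfm);
 */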
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}

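/*
 * Multiplication by x in GF(2^128) as used by CMAC: shift the 128-bit
 * value left by one bit and, if the bit shifted out was set, reduce
 * modulo the field polynomial by XORing 0x87 into the low byte.
 */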
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

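/*
 * CMAC subkey derivation (RFC 4493): encrypt the all-zero block, then
 * double it once for K1 and a second time for K2. Both subkeys end up
 * in ctx->consts for use by cmac_final().
 */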
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}

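/*
 * XCBC (RFC 3566) derives three keys by encrypting the constant blocks
 * 0x01.., 0x02.. and 0x03..: the first becomes the CBC-MAC key proper,
 * the other two are stashed in ctx->consts for the final block.
 */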
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], rk, rounds, 1, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2, 0);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

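/*
 * Collect input in ctx->dg a block at a time; whenever the buffered
 * data ends on a block boundary and the combined data exceeds one
 * block, hand all remaining full blocks to the NEON code in one call.
 * ctx->len == AES_BLOCK_SIZE denotes a buffered block that still has
 * to be encrypted into the digest.
 */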
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			kernel_neon_begin();
			aes_mac_update(p, tctx->key.key_enc, rounds, blocks,
				       ctx->dg, (ctx->len != 0), (len != 0));
			kernel_neon_end();

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	kernel_neon_begin();
	aes_mac_update(NULL, tctx->key.key_enc, rounds, 0, ctx->dg, 1, 0);
	kernel_neon_end();

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

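/*
 * Standard CMAC finalization: XOR K1 into a complete final block, or
 * pad a partial one with 10* and XOR in K2, then encrypt once more.
 */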
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	kernel_neon_begin();
	aes_mac_update(consts, tctx->key.key_enc, rounds, 1, ctx->dg, 0, 1);
	kernel_neon_end();

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };

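/*
 * The MACs above are exposed through the shash API. A minimal one-shot
 * sketch (not part of this driver; error handling elided, and "key",
 * "data" and "out" are assumed to be supplied by the caller):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("cmac(aes)", 0, 0);
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *
 *	desc->tfm = tfm;
 *	desc->flags = 0;
 *	crypto_shash_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_shash_digest(desc, data, len, out);
 *	shash_desc_zero(desc);
 *	crypto_free_shash(tfm);
 */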
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

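/*
 * For each internal algorithm, create a simd skcipher wrapper without
 * the "__" prefix: the wrapper calls the NEON code directly when the
 * SIMD unit is usable and defers the request to cryptd otherwise.
 */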
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);