blob: 0b1de0b470a254d0ee40d312c56ea9892191c7d1 [file] [log] [blame]
David S. Miller9bf48522012-08-21 03:58:13 -07001/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
2 *
3 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
4 *
5 * Copyright (C) 2008, Intel Corp.
6 * Author: Huang Ying <ying.huang@intel.com>
7 *
8 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
9 * interface for 64-bit kernels.
10 * Authors: Adrian Hoban <adrian.hoban@intel.com>
11 * Gabriele Paoloni <gabriele.paoloni@intel.com>
12 * Tadeusz Struk (tadeusz.struk@intel.com)
13 * Aidan O'Mahony (aidan.o.mahony@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 */
16
17#include <linux/crypto.h>
18#include <linux/init.h>
19#include <linux/module.h>
20#include <linux/mm.h>
21#include <linux/types.h>
22#include <crypto/algapi.h>
23#include <crypto/aes.h>
24
25#include <asm/fpumacro.h>
26#include <asm/pstate.h>
27#include <asm/elf.h>
28
David S. Miller0bdcaf72012-08-29 12:50:16 -070029struct aes_ops {
30 void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
31 void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
32 void (*load_encrypt_keys)(const u64 *key);
33 void (*load_decrypt_keys)(const u64 *key);
34 void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
35 unsigned int len);
36 void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
37 unsigned int len);
38 void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
39 unsigned int len, u64 *iv);
40 void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
41 unsigned int len, u64 *iv);
42};
43
David S. Miller9bf48522012-08-21 03:58:13 -070044struct crypto_sparc64_aes_ctx {
David S. Miller0bdcaf72012-08-29 12:50:16 -070045 struct aes_ops *ops;
David S. Miller9bf48522012-08-21 03:58:13 -070046 u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
47 u32 key_length;
48 u32 expanded_key_length;
49};
50
David S. Miller0bdcaf72012-08-29 12:50:16 -070051extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
52 u32 *output);
53extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
54 u32 *output);
55extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
56 u32 *output);
57
58extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
59 u32 *output);
60extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
61 u32 *output);
62extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
63 u32 *output);
64
65extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
66extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
67extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);
68
69extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
70extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
71extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);
72
73extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
74 u64 *output, unsigned int len);
75extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
76 u64 *output, unsigned int len);
77extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
78 u64 *output, unsigned int len);
79
80extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
81 u64 *output, unsigned int len);
82extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
83 u64 *output, unsigned int len);
84extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
85 u64 *output, unsigned int len);
86
87extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
88 u64 *output, unsigned int len,
89 u64 *iv);
90
91extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
92 u64 *output, unsigned int len,
93 u64 *iv);
94
95extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
96 u64 *output, unsigned int len,
97 u64 *iv);
98
99extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
100 u64 *output, unsigned int len,
101 u64 *iv);
102
103extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
104 u64 *output, unsigned int len,
105 u64 *iv);
106
107extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
108 u64 *output, unsigned int len,
109 u64 *iv);
110
/* Primitive table for 128-bit keys. */
struct aes_ops aes128_ops = {
	.encrypt		= aes_sparc64_encrypt_128,
	.decrypt		= aes_sparc64_decrypt_128,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_128,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_128,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_128,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_128,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_128,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_128,
};
121
/* Primitive table for 192-bit keys. */
struct aes_ops aes192_ops = {
	.encrypt		= aes_sparc64_encrypt_192,
	.decrypt		= aes_sparc64_decrypt_192,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_192,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_192,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_192,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_192,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_192,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_192,
};
132
/* Primitive table for 256-bit keys. */
struct aes_ops aes256_ops = {
	.encrypt		= aes_sparc64_encrypt_256,
	.decrypt		= aes_sparc64_decrypt_256,
	.load_encrypt_keys	= aes_sparc64_load_encrypt_keys_256,
	.load_decrypt_keys	= aes_sparc64_load_decrypt_keys_256,
	.ecb_encrypt		= aes_sparc64_ecb_encrypt_256,
	.ecb_decrypt		= aes_sparc64_ecb_decrypt_256,
	.cbc_encrypt		= aes_sparc64_cbc_encrypt_256,
	.cbc_decrypt		= aes_sparc64_cbc_decrypt_256,
};
143
David S. Miller9bf48522012-08-21 03:58:13 -0700144extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
145 unsigned int key_len);
146
147static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
148 unsigned int key_len)
149{
150 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
151 u32 *flags = &tfm->crt_flags;
152
153 switch (key_len) {
154 case AES_KEYSIZE_128:
155 ctx->expanded_key_length = 0xb0;
David S. Miller0bdcaf72012-08-29 12:50:16 -0700156 ctx->ops = &aes128_ops;
David S. Miller9bf48522012-08-21 03:58:13 -0700157 break;
158
159 case AES_KEYSIZE_192:
160 ctx->expanded_key_length = 0xd0;
David S. Miller0bdcaf72012-08-29 12:50:16 -0700161 ctx->ops = &aes192_ops;
David S. Miller9bf48522012-08-21 03:58:13 -0700162 break;
163
164 case AES_KEYSIZE_256:
165 ctx->expanded_key_length = 0xf0;
David S. Miller0bdcaf72012-08-29 12:50:16 -0700166 ctx->ops = &aes256_ops;
David S. Miller9bf48522012-08-21 03:58:13 -0700167 break;
168
169 default:
170 *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
171 return -EINVAL;
172 }
173
174 aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
175 ctx->key_length = key_len;
176
177 return 0;
178}
179
David S. Miller9bf48522012-08-21 03:58:13 -0700180static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
181{
182 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
183
David S. Miller0bdcaf72012-08-29 12:50:16 -0700184 ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
David S. Miller9bf48522012-08-21 03:58:13 -0700185}
186
David S. Miller9bf48522012-08-21 03:58:13 -0700187static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
188{
189 struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);
190
David S. Miller0bdcaf72012-08-29 12:50:16 -0700191 ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
David S. Miller9bf48522012-08-21 03:58:13 -0700192}
193
David S. Miller9bf48522012-08-21 03:58:13 -0700194#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
195
David S. Miller9bf48522012-08-21 03:58:13 -0700196static int ecb_encrypt(struct blkcipher_desc *desc,
197 struct scatterlist *dst, struct scatterlist *src,
198 unsigned int nbytes)
199{
200 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
201 struct blkcipher_walk walk;
202 int err;
203
204 blkcipher_walk_init(&walk, dst, src, nbytes);
205 err = blkcipher_walk_virt(desc, &walk);
206
David S. Miller0bdcaf72012-08-29 12:50:16 -0700207 ctx->ops->load_encrypt_keys(&ctx->key[0]);
David S. Miller9bf48522012-08-21 03:58:13 -0700208 while ((nbytes = walk.nbytes)) {
209 unsigned int block_len = nbytes & AES_BLOCK_MASK;
210
211 if (likely(block_len)) {
David S. Miller0bdcaf72012-08-29 12:50:16 -0700212 ctx->ops->ecb_encrypt(&ctx->key[0],
213 (const u64 *)walk.src.virt.addr,
214 (u64 *) walk.dst.virt.addr,
215 block_len);
David S. Miller9bf48522012-08-21 03:58:13 -0700216 }
217 nbytes &= AES_BLOCK_SIZE - 1;
218 err = blkcipher_walk_done(desc, &walk, nbytes);
219 }
220 fprs_write(0);
221 return err;
222}
223
David S. Miller9bf48522012-08-21 03:58:13 -0700224static int ecb_decrypt(struct blkcipher_desc *desc,
225 struct scatterlist *dst, struct scatterlist *src,
226 unsigned int nbytes)
227{
228 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
229 struct blkcipher_walk walk;
230 u64 *key_end;
231 int err;
232
233 blkcipher_walk_init(&walk, dst, src, nbytes);
234 err = blkcipher_walk_virt(desc, &walk);
235
David S. Miller0bdcaf72012-08-29 12:50:16 -0700236 ctx->ops->load_decrypt_keys(&ctx->key[0]);
David S. Miller9bf48522012-08-21 03:58:13 -0700237 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
238 while ((nbytes = walk.nbytes)) {
239 unsigned int block_len = nbytes & AES_BLOCK_MASK;
240
David S. Miller0bdcaf72012-08-29 12:50:16 -0700241 if (likely(block_len)) {
242 ctx->ops->ecb_decrypt(key_end,
243 (const u64 *) walk.src.virt.addr,
244 (u64 *) walk.dst.virt.addr, block_len);
245 }
David S. Miller9bf48522012-08-21 03:58:13 -0700246 nbytes &= AES_BLOCK_SIZE - 1;
247 err = blkcipher_walk_done(desc, &walk, nbytes);
248 }
249 fprs_write(0);
250
251 return err;
252}
253
David S. Miller9bf48522012-08-21 03:58:13 -0700254static int cbc_encrypt(struct blkcipher_desc *desc,
255 struct scatterlist *dst, struct scatterlist *src,
256 unsigned int nbytes)
257{
258 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
259 struct blkcipher_walk walk;
260 int err;
261
262 blkcipher_walk_init(&walk, dst, src, nbytes);
263 err = blkcipher_walk_virt(desc, &walk);
264
David S. Miller0bdcaf72012-08-29 12:50:16 -0700265 ctx->ops->load_encrypt_keys(&ctx->key[0]);
David S. Miller9bf48522012-08-21 03:58:13 -0700266 while ((nbytes = walk.nbytes)) {
267 unsigned int block_len = nbytes & AES_BLOCK_MASK;
268
269 if (likely(block_len)) {
David S. Miller0bdcaf72012-08-29 12:50:16 -0700270 ctx->ops->cbc_encrypt(&ctx->key[0],
271 (const u64 *)walk.src.virt.addr,
272 (u64 *) walk.dst.virt.addr,
273 block_len, (u64 *) walk.iv);
David S. Miller9bf48522012-08-21 03:58:13 -0700274 }
275 nbytes &= AES_BLOCK_SIZE - 1;
276 err = blkcipher_walk_done(desc, &walk, nbytes);
277 }
278 fprs_write(0);
279 return err;
280}
281
David S. Miller9bf48522012-08-21 03:58:13 -0700282static int cbc_decrypt(struct blkcipher_desc *desc,
283 struct scatterlist *dst, struct scatterlist *src,
284 unsigned int nbytes)
285{
286 struct crypto_sparc64_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
287 struct blkcipher_walk walk;
288 u64 *key_end;
289 int err;
290
291 blkcipher_walk_init(&walk, dst, src, nbytes);
292 err = blkcipher_walk_virt(desc, &walk);
293
David S. Miller0bdcaf72012-08-29 12:50:16 -0700294 ctx->ops->load_decrypt_keys(&ctx->key[0]);
David S. Miller9bf48522012-08-21 03:58:13 -0700295 key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
296 while ((nbytes = walk.nbytes)) {
297 unsigned int block_len = nbytes & AES_BLOCK_MASK;
298
David S. Miller0bdcaf72012-08-29 12:50:16 -0700299 if (likely(block_len)) {
300 ctx->ops->cbc_decrypt(key_end,
301 (const u64 *) walk.src.virt.addr,
302 (u64 *) walk.dst.virt.addr,
303 block_len, (u64 *) walk.iv);
304 }
David S. Miller9bf48522012-08-21 03:58:13 -0700305 nbytes &= AES_BLOCK_SIZE - 1;
306 err = blkcipher_walk_done(desc, &walk, nbytes);
307 }
308 fprs_write(0);
309
310 return err;
311}
312
/* Algorithms registered with the crypto API: a plain single-block cipher
 * ("aes") plus ecb(aes) and cbc(aes) blkciphers.  Priority 150 ranks
 * these above the generic C implementations.  The blkcipher entries use
 * alignmask 7 because the assembly routines take u64-aligned buffers;
 * the plain cipher works on u32 words, hence alignmask 3.
 */
static struct crypto_alg algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sparc64",
	.cra_priority		= 150,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_sparc64_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
} };
370
371static bool __init sparc64_has_aes_opcode(void)
372{
373 unsigned long cfr;
374
375 if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
376 return false;
377
378 __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
379 if (!(cfr & CFR_AES))
380 return false;
381
382 return true;
383}
384
385static int __init aes_sparc64_mod_init(void)
386{
387 int i;
388
389 for (i = 0; i < ARRAY_SIZE(algs); i++)
390 INIT_LIST_HEAD(&algs[i].cra_list);
391
392 if (sparc64_has_aes_opcode()) {
393 pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
394 return crypto_register_algs(algs, ARRAY_SIZE(algs));
395 }
396 pr_info("sparc64 aes opcodes not available.\n");
397 return -ENODEV;
398}
399
/* Module exit: unregister everything aes_sparc64_mod_init() registered. */
static void __exit aes_sparc64_mod_fini(void)
{
	crypto_unregister_algs(algs, ARRAY_SIZE(algs));
}
404
405module_init(aes_sparc64_mod_init);
406module_exit(aes_sparc64_mod_fini);
407
408MODULE_LICENSE("GPL");
409MODULE_DESCRIPTION("AES Secure Hash Algorithm, sparc64 aes opcode accelerated");
410
411MODULE_ALIAS("aes");