/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */
#include "rk3288_crypto.h"

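/*
 * Usage sketch (illustrative only, not part of this driver): a kernel
 * client drives these algorithms through the generic ablkcipher API of
 * this era. The buf/len/key names below are hypothetical, and error
 * handling plus waiting for async completion are omitted:
 *
 *	struct crypto_ablkcipher *tfm;
 *	struct ablkcipher_request *req;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_ablkcipher("ecb(aes)", 0, 0);
 *	crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, len);          (len must be block-aligned)
 *	ablkcipher_request_set_crypt(req, &sg, &sg, len, NULL);
 *	crypto_ablkcipher_encrypt(req);      (may return -EINPROGRESS)
 */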
#define RK_CRYPTO_DEC			BIT(0)

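/* Invoke the completion callback of the request currently owned by @dev. */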
static void rk_crypto_complete(struct rk_crypto_info *dev, int err)
{
	if (dev->ablk_req->base.complete)
		dev->ablk_req->base.complete(&dev->ablk_req->base, err);
}

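/*
 * Validate the request length against the hardware alignment, stash the
 * request state in the device structure, enqueue the request and kick
 * the crypto tasklet that services the queue.
 */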
static int rk_handle_req(struct rk_crypto_info *dev,
			 struct ablkcipher_request *req)
{
	unsigned long flags;
	int err;

	if (!IS_ALIGNED(req->nbytes, dev->align_size))
		return -EINVAL;

	dev->left_bytes = req->nbytes;
	dev->total = req->nbytes;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->nents = sg_nents(req->src);
	dev->sg_dst = req->dst;
	dev->aligned = 1;
	dev->ablk_req = req;

	spin_lock_irqsave(&dev->lock, flags);
	err = ablkcipher_enqueue_request(&dev->queue, req);
	spin_unlock_irqrestore(&dev->lock, flags);
	tasklet_schedule(&dev->crypto_tasklet);
	return err;
}

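/*
 * Check the AES key length and write the key straight into the key
 * registers; the hardware picks it up on the next AES operation.
 */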
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
	return 0;
}

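/*
 * Check the DES/3DES key length, reject weak single-DES keys when the
 * transform asks for that, and write the key into the TDES key registers.
 */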
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		if (!des_ekey(tmp, key) &&
		    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			return -EINVAL;
		}
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}

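/*
 * The encrypt/decrypt entry points below differ only in the mode word
 * they program: a cipher/chaining-mode selector, optionally ORed with
 * RK_CRYPTO_DEC for decryption. The actual work is done in
 * rk_handle_req().
 */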
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = 0;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_TDES_SELECT;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		    RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

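/*
 * Program the hardware for the queued request: select the TDES or AES
 * register block based on the transform's block size, set byte-swapping
 * for key/IV/FIFO data, load the IV and enable the DMA done/error
 * interrupts.
 */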
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
	struct crypto_ablkcipher *cipher =
		crypto_ablkcipher_reqtfm(dev->ablk_req);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_ablkcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		dev->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, dev->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0,
			    dev->ablk_req->info, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		dev->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			dev->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			dev->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, dev->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0,
			    dev->ablk_req->info, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

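/*
 * Program the receive/transmit DMA start addresses and the transfer
 * length in 32-bit words, then set BLOCK_START (the shift by 16 appears
 * to be the register's write-enable mask) to kick off the transfer.
 */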
static void crypto_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

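/* Load the next source/destination chunk and start the DMA on success. */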
static int rk_set_data_start(struct rk_crypto_info *dev)
{
	int err;

	err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
	if (!err)
		crypto_dma_start(dev);
	return err;
}

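/*
 * First step for a dequeued request: initialize the hardware and start
 * the first DMA transfer under the device lock.
 */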
static int rk_ablk_start(struct rk_crypto_info *dev)
{
	unsigned long flags;
	int err;

	spin_lock_irqsave(&dev->lock, flags);
	rk_ablk_hw_init(dev);
	err = rk_set_data_start(dev);
	spin_unlock_irqrestore(&dev->lock, flags);
	return err;
}

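/*
 * Copy the updated IV back from the hardware registers into the request
 * so that chained CBC calls see the expected IV.
 */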
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(dev->ablk_req);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);

	if (ivsize == DES_BLOCK_SIZE)
		memcpy_fromio(dev->ablk_req->info,
			      dev->reg + RK_CRYPTO_TDES_IV_0, ivsize);
	else if (ivsize == AES_BLOCK_SIZE)
		memcpy_fromio(dev->ablk_req->info,
			      dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
}

/* return:
 *	true:	some error occurred
 *	false:	no error, continue
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
	int err = 0;

	dev->unload_data(dev);
	if (!dev->aligned) {
		if (!sg_pcopy_from_buffer(dev->ablk_req->dst, dev->nents,
					  dev->addr_vir, dev->count,
					  dev->total - dev->left_bytes -
					  dev->count)) {
			err = -EINVAL;
			goto out_rx;
		}
	}
	if (dev->left_bytes) {
		if (dev->aligned) {
			if (sg_is_last(dev->sg_src)) {
				dev_err(dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
			dev->sg_dst = sg_next(dev->sg_dst);
		}
		err = rk_set_data_start(dev);
	} else {
		rk_iv_copyback(dev);
		/* the whole calculation is done without any error */
		dev->complete(dev, 0);
	}
out_rx:
	return err;
}

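/*
 * Per-transform setup: hook the device callbacks, allocate one free page
 * as a bounce buffer for unaligned data and enable the crypto clocks.
 */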
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct rk_crypto_tmp *algt;

	algt = container_of(alg, struct rk_crypto_tmp, alg);

	ctx->dev = algt->dev;
	ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
	ctx->dev->start = rk_ablk_start;
	ctx->dev->update = rk_ablk_rx;
	ctx->dev->complete = rk_crypto_complete;
	ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

	return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

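/* Undo rk_ablk_cra_init(): free the bounce page and disable the clocks. */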
static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	free_page((unsigned long)ctx->dev->addr_vir);
	ctx->dev->disable_clk(ctx->dev);
}

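/*
 * Algorithm templates exported to the core driver, which registers them
 * with the crypto API at probe time.
 */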
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.alg = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "ecb-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_ecb_encrypt,
			.decrypt	= rk_aes_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
	.alg = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "cbc-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_cbc_encrypt,
			.decrypt	= rk_aes_cbc_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_ecb_des_alg = {
	.alg = {
		.cra_name		= "ecb(des)",
		.cra_driver_name	= "ecb-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des_ecb_encrypt,
			.decrypt	= rk_des_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_des_alg = {
	.alg = {
		.cra_name		= "cbc(des)",
		.cra_driver_name	= "cbc-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des_cbc_encrypt,
			.decrypt	= rk_des_cbc_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.alg = {
		.cra_name		= "ecb(des3_ede)",
		.cra_driver_name	= "ecb-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_ecb_encrypt,
			.decrypt	= rk_des3_ede_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.alg = {
		.cra_name		= "cbc(des3_ede)",
		.cra_driver_name	= "cbc-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_cbc_encrypt,
			.decrypt	= rk_des3_ede_cbc_decrypt,
		}
	}
};