/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC			BIT(0)
static void rk_crypto_complete(struct rk_crypto_info *dev, int err)
{
        if (dev->ablk_req->base.complete)
                dev->ablk_req->base.complete(&dev->ablk_req->base, err);
}
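/*
 * Validate and book-keep a new ablkcipher request, enqueue it on the
 * device queue and kick the crypto tasklet that drives the hardware.
 */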
static int rk_handle_req(struct rk_crypto_info *dev,
                         struct ablkcipher_request *req)
{
        unsigned long flags;
        int err;

        if (!IS_ALIGNED(req->nbytes, dev->align_size))
                return -EINVAL;

        dev->left_bytes = req->nbytes;
        dev->total = req->nbytes;
        dev->sg_src = req->src;
        dev->first = req->src;
        dev->nents = sg_nents(req->src);
        dev->sg_dst = req->dst;
        dev->aligned = 1;
        dev->ablk_req = req;

        spin_lock_irqsave(&dev->lock, flags);
        err = ablkcipher_enqueue_request(&dev->queue, req);
        spin_unlock_irqrestore(&dev->lock, flags);
        tasklet_schedule(&dev->crypto_tasklet);
        return err;
}
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
        return 0;
}
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
                          const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 tmp[DES_EXPKEY_WORDS];

        if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /* des_ekey() returns 0 for a weak key; reject it if the caller
         * asked for weak keys to be refused.
         */
        if (keylen == DES_KEY_SIZE) {
                if (!des_ekey(tmp, key) &&
                    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                        return -EINVAL;
                }
        }

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}
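/*
 * The per-mode encrypt/decrypt entry points below only select the hardware
 * mode bits for the request; the rest of the control-register setup is done
 * later in rk_ablk_hw_init().
 */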
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_ECB_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_CBC_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}
static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = 0;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}
static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        dev->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                    RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}
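/*
 * Program the control, key byte-swap, IV and interrupt-enable registers for
 * the current request; DES/3DES and AES use separate register banks.
 */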
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
        struct crypto_ablkcipher *cipher =
                crypto_ablkcipher_reqtfm(dev->ablk_req);
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 ivsize, block, conf_reg = 0;

        block = crypto_tfm_alg_blocksize(tfm);
        ivsize = crypto_ablkcipher_ivsize(cipher);

        if (block == DES_BLOCK_SIZE) {
                dev->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                             RK_CRYPTO_TDES_BYTESWAP_KEY |
                             RK_CRYPTO_TDES_BYTESWAP_IV;
                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, dev->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0,
                            dev->ablk_req->info, ivsize);
                conf_reg = RK_CRYPTO_DESSEL;
        } else {
                dev->mode |= RK_CRYPTO_AES_FIFO_MODE |
                             RK_CRYPTO_AES_KEY_CHANGE |
                             RK_CRYPTO_AES_BYTESWAP_KEY |
                             RK_CRYPTO_AES_BYTESWAP_IV;
                if (ctx->keylen == AES_KEYSIZE_192)
                        dev->mode |= RK_CRYPTO_AES_192BIT_key;
                else if (ctx->keylen == AES_KEYSIZE_256)
                        dev->mode |= RK_CRYPTO_AES_256BIT_key;
                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, dev->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0,
                            dev->ablk_req->info, ivsize);
        }
        conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                    RK_CRYPTO_BYTESWAP_BRFIFO;
        CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
                     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}
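/*
 * Point the block-cipher DMA engine at the current source and destination
 * buffers and start the transfer; the length register takes 32-bit words.
 */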
static void crypto_dma_start(struct rk_crypto_info *dev)
{
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                     _SBF(RK_CRYPTO_BLOCK_START, 16));
}
static int rk_set_data_start(struct rk_crypto_info *dev)
{
        int err;

        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
        if (!err)
                crypto_dma_start(dev);
        return err;
}
static int rk_ablk_start(struct rk_crypto_info *dev)
{
        unsigned long flags;
        int err;

        spin_lock_irqsave(&dev->lock, flags);
        rk_ablk_hw_init(dev);
        err = rk_set_data_start(dev);
        spin_unlock_irqrestore(&dev->lock, flags);
        return err;
}
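/*
 * For CBC modes, copy the IV left in the hardware registers back into the
 * request so that callers can chain subsequent requests.
 */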
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(dev->ablk_req);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);

        if (ivsize == DES_BLOCK_SIZE)
                memcpy_fromio(dev->ablk_req->info,
                              dev->reg + RK_CRYPTO_TDES_IV_0, ivsize);
        else if (ivsize == AES_BLOCK_SIZE)
                memcpy_fromio(dev->ablk_req->info,
                              dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
}
/*
 * Return 0 if this chunk completed without error (either more data was
 * queued or the whole request finished), or a negative errno on failure.
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
        int err = 0;

        dev->unload_data(dev);
        if (!dev->aligned) {
                if (!sg_pcopy_from_buffer(dev->ablk_req->dst, dev->nents,
                                          dev->addr_vir, dev->count,
                                          dev->total - dev->left_bytes -
                                          dev->count)) {
                        err = -EINVAL;
                        goto out_rx;
                }
        }
        if (dev->left_bytes) {
                if (dev->aligned) {
                        if (sg_is_last(dev->sg_src)) {
                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
                                        __func__, __LINE__);
                                err = -ENOMEM;
                                goto out_rx;
                        }
                        dev->sg_src = sg_next(dev->sg_src);
                        dev->sg_dst = sg_next(dev->sg_dst);
                }
                err = rk_set_data_start(dev);
        } else {
                rk_iv_copyback(dev);
                /* the whole request has been processed without error */
                dev->complete(dev, 0);
        }
out_rx:
        return err;
}
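/*
 * Per-tfm init: bind the tfm to the crypto device, install the request
 * callbacks and allocate one bounce page for unaligned scatterlists.
 */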
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct rk_crypto_tmp *algt;

        algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

        ctx->dev = algt->dev;
        ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
        ctx->dev->start = rk_ablk_start;
        ctx->dev->update = rk_ablk_rx;
        ctx->dev->complete = rk_crypto_complete;
        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}
static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        free_page((unsigned long)ctx->dev->addr_vir);
        ctx->dev->disable_clk(ctx->dev);
}
struct rk_crypto_tmp rk_ecb_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(aes)",
                .cra_driver_name        = "ecb-aes-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x0f,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = rk_aes_setkey,
                        .encrypt        = rk_aes_ecb_encrypt,
                        .decrypt        = rk_aes_ecb_decrypt,
                },
        },
};
struct rk_crypto_tmp rk_cbc_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(aes)",
                .cra_driver_name        = "cbc-aes-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x0f,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = rk_aes_setkey,
                        .encrypt        = rk_aes_cbc_encrypt,
                        .decrypt        = rk_aes_cbc_decrypt,
                },
        },
};
struct rk_crypto_tmp rk_ecb_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(des)",
                .cra_driver_name        = "ecb-des-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des_ecb_encrypt,
                        .decrypt        = rk_des_ecb_decrypt,
                },
        },
};
struct rk_crypto_tmp rk_cbc_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(des)",
                .cra_driver_name        = "cbc-des-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des_cbc_encrypt,
                        .decrypt        = rk_des_cbc_decrypt,
                },
        },
};
struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(des3_ede)",
                .cra_driver_name        = "ecb-des3-ede-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des3_ede_ecb_encrypt,
                        .decrypt        = rk_des3_ede_ecb_decrypt,
                },
        },
};
struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(des3_ede)",
                .cra_driver_name        = "cbc-des3-ede-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des3_ede_cbc_encrypt,
                        .decrypt        = rk_des3_ede_cbc_decrypt,
                },
        },
};