// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */
#include <crypto/engine.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/string.h>
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC			BIT(0)
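
/*
 * Decide whether a request must go to the software fallback. Judging by the
 * checks below, the DMA engine can only walk scatterlists whose entries are
 * 32-bit aligned, whose lengths are block-size multiples, and whose source
 * and destination segmentation match; the stat_fb_* counters record which
 * constraint was violated.
 */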
static int rk_cipher_need_fallback(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);
	struct scatterlist *sgs, *sgd;
	unsigned int stodo, dtodo, len;
	unsigned int bs = crypto_skcipher_blocksize(tfm);

	if (!req->cryptlen)
		return true;

	len = req->cryptlen;
	sgs = req->src;
	sgd = req->dst;
	while (sgs && sgd) {
		if (!IS_ALIGNED(sgs->offset, sizeof(u32))) {
			algt->stat_fb_align++;
			return true;
		}
		if (!IS_ALIGNED(sgd->offset, sizeof(u32))) {
			algt->stat_fb_align++;
			return true;
		}
		stodo = min(len, sgs->length);
		if (stodo % bs) {
			algt->stat_fb_len++;
			return true;
		}
		dtodo = min(len, sgd->length);
		if (dtodo % bs) {
			algt->stat_fb_len++;
			return true;
		}
		if (stodo != dtodo) {
			algt->stat_fb_sgdiff++;
			return true;
		}
		len -= stodo;
		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}
	return false;
}
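
/*
 * Run the request on the software fallback tfm allocated in
 * rk_cipher_tfm_init(), mirroring the caller's flags, completion callback
 * and IV so the completion path looks the same as the hardware one.
 */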
static int rk_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct rk_cipher_ctx *op = crypto_skcipher_ctx(tfm);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);
	int err;

	algt->stat_fb++;

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->mode & RK_CRYPTO_DEC)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}
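
/*
 * Common entry point for all the encrypt/decrypt wrappers below: either
 * hand the request to the software fallback, or pick a crypto instance and
 * queue the request on its engine.
 */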
static int rk_cipher_handle_req(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_crypto_info *rkc;
	struct crypto_engine *engine;

	if (rk_cipher_need_fallback(req))
		return rk_cipher_fallback(req);

	rkc = get_rk_crypto();

	engine = rkc->engine;
	rctx->dev = rkc;

	return crypto_transfer_skcipher_request_to_engine(engine, req);
}
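
/*
 * setkey helpers: the key is cached in the tfm context so that
 * rk_cipher_hw_init() can program it into the engine later, and is also
 * handed to the fallback tfm so both paths stay in sync.
 */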
static int rk_aes_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}
static int rk_des_setkey(struct crypto_skcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}
static int rk_tdes_setkey(struct crypto_skcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	ctx->keylen = keylen;
	memcpy(ctx->key, key, keylen);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}
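
/*
 * Mode wrappers: each algorithm entry point only records the cipher,
 * chaining-mode and direction control bits in the request context and
 * defers to rk_cipher_handle_req(). DES-ECB encryption apparently is the
 * all-zeroes mode word, with RK_CRYPTO_TDES_SELECT distinguishing 3DES
 * from DES.
 */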
static int rk_aes_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_cipher_handle_req(req);
}

static int rk_aes_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_aes_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_cipher_handle_req(req);
}

static int rk_aes_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}
static int rk_des_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = 0;
	return rk_cipher_handle_req(req);
}

static int rk_des_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_cipher_handle_req(req);
}

static int rk_des_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_ecb_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_ecb_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_cbc_encrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_cipher_handle_req(req);
}

static int rk_des3_ede_cbc_decrypt(struct skcipher_request *req)
{
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);

	rctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		     RK_CRYPTO_DEC;
	return rk_cipher_handle_req(req);
}
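
/*
 * Program the engine for one request: write the accumulated mode word and
 * the cached key into the TDES or AES register bank depending on the block
 * size, set up FIFO byte-swapping, then unmask the block-cipher DMA
 * done/error interrupts.
 */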
static void rk_cipher_hw_init(struct rk_crypto_info *dev, struct skcipher_request *req)
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	u32 block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);

	if (block == DES_BLOCK_SIZE) {
		rctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			      RK_CRYPTO_TDES_BYTESWAP_KEY |
			      RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, rctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_KEY1_0, ctx->key, ctx->keylen);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		rctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			      RK_CRYPTO_AES_KEY_CHANGE |
			      RK_CRYPTO_AES_BYTESWAP_KEY |
			      RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			rctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			rctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, rctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_KEY_0, ctx->key, ctx->keylen);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}
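
/*
 * Kick one DMA transfer. The caller passes the length as todo / 4, so
 * RK_CRYPTO_BRDMAL appears to count 32-bit words rather than bytes; the
 * _SBF(RK_CRYPTO_BLOCK_START, 16) term presumably sets the matching
 * write-enable bit in the upper half of the control register, a common
 * Rockchip register convention (both are inferences from the call sites,
 * not datasheet quotes).
 */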
static void crypto_dma_start(struct rk_crypto_info *dev,
			     struct scatterlist *sgs,
			     struct scatterlist *sgd, unsigned int todo)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, sg_dma_address(sgs));
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, todo);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, sg_dma_address(sgd));
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}
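
/*
 * Engine worker: walks the src/dst scatterlists one entry at a time,
 * reprogramming the hardware and waiting for DMA completion for each chunk.
 * CBC chaining across chunks is done by hand: on decryption the last
 * ciphertext block of the chunk is saved (biv) before it can be overwritten
 * in place, on encryption it is read back from the destination, and either
 * way it becomes the IV for the next chunk.
 */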
static int rk_cipher_run(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct rk_cipher_rctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sgs, *sgd;
	int err = 0;
	int ivsize = crypto_skcipher_ivsize(tfm);
	int offset;
	u8 iv[AES_BLOCK_SIZE];
	u8 biv[AES_BLOCK_SIZE];
	u8 *ivtouse = areq->iv;
	unsigned int len = areq->cryptlen;
	unsigned int todo;
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);
	struct rk_crypto_info *rkc = rctx->dev;

	err = pm_runtime_resume_and_get(rkc->dev);
	if (err)
		return err;

	algt->stat_req++;
	rkc->nreq++;

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		if (rctx->mode & RK_CRYPTO_DEC) {
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
						 offset, ivsize, 0);
		}
	}

	sgs = areq->src;
	sgd = areq->dst;

	while (sgs && sgd && len) {
		ivsize = crypto_skcipher_ivsize(tfm);
		if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
			if (rctx->mode & RK_CRYPTO_DEC) {
				/* we backup last block of source to be used as IV at next step */
				offset = sgs->length - ivsize;
				scatterwalk_map_and_copy(biv, sgs, offset, ivsize, 0);
			}
		}
		if (sgs == sgd) {
			err = dma_map_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_iv;
			}
		} else {
			err = dma_map_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_iv;
			}
			err = dma_map_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
			if (err <= 0) {
				err = -EINVAL;
				goto theend_sgs;
			}
		}
		err = 0;
		rk_cipher_hw_init(rkc, areq);
		if (ivsize) {
			if (ivsize == DES_BLOCK_SIZE)
				memcpy_toio(rkc->reg + RK_CRYPTO_TDES_IV_0, ivtouse, ivsize);
			else
				memcpy_toio(rkc->reg + RK_CRYPTO_AES_IV_0, ivtouse, ivsize);
		}
		reinit_completion(&rkc->complete);
		rkc->status = 0;

		todo = min(sg_dma_len(sgs), len);
		len -= todo;
		crypto_dma_start(rkc, sgs, sgd, todo / 4);
		wait_for_completion_interruptible_timeout(&rkc->complete,
							  msecs_to_jiffies(2000));
		if (!rkc->status) {
			dev_err(rkc->dev, "DMA timeout\n");
			err = -EFAULT;
			goto theend;
		}
		if (sgs == sgd) {
			dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
		} else {
			dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
			dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
		}
		if (rctx->mode & RK_CRYPTO_DEC) {
			memcpy(iv, biv, ivsize);
			ivtouse = iv;
		} else {
			offset = sgd->length - ivsize;
			scatterwalk_map_and_copy(iv, sgd, offset, ivsize, 0);
			ivtouse = iv;
		}
		sgs = sg_next(sgs);
		sgd = sg_next(sgd);
	}

	if (areq->iv && ivsize > 0) {
		offset = areq->cryptlen - ivsize;
		if (rctx->mode & RK_CRYPTO_DEC) {
			memcpy(areq->iv, rctx->backup_iv, ivsize);
			memzero_explicit(rctx->backup_iv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend:
	pm_runtime_put_autosuspend(rkc->dev);

	local_bh_disable();
	crypto_finalize_skcipher_request(engine, areq, err);
	local_bh_enable();
	return 0;

theend_sgs:
	if (sgs == sgd) {
		dma_unmap_sg(rkc->dev, sgs, 1, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(rkc->dev, sgs, 1, DMA_TO_DEVICE);
		dma_unmap_sg(rkc->dev, sgd, 1, DMA_FROM_DEVICE);
	}
theend_iv:
	return err;
}
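
/*
 * Allocate the software fallback at tfm init time and size the request
 * context so the fallback request can live at its tail.
 */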
static int rk_cipher_tfm_init(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_tmp *algt = container_of(alg, struct rk_crypto_tmp, alg.skcipher.base);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(algt->dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct rk_cipher_rctx) +
				    crypto_skcipher_reqsize(ctx->fallback_tfm));

	return 0;
}
static void rk_cipher_tfm_exit(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	memzero_explicit(ctx->key, ctx->keylen);
	crypto_free_skcipher(ctx->fallback_tfm);
}
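
/*
 * Algorithm templates. Each pairs the skcipher definition with the engine
 * callback (do_one_request = rk_cipher_run); registration with the crypto
 * API happens elsewhere in the driver, presumably the probe code in
 * rk3288_crypto.c that owns the rk_crypto_tmp list. cra_alignmask requests
 * 16-byte-aligned buffers for AES and 8-byte-aligned buffers for DES/3DES.
 */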
struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "ecb(aes)",
		.base.cra_driver_name	= "ecb-aes-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x0f,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.setkey			= rk_aes_setkey,
		.encrypt		= rk_aes_ecb_encrypt,
		.decrypt		= rk_aes_ecb_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};
struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "cbc(aes)",
		.base.cra_driver_name	= "cbc-aes-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= AES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x0f,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.ivsize			= AES_BLOCK_SIZE,
		.setkey			= rk_aes_setkey,
		.encrypt		= rk_aes_cbc_encrypt,
		.decrypt		= rk_aes_cbc_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};
struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "ecb(des)",
		.base.cra_driver_name	= "ecb-des-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.setkey			= rk_des_setkey,
		.encrypt		= rk_des_ecb_encrypt,
		.decrypt		= rk_des_ecb_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};
struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "cbc(des)",
		.base.cra_driver_name	= "cbc-des-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES_KEY_SIZE,
		.max_keysize		= DES_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= rk_des_setkey,
		.encrypt		= rk_des_cbc_encrypt,
		.decrypt		= rk_des_cbc_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};
struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "ecb(des3_ede)",
		.base.cra_driver_name	= "ecb-des3-ede-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.setkey			= rk_tdes_setkey,
		.encrypt		= rk_des3_ede_ecb_encrypt,
		.decrypt		= rk_des3_ede_ecb_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};
struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.alg.skcipher.base = {
		.base.cra_name		= "cbc(des3_ede)",
		.base.cra_driver_name	= "cbc-des3-ede-rk",
		.base.cra_priority	= 300,
		.base.cra_flags		= CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK,
		.base.cra_blocksize	= DES_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),
		.base.cra_alignmask	= 0x07,
		.base.cra_module	= THIS_MODULE,

		.init			= rk_cipher_tfm_init,
		.exit			= rk_cipher_tfm_exit,
		.min_keysize		= DES3_EDE_KEY_SIZE,
		.max_keysize		= DES3_EDE_KEY_SIZE,
		.ivsize			= DES_BLOCK_SIZE,
		.setkey			= rk_tdes_setkey,
		.encrypt		= rk_des3_ede_cbc_encrypt,
		.decrypt		= rk_des3_ede_cbc_decrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = rk_cipher_run,
	},
};