/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */

#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC			BIT(0)

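/*
 * Every mode handler below builds ctx->mode from the cipher/chain-mode
 * control bits, OR-ing in RK_CRYPTO_DEC to select decryption.
 */
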
static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
	if (base->complete)
		base->complete(base, err);
}

static int rk_handle_req(struct rk_crypto_info *dev,
			 struct ablkcipher_request *req)
{
	if (!IS_ALIGNED(req->nbytes, dev->align_size))
		return -EINVAL;
	else
		return dev->enqueue(dev, &req->base);
}

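/*
 * All encrypt/decrypt entry points funnel through rk_handle_req(): a request
 * whose length is not a multiple of dev->align_size is rejected with -EINVAL;
 * anything else is placed on the device queue and handled asynchronously.
 */
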
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
			 const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
	return 0;
}

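/*
 * A minimal usage sketch with the legacy ablkcipher API of this driver's
 * era (error handling and completion waiting elided; "ecb(aes)" resolves
 * to .cra_name below):
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("ecb(aes)", 0, 0);
 *	struct ablkcipher_request *req =
 *		ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	ablkcipher_request_set_crypt(req, src, dst, nbytes, NULL);
 *	crypto_ablkcipher_encrypt(req);
 */
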
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
			  const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		if (!des_ekey(tmp, key) &&
		    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			return -EINVAL;
		}
	}

	ctx->keylen = keylen;
	memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
	return 0;
}

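/*
 * For single DES, des_ekey() detects weak keys; the key is rejected only
 * when the caller asked for weak-key checking via CRYPTO_TFM_REQ_WEAK_KEY.
 * Both DES and 3DES keys are written to the same register window starting
 * at RK_CRYPTO_TDES_KEY1_0.
 */
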
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_ECB_MODE;
	return rk_handle_req(dev, req);
}

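/*
 * The remaining request handlers all follow the pattern above: pick the
 * control bits for the requested cipher/chaining mode, OR in RK_CRYPTO_DEC
 * for decryption, and pass the request on to rk_handle_req().
 */
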
static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_CBC_MODE;
	return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = 0;
	return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
	return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	struct rk_crypto_info *dev = ctx->dev;

	ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
		    RK_CRYPTO_DEC;
	return rk_handle_req(dev, req);
}

static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_ablkcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

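/*
 * The block size selects the engine: DES/3DES requests program
 * RK_CRYPTO_TDES_CTRL and load the IV from req->info into the TDES IV
 * registers; AES requests do the same through the AES control/IV registers,
 * with the key-length bits derived from ctx->keylen.
 */
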
static void crypto_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

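/*
 * RK_CRYPTO_BRDMAL takes the receive-DMA length in 32-bit words, hence
 * dev->count / 4. The _SBF(RK_CRYPTO_BLOCK_START, 16) term presumably sets
 * the matching write-enable bit in the upper half of RK_CRYPTO_CTRL, a
 * common Rockchip register convention.
 */
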
static int rk_set_data_start(struct rk_crypto_info *dev)
{
	int err;
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);
	u8 *src_last_blk = page_address(sg_page(dev->sg_src)) +
		dev->sg_src->offset + dev->sg_src->length - ivsize;

	/* Store the IV that needs to be updated in chain mode, and update
	 * the IV buffer to contain the next IV for decryption mode.
	 */
	if (ctx->mode & RK_CRYPTO_DEC) {
		memcpy(ctx->iv, src_last_blk, ivsize);
		sg_pcopy_to_buffer(dev->first, dev->src_nents, req->info,
				   ivsize, dev->total - ivsize);
	}

	err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
	if (!err)
		crypto_dma_start(dev);
	return err;
}

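/*
 * For CBC decryption the ciphertext must be sampled before DMA runs, since
 * in-place operation overwrites it: the last block of the current chunk is
 * stashed in ctx->iv (to reload the hardware IV for the next chunk), and
 * the request's final ciphertext block is copied into req->info so the
 * caller receives the next IV.
 */
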
static int rk_ablk_start(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	unsigned long flags;
	int err = 0;

	dev->left_bytes = req->nbytes;
	dev->total = req->nbytes;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->src_nents = sg_nents(req->src);
	dev->sg_dst = req->dst;
	dev->dst_nents = sg_nents(req->dst);
	dev->aligned = 1;

	spin_lock_irqsave(&dev->lock, flags);
	rk_ablk_hw_init(dev);
	err = rk_set_data_start(dev);
	spin_unlock_irqrestore(&dev->lock, flags);
	return err;
}

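/*
 * dev->lock is held across rk_ablk_hw_init() and the first DMA kick so
 * that control-register setup and transfer start appear atomic to the
 * interrupt handler.
 */
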
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);

	/* Update the IV buffer to contain the next IV for encryption mode. */
	if (!(ctx->mode & RK_CRYPTO_DEC)) {
		if (dev->aligned)
			memcpy(req->info, sg_virt(dev->sg_dst) +
			       dev->sg_dst->length - ivsize, ivsize);
		else
			memcpy(req->info, dev->addr_vir +
			       dev->count - ivsize, ivsize);
	}
}

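/*
 * The ablkcipher API expects req->info to hold the IV for a follow-on
 * request; for encryption that is the last ciphertext block just produced,
 * taken either straight from the destination scatterlist or from the
 * bounce page for unaligned requests.
 */
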
static void rk_update_iv(struct rk_crypto_info *dev)
{
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	u32 ivsize = crypto_ablkcipher_ivsize(tfm);
	u8 *new_iv = NULL;

	if (ctx->mode & RK_CRYPTO_DEC) {
		new_iv = ctx->iv;
	} else {
		new_iv = page_address(sg_page(dev->sg_dst)) +
			 dev->sg_dst->offset + dev->sg_dst->length - ivsize;
	}

	if (ivsize == DES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
	else if (ivsize == AES_BLOCK_SIZE)
		memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
}

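/*
 * Between scatterlist entries the hardware IV register has to be reloaded:
 * decryption uses the ciphertext block stashed in ctx->iv by
 * rk_set_data_start(), encryption the last output block of the chunk that
 * just completed.
 */
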
/*
 * Return:
 *	non-zero	an error occurred
 *	zero		no error, continue
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
	int err = 0;
	struct ablkcipher_request *req =
		ablkcipher_request_cast(dev->async_req);

	dev->unload_data(dev);
	if (!dev->aligned) {
		if (!sg_pcopy_from_buffer(req->dst, dev->dst_nents,
					  dev->addr_vir, dev->count,
					  dev->total - dev->left_bytes -
					  dev->count)) {
			err = -EINVAL;
			goto out_rx;
		}
	}
	if (dev->left_bytes) {
		rk_update_iv(dev);
		if (dev->aligned) {
			if (sg_is_last(dev->sg_src)) {
				dev_err(dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
			dev->sg_dst = sg_next(dev->sg_dst);
		}
		err = rk_set_data_start(dev);
	} else {
		rk_iv_copyback(dev);
		/* the calculation finished without any error */
		dev->complete(dev->async_req, 0);
		tasklet_schedule(&dev->queue_task);
	}
out_rx:
	return err;
}

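/*
 * rk_ablk_rx() runs via dev->update once a DMA chunk has completed: it
 * copies bounce-buffer output back for unaligned requests, then either
 * advances to the next scatterlist entry or completes the request and lets
 * the queue tasklet pick up more work.
 */
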
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct rk_crypto_tmp *algt;

	algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

	ctx->dev = algt->dev;
	ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
	ctx->dev->start = rk_ablk_start;
	ctx->dev->update = rk_ablk_rx;
	ctx->dev->complete = rk_crypto_complete;
	ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

	return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

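/*
 * Note that the callbacks and the single bounce page live in the shared
 * rk_crypto_info, not in the per-tfm context; addr_vir is one page, which
 * presumably bounds how much data load_data() stages per DMA chunk.
 */
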
static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	free_page((unsigned long)ctx->dev->addr_vir);
	ctx->dev->disable_clk(ctx->dev);
}

struct rk_crypto_tmp rk_ecb_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "ecb(aes)",
		.cra_driver_name	= "ecb-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_ecb_encrypt,
			.decrypt	= rk_aes_ecb_decrypt,
		}
	}
};

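/*
 * .cra_alignmask = 0x0f makes the crypto core hand over 16-byte-aligned
 * buffers where it can; rk_ablk_cra_init() turns the same mask into
 * dev->align_size, which rk_handle_req() checks against req->nbytes.
 */
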
struct rk_crypto_tmp rk_cbc_aes_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "cbc(aes)",
		.cra_driver_name	= "cbc-aes-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x0f,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= rk_aes_setkey,
			.encrypt	= rk_aes_cbc_encrypt,
			.decrypt	= rk_aes_cbc_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_ecb_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "ecb(des)",
		.cra_driver_name	= "ecb-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des_ecb_encrypt,
			.decrypt	= rk_des_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_des_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "cbc(des)",
		.cra_driver_name	= "cbc-des-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des_cbc_encrypt,
			.decrypt	= rk_des_cbc_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "ecb(des3_ede)",
		.cra_driver_name	= "ecb-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_ecb_encrypt,
			.decrypt	= rk_des3_ede_ecb_decrypt,
		}
	}
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
	.type = ALG_TYPE_CIPHER,
	.alg.crypto = {
		.cra_name		= "cbc(des3_ede)",
		.cra_driver_name	= "cbc-des3-ede-rk",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
					  CRYPTO_ALG_ASYNC,
		.cra_blocksize		= DES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct rk_cipher_ctx),
		.cra_alignmask		= 0x07,
		.cra_type		= &crypto_ablkcipher_type,
		.cra_module		= THIS_MODULE,
		.cra_init		= rk_ablk_cra_init,
		.cra_exit		= rk_ablk_cra_exit,
		.cra_u.ablkcipher	= {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.ivsize		= DES_BLOCK_SIZE,
			.setkey		= rk_tdes_setkey,
			.encrypt	= rk_des3_ede_cbc_encrypt,
			.decrypt	= rk_des3_ede_cbc_decrypt,
		}
	}
};