// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher	template_u.skcipher

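/*
 * Key material bookkeeping: a plain user key is staged in a DMA-able
 * buffer, while HW (FDE) and CPP (policy) protected keys only reference
 * key slots inside the CryptoCell engine.
 */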
struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int cipher_mode;
	int flow_mode;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct cc_cpp_key_info cpp;
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

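/* Reject key sizes the flow/cipher mode combination cannot handle, e.g.
 * double-size AES keys are only valid for the XEX-style modes.
 */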
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

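/* Check the request size against the mode: block multiples for the block
 * modes, at least one AES block for XTS/CTS, anything for the stream modes.
 */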
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return 0;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case KFDE0_SLOT:
		return KFDE0_KEY;
	case KFDE1_SLOT:
		return KFDE1_KEY;
	case KFDE2_SLOT:
		return KFDE2_KEY;
	case KFDE3_SLOT:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}

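/* Load a protected key token: validate it, resolve the slot numbers to
 * HW or CPP key slots and record the real key length for the crypto op.
 */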
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}

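/* Load a regular user key: validate it, stage it in the DMA-able context
 * buffer and, for ESSIV, derive the second half as sha256 of the first.
 */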
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if ((keylen == DES3_EDE_KEY_SIZE &&
		     verify_skcipher_des3_key(sktfm, key)) ||
		    verify_skcipher_des_key(sktfm, key)) {
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

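/* Append a descriptor that writes the next IV back to the request context
 * so that chained requests can resume from the correct state.
 */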
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

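/* Load the IV/counter state for the non-XEX modes; the XEX-style modes
 * (XTS/ESSIV/BITLOCKER) are handled by cc_setup_xex_state_desc() instead.
 */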
static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

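/* Load the XEX key (key2 half or HW slot) and the IV for the XEX-style
 * modes; a no-op for every other cipher mode.
 */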
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				    struct cipher_req_ctx *req_ctx,
				    unsigned int ivsize, unsigned int nbytes,
				    struct cc_hw_desc desc[],
				    unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

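/* Load the cipher key, either from a protected key slot or from the
 * DMA-mapped user key buffer in the context.
 */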
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_POLICY_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/* des */
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

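/* Copy the MLLI table into SRAM via the bypass engine when the data is
 * scatter-gathered; nothing to do for direct (DLLI) buffers.
 */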
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}

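/* Stream the payload through the engine, either as a single DLLI buffer
 * or through the MLLI tables set up by cc_setup_mlli_desc().
 */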
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

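/* Request completion callback: unmap the request, pass the output IV back
 * to the caller and complete the skcipher request.
 */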
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kzfree(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}

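/* Common encrypt/decrypt path: validate and map the request, build the
 * descriptor sequence and hand it to the request manager.
 */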
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_data_size(ctx_p, nbytes)) {
		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = cc_cipher_complete;
	cc_req.user_arg = req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV) */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key) */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
		 * for the reason why this differs from the generic
		 * implementation
		 */
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "cbc(psm4)",
		.driver_name = "cbc-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
	{
		.name = "ctr(psm4)",
		.driver_name = "ctr-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
};

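/* Instantiate a driver alg instance from one of the templates above. */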
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

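/* Unregister and free all registered skcipher algs. */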
int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;

	/* Remove registered algs */
	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
		crypto_unregister_skcipher(&t_alg->skcipher_alg);
		list_del(&t_alg->entry);
	}
	return 0;
}

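/* Register every template supported by this HW revision, the configured
 * standards bodies and the current security state.
 */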
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	INIT_LIST_HEAD(&drvdata->alg_list);

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}

		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &drvdata->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}