// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/internal/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_SKCIPHER_SEQ_LEN 6

#define template_skcipher	template_u.skcipher

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	union {
		struct cc_hw_key_info hw;
		struct cc_cpp_key_info cpp;
	};
	struct crypto_shash *shash_tfm;
	struct crypto_skcipher *fallback_tfm;
	bool fallback_on;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	default:
		break;
	}
	return -EINVAL;
}

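/*
 * Per-tfm setup: a DMA-able key staging buffer is allocated and mapped once
 * for the lifetime of the transform, sized for the algorithm's largest key.
 * For ESSIV the buffer is doubled so the SHA-256 digest of the key (used as
 * the second, IV-encrypting key) can be stored right after the key itself.
 */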
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	unsigned int fallback_req_size = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		const char *name = crypto_tfm_alg_name(tfm);

		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
		max_key_buf_size <<= 1;

		/* Alloc fallback tfm for essiv when key size != 256 bit */
		ctx_p->fallback_tfm =
			crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

		if (IS_ERR(ctx_p->fallback_tfm)) {
			/* Note we're still allowing registration with no fallback since it's
			 * better to have most modes supported than none at all.
			 */
			dev_warn(dev, "Error allocating fallback algo %s. Some modes may be available.\n",
				 name);
			ctx_p->fallback_tfm = NULL;
		} else {
			fallback_req_size = crypto_skcipher_reqsize(ctx_p->fallback_tfm);
		}
	}

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx) + fallback_req_size);

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kzalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		goto free_fallback;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		goto free_key;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	return 0;

free_key:
	kfree(ctx_p->user.key);
free_fallback:
	crypto_free_skcipher(ctx_p->fallback_tfm);
	crypto_free_shash(ctx_p->shash_tfm);

	return -ENOMEM;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
		container_of(alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
		crypto_free_skcipher(ctx_p->fallback_tfm);
		ctx_p->fallback_tfm = NULL;
		max_key_buf_size <<= 1;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
	kfree_sensitive(ctx_p->user.key);
}

static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}

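/*
 * Protected keys are not key material: the caller passes a struct
 * cc_hkey_info token naming key slots inside the CryptoCell, so only slot
 * numbers ever cross the API. A minimal caller sketch (hypothetical slot
 * values; the token layout comes from cc_driver.h):
 *
 *	struct cc_hkey_info hki = {
 *		.keylen = AES_KEYSIZE_256,	/@ size of the key in the slot @/
 *		.hw_key1 = 0,
 *		.hw_key2 = 1,			/@ HW (FDE) key slots @/
 *	};
 *	crypto_skcipher_setkey(tfm, (u8 *)&hki, sizeof(hki));
 *
 * (The @ markers above stand in for nested comment delimiters.)
 */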
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Unsupported key size %d.\n", keylen);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;
	ctx_p->fallback_on = false;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}

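/*
 * Unprotected key path: the key is validated, copied into the pre-mapped
 * staging buffer and synced to the device. ESSIV keys that are not 256 bit
 * are handed to the software fallback tfm allocated at init time, since the
 * hardware flow below only supports ESSIV-CBC-AES with 256 bit keys.
 */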
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_dbg(dev, "Invalid key size %d.\n", keylen);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {

		/* We only support 256 bit ESSIV-CBC-AES keys */
		if (keylen != AES_KEYSIZE_256) {
			unsigned int flags = crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_MASK;

			if (likely(ctx_p->fallback_tfm)) {
				ctx_p->fallback_on = true;
				crypto_skcipher_clear_flags(ctx_p->fallback_tfm,
							    CRYPTO_TFM_REQ_MASK);
				crypto_skcipher_clear_flags(ctx_p->fallback_tfm, flags);
				return crypto_skcipher_setkey(ctx_p->fallback_tfm, key, keylen);
			}

			dev_dbg(dev, "Unsupported key size %d and no fallback.\n", keylen);
			return -EINVAL;
		}

		/* Internal ESSIV key buffer is double sized */
		max_key_buf_size <<= 1;
	}

	ctx_p->fallback_on = false;
	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if ((keylen == DES3_EDE_KEY_SIZE &&
		     verify_skcipher_des3_key(sktfm, key)) ||
		    verify_skcipher_des_key(sktfm, key)) {
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_verify_key(sktfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int err;

		err = crypto_shash_tfm_digest(ctx_p->shash_tfm,
					      ctx_p->user.key, keylen,
					      ctx_p->user.key + keylen);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}

		keylen <<= 1;
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

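/*
 * Each request is compiled into a short sequence (at most
 * MAX_SKCIPHER_SEQ_LEN entries) of hardware descriptors; every helper below
 * appends to desc[] and bumps *seq_size. This one reads the updated IV back
 * after processing so a chained request can continue the stream; requests
 * using a policy-protected (CPP) key skip the read-back.
 */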
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

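/*
 * XTS and ESSIV need a second setup pass: load the XEX/tweak key (key2, or
 * the SHA-256 derived key for ESSIV) into the AES2 engine, then load the IV
 * as STATE1. The other modes were already handled by cc_setup_state_desc()
 * above.
 */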
static void cc_setup_xex_state_desc(struct crypto_tfm *tfm,
				    struct cipher_req_ctx *req_ctx,
				    unsigned int ivsize, unsigned int nbytes,
				    struct cc_hw_desc desc[],
				    unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = (ctx_p->keylen / 2);
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int key_offset = key_len;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
		if (cipher_mode == DRV_CIPHER_ESSIV)
			key_len = SHA256_DIGEST_SIZE;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + key_offset),
				     key_len, NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], nbytes);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], key_len);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], key_len);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

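/*
 * Key load descriptor. Note the 192-bit AES quirk below: the key is fed to
 * the engine in an AES_MAX_KEY_SIZE container (the staging buffer was
 * zero-allocated, so the tail is padding), while the real key size is still
 * programmed via set_key_size_aes().
 */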
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/* des */
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

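/*
 * For scattered buffers the driver builds an MLLI (multi-entry linked list)
 * table in host memory; the BYPASS descriptor below copies that table into
 * the CryptoCell's SRAM so the data-flow descriptor can reference it there.
 */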
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}

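/*
 * Data movement descriptor: DLLI (one contiguous mapping) or MLLI via the
 * SRAM table set up above. last_desc marks the cases where this is the final
 * descriptor of the sequence (ECB and CPP flows have no IV read-back), so
 * the queue-last indication is raised here instead of in
 * cc_setup_readiv_desc().
 */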
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				ctx_p->drvdata->mlli_sram_addr,
				ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kfree_sensitive(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}

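/*
 * Main request path: validate sizes, snapshot the (possibly stack-allocated)
 * IV into a DMA-able copy, map src/dst, compile the descriptor sequence and
 * hand it to the request manager. Completion, or a synchronous failure,
 * funnels through cc_cipher_complete() above.
 */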
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_SKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_data_size(ctx_p, nbytes)) {
		dev_dbg(dev, "Unsupported data size %d.\n", nbytes);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	if (ctx_p->fallback_on) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx_p->fallback_tfm);
		if (direction == DRV_CRYPTO_DIRECTION_ENCRYPT)
			return crypto_skcipher_encrypt(subreq);
		else
			return crypto_skcipher_decrypt(subreq);
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = cc_cipher_complete;
	cc_req.user_arg = req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup state (IV) */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Setup state (IV and XEX key) */
	cc_setup_xex_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kfree_sensitive(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

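/*
 * Template table driving registration. The "paes"/"psm4" entries take
 * protected key tokens through cc_cipher_sethkey(); the plain entries take
 * raw key material. A hedged usage sketch, assuming the standard kernel
 * crypto API and that this driver wins on priority:
 *
 *	struct crypto_skcipher *tfm =
 *		crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	(resolves to cbc-aes-ccree on supported hardware)
 */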
/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(cbc(paes),sha256)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	/* See https://www.mail-archive.com/linux-crypto@vger.kernel.org/msg40576.html
	 * for the reason why this differs from the generic implementation
	 */
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "essiv(cbc(aes),sha256)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
	},
	{
		.name = "cbc(sm4)",
		.driver_name = "cbc-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ecb(sm4)",
		.driver_name = "ecb-sm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "ctr(sm4)",
		.driver_name = "ctr-sm4-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = SM4_KEY_SIZE,
			.max_keysize = SM4_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
	},
	{
		.name = "cbc(psm4)",
		.driver_name = "cbc-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
	{
		.name = "ctr(psm4)",
		.driver_name = "ctr-psm4-ccree",
		.blocksize = SM4_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = SM4_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_SM4,
		.min_hw_rev = CC_HW_REV_713,
		.std_body = CC_STD_OSCCA,
		.sec_func = true,
	},
};

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s",
		     tmpl->name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     tmpl->driver_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;

	/* Remove registered algs */
	list_for_each_entry_safe(t_alg, n, &drvdata->alg_list, entry) {
		crypto_unregister_skcipher(&t_alg->skcipher_alg);
		list_del(&t_alg->entry);
	}
	return 0;
}

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	INIT_LIST_HEAD(&drvdata->alg_list);

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}

		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			goto fail0;
		}

		list_add_tail(&t_alg->entry, &drvdata->alg_list);
		dev_dbg(dev, "Registered %s\n",
			t_alg->skcipher_alg.base.cra_driver_name);
	}

	return 0;

fail0:
	cc_cipher_free(drvdata);

	return rc;
}