// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

#define CC_MIN_AES_XTS_SIZE 0x10
#define CC_MAX_AES_XTS_SIZE 0x2000

struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}

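/*
 * Key-size policy enforced below: for AES flows, 128/192-bit keys are
 * rejected for the two-key modes (XTS/ESSIV/BITLOCKER), 256-bit keys
 * are always accepted, and double-length keys (2 x 192/256 bits) are
 * accepted only for the two-key modes, which split the buffer into a
 * cipher key and a tweak key. DES accepts single or triple-DES sizes.
 */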
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

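/*
 * Data-size policy: XTS requests must be block aligned and within the
 * CC_MIN_AES_XTS_SIZE..CC_MAX_AES_XTS_SIZE window, CTS needs at least
 * one block, CTR and OFB accept any length, and the remaining AES/DES
 * block modes require block-aligned input.
 */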
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= CC_MIN_AES_XTS_SIZE &&
			    size <= CC_MAX_AES_XTS_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}

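/*
 * Per-tfm init: cache the mode/flow from the registered alg and set up
 * a key buffer that stays DMA mapped for the lifetime of the tfm, so
 * setkey only needs dma_sync_single_for_*() rather than a fresh
 * mapping per key. ESSIV additionally gets a software sha256 tfm for
 * deriving the second key.
 */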
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return 0;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

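/*
 * The "paes" (protected AES) algorithms are keyed with a hardware key
 * token rather than raw key material: the token names one or two
 * on-chip key slots plus the effective key size. A minimal sketch of
 * building such a token, assuming the cc_hkey_info layout from
 * cc_cipher.h with a keylen field alongside hw_key1/hw_key2:
 *
 *	struct cc_hkey_info hki = {
 *		.keylen = AES_KEYSIZE_256,	// size of the slotted key
 *		.hw_key1 = 0,			// cipher key slot
 *		.hw_key2 = 1,			// tweak key slot (XTS etc.)
 *	};
 *	crypto_skcipher_setkey(tfm, (const u8 *)&hki, sizeof(hki));
 */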
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}

static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

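/*
 * Build the data-movement half of the sequence. A single contiguous
 * DMA buffer (DLLI) needs one DIN/DOUT descriptor; scatter-gather
 * (MLLI) first copies the LLI table into SRAM with a BYPASS descriptor
 * and then ciphers through the table, reusing it for the output when
 * the operation is in-place (out_nents == 0).
 */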
static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

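/*
 * Completion callback installed in cc_req.user_cb; runs after the HW
 * has processed the request and is responsible for unmapping, IV
 * bookkeeping and signalling the crypto API.
 */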
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	kzfree(req_ctx->iv);

	/*
	 * The crypto API expects us to set the req->iv to the last
	 * ciphertext block. For encrypt, simply copy from the result.
	 * For decrypt, we must copy from a saved buffer since this
	 * could be an in-place decryption operation and the src is
	 * lost by this point.
	 */
	if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
		memcpy(req->iv, req_ctx->backup_info, ivsize);
		kzfree(req_ctx->backup_info);
	} else if (!err) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 (req->cryptlen - ivsize),
					 ivsize, 0);
	}

	skcipher_request_complete(req, err);
}

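/*
 * Common encrypt/decrypt path: validate the request, snapshot the IV
 * into a DMA-able buffer, map everything, build the descriptor
 * sequence and hand it to the request manager. Note the CTS special
 * case: for the cts1 (CS1) variant a block-aligned message encrypts
 * identically under plain CBC, so the mode is temporarily switched to
 * CBC and restored afterwards.
 */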
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc, cts_restore_flag = 0;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/*For CTS in case of data size aligned to 16 use CBC mode*/
	if (((nbytes % AES_BLOCK_SIZE) == 0) &&
	    ctx_p->cipher_mode == DRV_CIPHER_CBC_CTS) {
		ctx_p->cipher_mode = DRV_CIPHER_CBC;
		cts_restore_flag = 1;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

#ifdef ENABLE_CYCLE_COUNT
	cc_req.op_type = (direction == DRV_CRYPTO_DIRECTION_DECRYPT) ?
		STAT_OP_TYPE_DECODE : STAT_OP_TYPE_ENCODE;

#endif

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* do we need to generate IV? */
	if (req_ctx->is_giv) {
		cc_req.ivgen_dma_addr[0] = req_ctx->gen_ctx.iv_dma_addr;
		cc_req.ivgen_dma_addr_len = 1;
		/* set the IV size (8/16 B long)*/
		cc_req.ivgen_size = ivsize;
	}

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (cts_restore_flag)
		ctx_p->cipher_mode = DRV_CIPHER_CBC_CTS;

	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	req_ctx->is_giv = false;
	req_ctx->backup_info = NULL;

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);

	/*
	 * Allocate and save the last IV sized bytes of the source, which will
	 * be lost in case of in-place decryption and might be needed for CTS.
	 */
	req_ctx->backup_info = kmalloc(ivsize, flags);
	if (!req_ctx->backup_info)
		return -ENOMEM;

	scatterwalk_map_and_copy(req_ctx->backup_info, req->src,
				 (req->cryptlen - ivsize), ivsize, 0);
	req_ctx->is_giv = false;

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

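/*
 * Template table for every skcipher this driver can expose. The "paes"
 * entries are keyed by hardware key token (cc_cipher_sethkey) and the
 * du512/du4096 variants fix the XEX data unit size instead of taking
 * it from the request length. Entries whose min_hw_rev exceeds the
 * probed hardware revision are skipped at registration time.
 */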
/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cts1(cbc(paes))",
		.driver_name = "cts1-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cts1(cbc(aes))",
		.driver_name = "cts1-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
};

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
				CRYPTO_ALG_TYPE_SKCIPHER;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

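/*
 * Teardown: walk the list of registered algs, unregister each and
 * release the handle. Also used as the error-unwind path for a
 * partially completed cc_cipher_alloc().
 */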
int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}

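/*
 * Called from the driver probe path once the HW request queue is up;
 * registers every template that the detected hardware revision
 * supports, unwinding through cc_cipher_free() on any failure.
 */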
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}