// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher

struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int cipher_mode;
	int flow_mode;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};

static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}
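
/*
 * Key size policy: for the AES flow, 128/192-bit keys are valid only for
 * the single-key modes, 256-bit keys are always valid, and double-length
 * (2 * 192/256-bit) keys are valid only for the two-key modes
 * XTS/ESSIV/BITLOCKER, which split the buffer into a cipher key and a
 * tweak/sector key. DES accepts single-DES or triple-DES sized keys.
 */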

static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}
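
/*
 * Data size policy per mode: XTS needs at least one AES block and block
 * alignment, CBC-CTS needs at least one block, CTR/OFB take any size,
 * ECB/CBC/ESSIV/BITLOCKER need block alignment, and DES flows need DES
 * block alignment.
 */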

static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= AES_BLOCK_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}
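
/*
 * tfm init: pre-allocate and DMA-map a per-context key buffer sized for
 * the algorithm's max_keysize so setkey() never has to allocate, and for
 * ESSIV grab a software SHA-256 transform up front for key2 derivation.
 */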

static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return 0;
}

static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}
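
/*
 * "HW key" setkey: instead of raw key material the caller passes a
 * struct cc_hkey_info token naming one or two on-chip key slots (e.g.
 * fused or otherwise provisioned keys), so no key bytes ever cross the
 * API boundary. Two-key modes (XTS/ESSIV/BITLOCKER) must name two
 * distinct slots.
 */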

static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}
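
/*
 * Regular (software key) setkey: validate the size, run the DES
 * weak-key and XTS equal-halves checks, copy the key into the
 * DMA-mapped context buffer and, for ESSIV, derive key2 as the SHA-256
 * digest of key1.
 */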

static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}
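
/*
 * Build the setup half of the HW descriptor sequence: load IV/state and
 * key(s) according to the cipher mode. The XTS/ESSIV/BITLOCKER path
 * emits three descriptors (AES key, XEX key, tweak state), and
 * cc_setup_cipher_data() appends at most two more, which is what
 * MAX_ABLKCIPHER_SEQ_LEN (6) is sized for.
 */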

static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/* fall through */
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

/*
 * Update a CTR-AES 128 bit counter
 */
static void cc_update_ctr(u8 *ctr, unsigned int increment)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)ctr, 8)) {

		__be64 *high_be = (__be64 *)ctr;
		__be64 *low_be = high_be + 1;
		u64 orig_low = __be64_to_cpu(*low_be);
		u64 new_low = orig_low + (u64)increment;

		*low_be = __cpu_to_be64(new_low);

		if (new_low < orig_low)
			*high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
	} else {
		u8 *pos;
		u8 val;
		unsigned int size;

		for (; increment; increment--) {
			/* Restart the carry scan at the low-order byte
			 * for every increment.
			 */
			pos = ctr + AES_BLOCK_SIZE;
			for (size = AES_BLOCK_SIZE; size; size--) {
				val = *--pos + 1;
				*pos = val;
				if (val)
					break;
			}
		}
	}
}
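
/*
 * Example: after a 7-block (112 byte) CTR request the completion path
 * calls cc_update_ctr(req->iv, 7). If the low 64 bits of the IV were
 * 0xffffffffffffffff, new_low wraps to 6 and the carry bumps the high
 * 64 bits by one; the byte-wise fallback path computes the same result.
 */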

static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	unsigned int len;

	switch (ctx_p->cipher_mode) {
	case DRV_CIPHER_CBC:
		/*
		 * The crypto API expects us to set the req->iv to the last
		 * ciphertext block. For encrypt, simply copy from the result.
		 * For decrypt, we must copy from a saved buffer since this
		 * could be an in-place decryption operation and the src is
		 * lost by this point.
		 */
		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
			memcpy(req->iv, req_ctx->backup_info, ivsize);
			kzfree(req_ctx->backup_info);
		} else if (!err) {
			len = req->cryptlen - ivsize;
			scatterwalk_map_and_copy(req->iv, req->dst, len,
						 ivsize, 0);
		}
		break;
	case DRV_CIPHER_CTR:
		/* Compute the counter of the last block */
		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
		cc_update_ctr((u8 *)req->iv, len);
		break;
	default:
		break;
	}

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	kzfree(req_ctx->iv);

	skcipher_request_complete(req, err);
}
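
/*
 * Common encrypt/decrypt entry point: validate the request size,
 * snapshot the IV into a DMA-able buffer, map the buffers, build the
 * descriptor sequence and queue it. Any return code other than
 * -EINPROGRESS/-EBUSY means the request terminated here and is cleaned
 * up synchronously.
 */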

static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);
	unsigned int len;

	memset(req_ctx, 0, sizeof(*req_ctx));

	if (ctx_p->cipher_mode == DRV_CIPHER_CBC) {

		/* Allocate and save the last IV sized bytes of the source,
		 * which will be lost in case of in-place decryption.
		 */
		req_ctx->backup_info = kzalloc(ivsize, flags);
		if (!req_ctx->backup_info)
			return -ENOMEM;

		len = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
					 ivsize, 0);
	}

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}
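
/*
 * Template table: one entry per mode/key-source combination. The
 * "paes" entries take a HW key-slot token (cc_cipher_sethkey) while the
 * plain AES/DES entries take raw keys (cc_cipher_setkey); the du512/
 * du4096 variants fix the XEX data unit size. min_hw_rev gates
 * registration on the detected CryptoCell revision.
 */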
/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
};
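
/*
 * Instantiate a struct cc_crypto_alg from a template: copy the skcipher
 * ops, fill in the crypto_alg boilerplate and stash the CC-specific
 * mode fields used later when building descriptor sequences.
 */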

static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}
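
/*
 * Probe-time registration: walk skcipher_algs[], skip entries whose
 * min_hw_rev exceeds the detected HW revision and register the rest;
 * on any failure, tear down everything registered so far via
 * cc_cipher_free().
 */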

int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}