// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher	template_u.skcipher
struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};
static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
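
/*
 * The paes ("protected AES") algs below take a HW key token rather than
 * raw key material: the token names one or two on-chip key slots and the
 * engine fetches the actual keys internally, so no key bytes ever pass
 * through the DMA buffer. cc_is_hw_key() tells the descriptor setup code
 * which of the two key-load paths a transform uses.
 */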
static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}
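
/*
 * Data size rules mirror the key checks above: XTS needs at least one
 * AES block and block alignment, CBC-CTS needs at least one block,
 * ECB/CBC/ESSIV/BITLOCKER need block-aligned input, the stream-like
 * CTR/OFB modes take any length, and DES flows must be block aligned.
 */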
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= AES_BLOCK_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}
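
/*
 * Per-tfm init: the key staging buffer is allocated and DMA mapped once
 * here and reused by every setkey() call, which then only needs
 * dma_sync_single_for_cpu()/_for_device() around the key copy.
 */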
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return 0;
}
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}
struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};
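
/*
 * Translate the 0-based slot index carried in a HW key token into the
 * HW key-slot enum; anything out of range maps to END_OF_KEYS, which the
 * callers treat as an error.
 */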
static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}
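
/*
 * For the paes algs the "key" handed to setkey() is a struct cc_hkey_info
 * token holding the slot number(s) of pre-provisioned HW keys plus the
 * effective key length, so keylen can only be validated against the token
 * size here, not against the cipher key size.
 */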
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}
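
/*
 * Regular (software key) setkey: weak DES/3DES and XTS keys are rejected
 * up front, then the key bytes are staged in the pre-mapped DMA buffer.
 * For ESSIV the second half of the buffer is filled with SHA-256(key),
 * giving the HW the same two-key layout it uses for XTS.
 */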
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		u32 tmp[DES3_EDE_EXPKEY_WORDS];
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}
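
/*
 * Build the setup portion of the HW descriptor sequence: cipher state
 * (IV/counter) and key loads. XTS/ESSIV/BITLOCKER emit three descriptors
 * (crypto key, XEX/tweak key, tweak state); the other modes emit a state
 * load followed by a single key load (ECB gets the key load only, via
 * the switch fallthrough).
 */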
static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
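
/*
 * Append the data-movement descriptors. A single contiguous DMA mapping
 * (DLLI) needs one DIN->DOUT descriptor; scattered buffers go through an
 * MLLI table that is first copied into SRAM with a BYPASS descriptor and
 * then referenced by the processing descriptor. The final descriptor of
 * an async request is flagged with set_queue_last_ind().
 */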
static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}
/*
 * Update a CTR-AES 128 bit counter
 */
static void cc_update_ctr(u8 *ctr, unsigned int increment)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)ctr, 8)) {

		__be64 *high_be = (__be64 *)ctr;
		__be64 *low_be = high_be + 1;
		u64 orig_low = __be64_to_cpu(*low_be);
		u64 new_low = orig_low + (u64)increment;

		*low_be = __cpu_to_be64(new_low);

		/* Carry from the low 64 bits into the high 64 bits */
		if (new_low < orig_low)
			*high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
	} else {
		u8 *pos = (ctr + AES_BLOCK_SIZE);
		u8 val;
		unsigned int size;

		/* Byte-wise big-endian increment with carry propagation */
		for (; increment; increment--)
			for (size = AES_BLOCK_SIZE; size; size--) {
				val = *--pos + 1;
				*pos = val;
				if (val)
					break;
			}
	}
}
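
/*
 * Completion callback, invoked once the HW is done: unmap the request and
 * fix up req->iv as the crypto API expects - the last ciphertext block
 * for CBC (taken from backup_info on decrypt, since an in-place operation
 * may have destroyed the source) or the advanced counter for CTR,
 * recomputed in software by cc_update_ctr().
 */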
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	unsigned int len;

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);

	switch (ctx_p->cipher_mode) {
	case DRV_CIPHER_CBC:
		/*
		 * The crypto API expects us to set the req->iv to the last
		 * ciphertext block. For encrypt, simply copy from the result.
		 * For decrypt, we must copy from a saved buffer since this
		 * could be an in-place decryption operation and the src is
		 * lost by this point.
		 */
		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
			memcpy(req->iv, req_ctx->backup_info, ivsize);
			kzfree(req_ctx->backup_info);
		} else if (!err) {
			len = req->cryptlen - ivsize;
			scatterwalk_map_and_copy(req->iv, req->dst, len,
						 ivsize, 0);
		}
		break;

	case DRV_CIPHER_CTR:
		/* Compute the counter of the last block */
		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
		cc_update_ctr((u8 *)req->iv, len);
		break;

	default:
		break;
	}

	kfree(req_ctx->iv);

	skcipher_request_complete(req, err);
}
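
/*
 * Common encrypt/decrypt path: validate the request, copy the IV into a
 * DMA-able buffer, map src/dst, build the descriptor sequence and queue
 * it. Completion is normally asynchronous via cc_cipher_complete(); any
 * return code other than -EINPROGRESS/-EBUSY means the request never
 * reached the queue and is cleaned up synchronously here.
 */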
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kfree(req_ctx->iv);
	}

	return rc;
}
static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}
static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);
	unsigned int len;

	memset(req_ctx, 0, sizeof(*req_ctx));

	if ((ctx_p->cipher_mode == DRV_CIPHER_CBC) &&
	    (req->cryptlen >= ivsize)) {

		/* Allocate and save the last IV sized bytes of the source,
		 * which will be lost in case of in-place decryption.
		 */
		req_ctx->backup_info = kzalloc(ivsize, flags);
		if (!req_ctx->backup_info)
			return -ENOMEM;

		len = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
					 ivsize, 0);
	}

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}
/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
};
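
/*
 * Instantiate a crypto API skcipher_alg from a template above, filling in
 * the fields common to all entries (module, priority, context size,
 * init/exit hooks and the ASYNC | KERN_DRIVER_ONLY flags).
 */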
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}
int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}
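
/*
 * Register every template supported by the probed HW revision; entries
 * whose min_hw_rev is newer than the device are silently skipped, and a
 * failure part-way through unregisters everything via cc_cipher_free().
 */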
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}