drivers/crypto/ccree/cc_cipher.c
// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2019 ARM Limited (or its affiliates). */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/sm4.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"

#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher
struct cc_cipher_handle {
	struct list_head alg_list;
};

struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};

struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};

struct cc_cpp_key_info {
	u8 slot;
	enum cc_cpp_alg alg;
};

enum cc_key_type {
	CC_UNPROTECTED_KEY,		/* User key */
	CC_HW_PROTECTED_KEY,		/* HW (FDE) key */
	CC_POLICY_PROTECTED_KEY,	/* CPP key */
	CC_INVALID_PROTECTED_KEY	/* Invalid key */
};

struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	enum cc_key_type key_type;
	struct cc_user_key_info user;
	union {
		struct cc_hw_key_info hw;
		struct cc_cpp_key_info cpp;
	};
	struct crypto_shash *shash_tfm;
};
static void cc_cipher_complete(struct device *dev, void *cc_req, int err);

static inline enum cc_key_type cc_key_type(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->key_type;
}

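/*
 * Check that a key size is valid for the configured flow mode (AES, DES
 * or SM4 engine) and cipher mode. XTS/ESSIV/BITLOCKER carry a key pair,
 * hence the doubled AES sizes. Returns 0 if supported, -EINVAL otherwise.
 */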
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	case S_DIN_to_SM4:
		if (size == SM4_KEY_SIZE)
			return 0;
	default:
		break;
	}
	return -EINVAL;
}

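/*
 * Check that the request length is valid for the configured mode:
 * stream-like modes (CTR, OFB) accept any size, CTS and XTS need at
 * least one block, and the block modes must be block aligned. Returns
 * 0 if the size is acceptable, -EINVAL otherwise.
 */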
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= AES_BLOCK_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	case S_DIN_to_SM4:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
			if (IS_ALIGNED(size, SM4_BLOCK_SIZE))
				return 0;
		default:
			break;
		}
	default:
		break;
	}
	return -EINVAL;
}

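/*
 * tfm init: allocate and DMA-map a per-context key buffer sized for the
 * algorithm's maximum key, and for ESSIV also allocate the software
 * sha256 transform used to derive the second key half.
 */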
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	int rc = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return rc;
}

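/*
 * tfm exit: undo cc_cipher_init() - free the ESSIV hash tfm (if any),
 * then unmap and free the key buffer.
 */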
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}

struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};

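/* Map a HW (FDE) key slot number to the matching descriptor key code,
 * or END_OF_KEYS if the slot is out of range.
 */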
static enum cc_hw_crypto_key cc_slot_to_hw_key(u8 slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}

static u8 cc_slot_to_cpp_key(u8 slot_num)
{
	return (slot_num - CC_FIRST_CPP_KEY_SLOT);
}

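/* Classify a protected key slot number as a HW (FDE) key, a CPP
 * (policy) key, or invalid, based on the slot ranges.
 */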
static inline enum cc_key_type cc_slot_to_key_type(u8 slot_num)
{
	if (slot_num >= CC_FIRST_HW_KEY_SLOT && slot_num <= CC_LAST_HW_KEY_SLOT)
		return CC_HW_PROTECTED_KEY;
	else if (slot_num >= CC_FIRST_CPP_KEY_SLOT &&
		 slot_num <= CC_LAST_CPP_KEY_SLOT)
		return CC_POLICY_PROTECTED_KEY;
	else
		return CC_INVALID_PROTECTED_KEY;
}

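/*
 * setkey handler for the protected key (paes/psm4) algorithms: the
 * "key" is a cc_hkey_info token naming one or two key slots rather
 * than actual key material. The token is validated and the resolved
 * slots and key type are cached in the context.
 */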
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the protected key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported protected key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->keylen = keylen;

	switch (cc_slot_to_key_type(hki.hw_key1)) {
	case CC_HW_PROTECTED_KEY:
		if (ctx_p->flow_mode == S_DIN_to_SM4) {
			dev_err(dev, "Only AES HW protected keys are supported\n");
			return -EINVAL;
		}

		ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
		if (ctx_p->hw.key1_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key1 number (%d)\n",
				hki.hw_key1);
			return -EINVAL;
		}

		if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
		    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
		    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
			if (hki.hw_key1 == hki.hw_key2) {
				dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
					hki.hw_key1, hki.hw_key2);
				return -EINVAL;
			}

			ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
			if (ctx_p->hw.key2_slot == END_OF_KEYS) {
				dev_err(dev, "Unsupported hw key2 number (%d)\n",
					hki.hw_key2);
				return -EINVAL;
			}
		}

		ctx_p->key_type = CC_HW_PROTECTED_KEY;
		dev_dbg(dev, "HW protected key %d/%d set.\n",
			ctx_p->hw.key1_slot, ctx_p->hw.key2_slot);
		break;

	case CC_POLICY_PROTECTED_KEY:
		if (ctx_p->drvdata->hw_rev < CC_HW_REV_713) {
			dev_err(dev, "CPP keys not supported in this hardware revision.\n");
			return -EINVAL;
		}

		if (ctx_p->cipher_mode != DRV_CIPHER_CBC &&
		    ctx_p->cipher_mode != DRV_CIPHER_CTR) {
			dev_err(dev, "CPP keys only supported in CBC or CTR modes.\n");
			return -EINVAL;
		}

		ctx_p->cpp.slot = cc_slot_to_cpp_key(hki.hw_key1);
		if (ctx_p->flow_mode == S_DIN_to_AES)
			ctx_p->cpp.alg = CC_CPP_AES;
		else /* Must be SM4 due to sethkey registration */
			ctx_p->cpp.alg = CC_CPP_SM4;
		ctx_p->key_type = CC_POLICY_PROTECTED_KEY;
		dev_dbg(dev, "policy protected key alg: %d slot: %d.\n",
			ctx_p->cpp.alg, ctx_p->cpp.slot);
		break;

	default:
		dev_err(dev, "Unsupported protected key (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	return 0;
}

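/*
 * setkey handler for regular (unprotected) keys: validate the size,
 * reject weak DES/XTS keys, copy the key into the DMA-mapped context
 * buffer and, for ESSIV, derive the second key half as sha256(key).
 */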
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->key_type = CC_UNPROTECTED_KEY;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		u32 tmp[DES3_EDE_EXPKEY_WORDS];
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) &
			    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}

static int cc_out_setup_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return S_AES_to_DOUT;
	case S_DIN_to_DES:
		return S_DES_to_DOUT;
	case S_DIN_to_SM4:
		return S_SM4_to_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

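/*
 * Append a descriptor that writes the updated IV/state back to the
 * request's IV buffer once the data flow completes, so a chained call
 * can continue from the next IV. Skipped for ECB (no state) and for
 * CPP keys (handled by the CPP engine).
 */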
static void cc_setup_readiv_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = cc_out_setup_mode(ctx_p);
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;

	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY)
		return;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Read next IV */
		hw_desc_init(&desc[*seq_size]);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE0);
		}
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_WRITE_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_dout_dlli(&desc[*seq_size], iv_dma_addr, CC_AES_BLOCK_SIZE,
			      NS_BIT, 1);
		set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

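/*
 * Append the state setup descriptors: load the IV for the feedback
 * modes, or the XEX key (second key half, or HW key slot 2) plus the
 * IV for XTS/ESSIV/BITLOCKER. ECB needs no state.
 */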
static void cc_setup_state_desc(struct crypto_tfm *tfm,
				struct cipher_req_ctx *req_ctx,
				unsigned int ivsize, unsigned int nbytes,
				struct cc_hw_desc desc[],
				unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_ECB:
		break;
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Load IV */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

static int cc_out_flow_mode(struct cc_cipher_ctx *ctx_p)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		return DIN_AES_DOUT;
	case S_DIN_to_DES:
		return DIN_DES_DOUT;
	case S_DIN_to_SM4:
		return DIN_SM4_DOUT;
	default:
		return ctx_p->flow_mode;
	}
}

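/*
 * Append the key load descriptor. Depending on the key type this
 * either references a HW/CPP key slot or DMAs the user key from the
 * context buffer; for the XEX-based modes only the first half of the
 * key is loaded here (the second half goes in the state descriptor).
 */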
static void cc_setup_key_desc(struct crypto_tfm *tfm,
			      struct cipher_req_ctx *req_ctx,
			      unsigned int nbytes, struct cc_hw_desc desc[],
			      unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	unsigned int din_size;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);

		if (cc_key_type(tfm) == CC_POLICY_PROTECTED_KEY) {
			/* We use the AES key size coding for all CPP algs */
			set_key_size_aes(&desc[*seq_size], key_len);
			set_cpp_crypto_key(&desc[*seq_size], ctx_p->cpp.slot);
			flow_mode = cc_out_flow_mode(ctx_p);
		} else {
			if (flow_mode == S_DIN_to_AES) {
				if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
					set_hw_crypto_key(&desc[*seq_size],
							  ctx_p->hw.key1_slot);
				} else {
					/* CC_UNPROTECTED_KEY
					 * Invalid keys are filtered out in
					 * sethkey()
					 */
					din_size = (key_len == 24) ?
						AES_MAX_KEY_SIZE : key_len;

					set_din_type(&desc[*seq_size], DMA_DLLI,
						     key_dma_addr, din_size,
						     NS_BIT);
				}
				set_key_size_aes(&desc[*seq_size], key_len);
			} else {
				/* des */
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, key_len, NS_BIT);
				set_key_size_des(&desc[*seq_size], key_len);
			}
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_key_type(tfm) == CC_HW_PROTECTED_KEY) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}

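/* If the request was mapped as an MLLI table, add a BYPASS descriptor
 * that copies the table from host memory into the MLLI SRAM.
 */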
static void cc_setup_mlli_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, void *areq,
			       struct cc_hw_desc desc[], unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	if (req_ctx->dma_buf_type == CC_DMA_BUF_MLLI) {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;
	}
}

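/*
 * Append the data-processing descriptor, DLLI for contiguous buffers
 * or MLLI otherwise. It is marked as the last descriptor of the
 * sequence only when no IV read-back will follow (ECB or CPP keys).
 */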
static void cc_setup_flow_desc(struct crypto_tfm *tfm,
			       struct cipher_req_ctx *req_ctx,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes, struct cc_hw_desc desc[],
			       unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = cc_out_flow_mode(ctx_p);
	bool last_desc = (ctx_p->key_type == CC_POLICY_PROTECTED_KEY ||
			  ctx_p->cipher_mode == DRV_CIPHER_ECB);

	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!last_desc ? 0 : 1));
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!last_desc ? 0 : 1));
		}
		if (last_desc)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}

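/* Completion callback: unless this is only a backlog notification,
 * unmap the request, copy the (possibly updated) IV back to the caller
 * and free the DMA-safe IV copy.
 */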
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);

	if (err != -EINPROGRESS) {
		/* Not a BACKLOG notification */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
		memcpy(req->iv, req_ctx->iv, ivsize);
		kzfree(req_ctx->iv);
	}

	skcipher_request_complete(req, err);
}

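/*
 * Common encrypt/decrypt path: validate the request, copy the IV to a
 * DMA-able buffer, map the buffers, build the descriptor sequence
 * (state, MLLI, key, data flow, IV read-back) and queue it on the HW.
 */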
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup CPP operation details */
	if (ctx_p->key_type == CC_POLICY_PROTECTED_KEY) {
		cc_req.cpp.is_cpp = true;
		cc_req.cpp.alg = ctx_p->cpp.alg;
		cc_req.cpp.slot = ctx_p->cpp.slot;
	}

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup IV and XEX key used */
	cc_setup_state_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Setup MLLI line, if needed */
	cc_setup_mlli_desc(tfm, req_ctx, dst, src, nbytes, req, desc, &seq_len);
	/* Setup key */
	cc_setup_key_desc(tfm, req_ctx, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_flow_desc(tfm, req_ctx, dst, src, nbytes, desc, &seq_len);
	/* Read next IV */
	cc_setup_readiv_desc(tfm, req_ctx, ivsize, desc, &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->iv);
	}

	return rc;
}

static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}

static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}

/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
	{
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
		.std_body = CC_STD_NIST,
		.sec_func = true,
	},
1204 .name = "xts(aes)",
1205 .driver_name = "xts-aes-ccree",
1206 .blocksize = AES_BLOCK_SIZE,
1207 .template_skcipher = {
1208 .setkey = cc_cipher_setkey,
1209 .encrypt = cc_cipher_encrypt,
1210 .decrypt = cc_cipher_decrypt,
1211 .min_keysize = AES_MIN_KEY_SIZE * 2,
1212 .max_keysize = AES_MAX_KEY_SIZE * 2,
1213 .ivsize = AES_BLOCK_SIZE,
1215 .cipher_mode = DRV_CIPHER_XTS,
1216 .flow_mode = S_DIN_to_AES,
1217 .min_hw_rev = CC_HW_REV_630,
1218 .std_body = CC_STD_NIST,
1221 .name = "xts512(aes)",
1222 .driver_name = "xts-aes-du512-ccree",
1223 .blocksize = AES_BLOCK_SIZE,
1224 .template_skcipher = {
1225 .setkey = cc_cipher_setkey,
1226 .encrypt = cc_cipher_encrypt,
1227 .decrypt = cc_cipher_decrypt,
1228 .min_keysize = AES_MIN_KEY_SIZE * 2,
1229 .max_keysize = AES_MAX_KEY_SIZE * 2,
1230 .ivsize = AES_BLOCK_SIZE,
1232 .cipher_mode = DRV_CIPHER_XTS,
1233 .flow_mode = S_DIN_to_AES,
1234 .data_unit = 512,
1235 .min_hw_rev = CC_HW_REV_712,
1236 .std_body = CC_STD_NIST,
1239 .name = "xts4096(aes)",
1240 .driver_name = "xts-aes-du4096-ccree",
1241 .blocksize = AES_BLOCK_SIZE,
1242 .template_skcipher = {
1243 .setkey = cc_cipher_setkey,
1244 .encrypt = cc_cipher_encrypt,
1245 .decrypt = cc_cipher_decrypt,
1246 .min_keysize = AES_MIN_KEY_SIZE * 2,
1247 .max_keysize = AES_MAX_KEY_SIZE * 2,
1248 .ivsize = AES_BLOCK_SIZE,
1250 .cipher_mode = DRV_CIPHER_XTS,
1251 .flow_mode = S_DIN_to_AES,
1252 .data_unit = 4096,
1253 .min_hw_rev = CC_HW_REV_712,
1254 .std_body = CC_STD_NIST,
1257 .name = "essiv(aes)",
1258 .driver_name = "essiv-aes-ccree",
1259 .blocksize = AES_BLOCK_SIZE,
1260 .template_skcipher = {
1261 .setkey = cc_cipher_setkey,
1262 .encrypt = cc_cipher_encrypt,
1263 .decrypt = cc_cipher_decrypt,
1264 .min_keysize = AES_MIN_KEY_SIZE * 2,
1265 .max_keysize = AES_MAX_KEY_SIZE * 2,
1266 .ivsize = AES_BLOCK_SIZE,
1268 .cipher_mode = DRV_CIPHER_ESSIV,
1269 .flow_mode = S_DIN_to_AES,
1270 .min_hw_rev = CC_HW_REV_712,
1271 .std_body = CC_STD_NIST,
1274 .name = "essiv512(aes)",
1275 .driver_name = "essiv-aes-du512-ccree",
1276 .blocksize = AES_BLOCK_SIZE,
1277 .template_skcipher = {
1278 .setkey = cc_cipher_setkey,
1279 .encrypt = cc_cipher_encrypt,
1280 .decrypt = cc_cipher_decrypt,
1281 .min_keysize = AES_MIN_KEY_SIZE * 2,
1282 .max_keysize = AES_MAX_KEY_SIZE * 2,
1283 .ivsize = AES_BLOCK_SIZE,
1285 .cipher_mode = DRV_CIPHER_ESSIV,
1286 .flow_mode = S_DIN_to_AES,
1287 .data_unit = 512,
1288 .min_hw_rev = CC_HW_REV_712,
1289 .std_body = CC_STD_NIST,
1292 .name = "essiv4096(aes)",
1293 .driver_name = "essiv-aes-du4096-ccree",
1294 .blocksize = AES_BLOCK_SIZE,
1295 .template_skcipher = {
1296 .setkey = cc_cipher_setkey,
1297 .encrypt = cc_cipher_encrypt,
1298 .decrypt = cc_cipher_decrypt,
1299 .min_keysize = AES_MIN_KEY_SIZE * 2,
1300 .max_keysize = AES_MAX_KEY_SIZE * 2,
1301 .ivsize = AES_BLOCK_SIZE,
1303 .cipher_mode = DRV_CIPHER_ESSIV,
1304 .flow_mode = S_DIN_to_AES,
1305 .data_unit = 4096,
1306 .min_hw_rev = CC_HW_REV_712,
1307 .std_body = CC_STD_NIST,
1310 .name = "bitlocker(aes)",
1311 .driver_name = "bitlocker-aes-ccree",
1312 .blocksize = AES_BLOCK_SIZE,
1313 .template_skcipher = {
1314 .setkey = cc_cipher_setkey,
1315 .encrypt = cc_cipher_encrypt,
1316 .decrypt = cc_cipher_decrypt,
1317 .min_keysize = AES_MIN_KEY_SIZE * 2,
1318 .max_keysize = AES_MAX_KEY_SIZE * 2,
1319 .ivsize = AES_BLOCK_SIZE,
1321 .cipher_mode = DRV_CIPHER_BITLOCKER,
1322 .flow_mode = S_DIN_to_AES,
1323 .min_hw_rev = CC_HW_REV_712,
1324 .std_body = CC_STD_NIST,
1327 .name = "bitlocker512(aes)",
1328 .driver_name = "bitlocker-aes-du512-ccree",
1329 .blocksize = AES_BLOCK_SIZE,
1330 .template_skcipher = {
1331 .setkey = cc_cipher_setkey,
1332 .encrypt = cc_cipher_encrypt,
1333 .decrypt = cc_cipher_decrypt,
1334 .min_keysize = AES_MIN_KEY_SIZE * 2,
1335 .max_keysize = AES_MAX_KEY_SIZE * 2,
1336 .ivsize = AES_BLOCK_SIZE,
1338 .cipher_mode = DRV_CIPHER_BITLOCKER,
1339 .flow_mode = S_DIN_to_AES,
1340 .data_unit = 512,
1341 .min_hw_rev = CC_HW_REV_712,
1342 .std_body = CC_STD_NIST,
1345 .name = "bitlocker4096(aes)",
1346 .driver_name = "bitlocker-aes-du4096-ccree",
1347 .blocksize = AES_BLOCK_SIZE,
1348 .template_skcipher = {
1349 .setkey = cc_cipher_setkey,
1350 .encrypt = cc_cipher_encrypt,
1351 .decrypt = cc_cipher_decrypt,
1352 .min_keysize = AES_MIN_KEY_SIZE * 2,
1353 .max_keysize = AES_MAX_KEY_SIZE * 2,
1354 .ivsize = AES_BLOCK_SIZE,
1356 .cipher_mode = DRV_CIPHER_BITLOCKER,
1357 .flow_mode = S_DIN_to_AES,
1358 .data_unit = 4096,
1359 .min_hw_rev = CC_HW_REV_712,
1360 .std_body = CC_STD_NIST,
1363 .name = "ecb(aes)",
1364 .driver_name = "ecb-aes-ccree",
1365 .blocksize = AES_BLOCK_SIZE,
1366 .template_skcipher = {
1367 .setkey = cc_cipher_setkey,
1368 .encrypt = cc_cipher_encrypt,
1369 .decrypt = cc_cipher_decrypt,
1370 .min_keysize = AES_MIN_KEY_SIZE,
1371 .max_keysize = AES_MAX_KEY_SIZE,
1372 .ivsize = 0,
1374 .cipher_mode = DRV_CIPHER_ECB,
1375 .flow_mode = S_DIN_to_AES,
1376 .min_hw_rev = CC_HW_REV_630,
1377 .std_body = CC_STD_NIST,
1380 .name = "cbc(aes)",
1381 .driver_name = "cbc-aes-ccree",
1382 .blocksize = AES_BLOCK_SIZE,
1383 .template_skcipher = {
1384 .setkey = cc_cipher_setkey,
1385 .encrypt = cc_cipher_encrypt,
1386 .decrypt = cc_cipher_decrypt,
1387 .min_keysize = AES_MIN_KEY_SIZE,
1388 .max_keysize = AES_MAX_KEY_SIZE,
1389 .ivsize = AES_BLOCK_SIZE,
1391 .cipher_mode = DRV_CIPHER_CBC,
1392 .flow_mode = S_DIN_to_AES,
1393 .min_hw_rev = CC_HW_REV_630,
1394 .std_body = CC_STD_NIST,
1397 .name = "ofb(aes)",
1398 .driver_name = "ofb-aes-ccree",
1399 .blocksize = AES_BLOCK_SIZE,
1400 .template_skcipher = {
1401 .setkey = cc_cipher_setkey,
1402 .encrypt = cc_cipher_encrypt,
1403 .decrypt = cc_cipher_decrypt,
1404 .min_keysize = AES_MIN_KEY_SIZE,
1405 .max_keysize = AES_MAX_KEY_SIZE,
1406 .ivsize = AES_BLOCK_SIZE,
1408 .cipher_mode = DRV_CIPHER_OFB,
1409 .flow_mode = S_DIN_to_AES,
1410 .min_hw_rev = CC_HW_REV_630,
1411 .std_body = CC_STD_NIST,
1414 .name = "cts(cbc(aes))",
1415 .driver_name = "cts-cbc-aes-ccree",
1416 .blocksize = AES_BLOCK_SIZE,
1417 .template_skcipher = {
1418 .setkey = cc_cipher_setkey,
1419 .encrypt = cc_cipher_encrypt,
1420 .decrypt = cc_cipher_decrypt,
1421 .min_keysize = AES_MIN_KEY_SIZE,
1422 .max_keysize = AES_MAX_KEY_SIZE,
1423 .ivsize = AES_BLOCK_SIZE,
1425 .cipher_mode = DRV_CIPHER_CBC_CTS,
1426 .flow_mode = S_DIN_to_AES,
1427 .min_hw_rev = CC_HW_REV_630,
1428 .std_body = CC_STD_NIST,
1431 .name = "ctr(aes)",
1432 .driver_name = "ctr-aes-ccree",
1433 .blocksize = 1,
1434 .template_skcipher = {
1435 .setkey = cc_cipher_setkey,
1436 .encrypt = cc_cipher_encrypt,
1437 .decrypt = cc_cipher_decrypt,
1438 .min_keysize = AES_MIN_KEY_SIZE,
1439 .max_keysize = AES_MAX_KEY_SIZE,
1440 .ivsize = AES_BLOCK_SIZE,
1442 .cipher_mode = DRV_CIPHER_CTR,
1443 .flow_mode = S_DIN_to_AES,
1444 .min_hw_rev = CC_HW_REV_630,
1445 .std_body = CC_STD_NIST,
1448 .name = "cbc(des3_ede)",
1449 .driver_name = "cbc-3des-ccree",
1450 .blocksize = DES3_EDE_BLOCK_SIZE,
1451 .template_skcipher = {
1452 .setkey = cc_cipher_setkey,
1453 .encrypt = cc_cipher_encrypt,
1454 .decrypt = cc_cipher_decrypt,
1455 .min_keysize = DES3_EDE_KEY_SIZE,
1456 .max_keysize = DES3_EDE_KEY_SIZE,
1457 .ivsize = DES3_EDE_BLOCK_SIZE,
1459 .cipher_mode = DRV_CIPHER_CBC,
1460 .flow_mode = S_DIN_to_DES,
1461 .min_hw_rev = CC_HW_REV_630,
1462 .std_body = CC_STD_NIST,
1465 .name = "ecb(des3_ede)",
1466 .driver_name = "ecb-3des-ccree",
1467 .blocksize = DES3_EDE_BLOCK_SIZE,
1468 .template_skcipher = {
1469 .setkey = cc_cipher_setkey,
1470 .encrypt = cc_cipher_encrypt,
1471 .decrypt = cc_cipher_decrypt,
1472 .min_keysize = DES3_EDE_KEY_SIZE,
1473 .max_keysize = DES3_EDE_KEY_SIZE,
1474 .ivsize = 0,
1476 .cipher_mode = DRV_CIPHER_ECB,
1477 .flow_mode = S_DIN_to_DES,
1478 .min_hw_rev = CC_HW_REV_630,
1479 .std_body = CC_STD_NIST,
1482 .name = "cbc(des)",
1483 .driver_name = "cbc-des-ccree",
1484 .blocksize = DES_BLOCK_SIZE,
1485 .template_skcipher = {
1486 .setkey = cc_cipher_setkey,
1487 .encrypt = cc_cipher_encrypt,
1488 .decrypt = cc_cipher_decrypt,
1489 .min_keysize = DES_KEY_SIZE,
1490 .max_keysize = DES_KEY_SIZE,
1491 .ivsize = DES_BLOCK_SIZE,
1493 .cipher_mode = DRV_CIPHER_CBC,
1494 .flow_mode = S_DIN_to_DES,
1495 .min_hw_rev = CC_HW_REV_630,
1496 .std_body = CC_STD_NIST,
1499 .name = "ecb(des)",
1500 .driver_name = "ecb-des-ccree",
1501 .blocksize = DES_BLOCK_SIZE,
1502 .template_skcipher = {
1503 .setkey = cc_cipher_setkey,
1504 .encrypt = cc_cipher_encrypt,
1505 .decrypt = cc_cipher_decrypt,
1506 .min_keysize = DES_KEY_SIZE,
1507 .max_keysize = DES_KEY_SIZE,
1508 .ivsize = 0,
1510 .cipher_mode = DRV_CIPHER_ECB,
1511 .flow_mode = S_DIN_to_DES,
1512 .min_hw_rev = CC_HW_REV_630,
1513 .std_body = CC_STD_NIST,
1516 .name = "cbc(sm4)",
1517 .driver_name = "cbc-sm4-ccree",
1518 .blocksize = SM4_BLOCK_SIZE,
1519 .template_skcipher = {
1520 .setkey = cc_cipher_setkey,
1521 .encrypt = cc_cipher_encrypt,
1522 .decrypt = cc_cipher_decrypt,
1523 .min_keysize = SM4_KEY_SIZE,
1524 .max_keysize = SM4_KEY_SIZE,
1525 .ivsize = SM4_BLOCK_SIZE,
1527 .cipher_mode = DRV_CIPHER_CBC,
1528 .flow_mode = S_DIN_to_SM4,
1529 .min_hw_rev = CC_HW_REV_713,
1530 .std_body = CC_STD_OSCCA,
1533 .name = "ecb(sm4)",
1534 .driver_name = "ecb-sm4-ccree",
1535 .blocksize = SM4_BLOCK_SIZE,
1536 .template_skcipher = {
1537 .setkey = cc_cipher_setkey,
1538 .encrypt = cc_cipher_encrypt,
1539 .decrypt = cc_cipher_decrypt,
1540 .min_keysize = SM4_KEY_SIZE,
1541 .max_keysize = SM4_KEY_SIZE,
1542 .ivsize = 0,
1544 .cipher_mode = DRV_CIPHER_ECB,
1545 .flow_mode = S_DIN_to_SM4,
1546 .min_hw_rev = CC_HW_REV_713,
1547 .std_body = CC_STD_OSCCA,
1550 .name = "ctr(sm4)",
1551 .driver_name = "ctr-sm4-ccree",
1552 .blocksize = SM4_BLOCK_SIZE,
1553 .template_skcipher = {
1554 .setkey = cc_cipher_setkey,
1555 .encrypt = cc_cipher_encrypt,
1556 .decrypt = cc_cipher_decrypt,
1557 .min_keysize = SM4_KEY_SIZE,
1558 .max_keysize = SM4_KEY_SIZE,
1559 .ivsize = SM4_BLOCK_SIZE,
1561 .cipher_mode = DRV_CIPHER_CTR,
1562 .flow_mode = S_DIN_to_SM4,
1563 .min_hw_rev = CC_HW_REV_713,
1564 .std_body = CC_STD_OSCCA,
1567 .name = "cbc(psm4)",
1568 .driver_name = "cbc-psm4-ccree",
1569 .blocksize = SM4_BLOCK_SIZE,
1570 .template_skcipher = {
1571 .setkey = cc_cipher_sethkey,
1572 .encrypt = cc_cipher_encrypt,
1573 .decrypt = cc_cipher_decrypt,
1574 .min_keysize = CC_HW_KEY_SIZE,
1575 .max_keysize = CC_HW_KEY_SIZE,
1576 .ivsize = SM4_BLOCK_SIZE,
1578 .cipher_mode = DRV_CIPHER_CBC,
1579 .flow_mode = S_DIN_to_SM4,
1580 .min_hw_rev = CC_HW_REV_713,
1581 .std_body = CC_STD_OSCCA,
1582 .sec_func = true,
1585 .name = "ctr(psm4)",
1586 .driver_name = "ctr-psm4-ccree",
1587 .blocksize = SM4_BLOCK_SIZE,
1588 .template_skcipher = {
1589 .setkey = cc_cipher_sethkey,
1590 .encrypt = cc_cipher_encrypt,
1591 .decrypt = cc_cipher_decrypt,
1592 .min_keysize = CC_HW_KEY_SIZE,
1593 .max_keysize = CC_HW_KEY_SIZE,
1594 .ivsize = SM4_BLOCK_SIZE,
1596 .cipher_mode = DRV_CIPHER_CTR,
1597 .flow_mode = S_DIN_to_SM4,
1598 .min_hw_rev = CC_HW_REV_713,
1599 .std_body = CC_STD_OSCCA,
1600 .sec_func = true,
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}

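/* Unregister all skciphers registered by cc_cipher_alloc() and release
 * the handle.
 */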
int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}

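/*
 * Register every template from skcipher_algs[] that the probed hardware
 * supports, filtering on HW revision, enabled standards bodies and the
 * security-disabled indication.
 */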
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if ((skcipher_algs[alg].min_hw_rev > drvdata->hw_rev) ||
		    !(drvdata->std_bodies & skcipher_algs[alg].std_body) ||
		    (drvdata->sec_disabled && skcipher_algs[alg].sec_func))
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}
	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}