/* drivers/crypto/ccree/cc_cipher.c */
// SPDX-License-Identifier: GPL-2.0
/* Copyright (C) 2012-2018 ARM Limited or its affiliates. */

#include <linux/kernel.h>
#include <linux/module.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <crypto/des.h>
#include <crypto/xts.h>
#include <crypto/scatterwalk.h>

#include "cc_driver.h"
#include "cc_lli_defs.h"
#include "cc_buffer_mgr.h"
#include "cc_cipher.h"
#include "cc_request_mgr.h"
#define MAX_ABLKCIPHER_SEQ_LEN 6

#define template_skcipher template_u.skcipher
struct cc_cipher_handle {
	struct list_head alg_list;
};
struct cc_user_key_info {
	u8 *key;
	dma_addr_t key_dma_addr;
};
struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;
	enum cc_hw_crypto_key key2_slot;
};
struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;
	int keylen;
	int key_round_number;
	int cipher_mode;
	int flow_mode;
	unsigned int flags;
	bool hw_key;
	struct cc_user_key_info user;
	struct cc_hw_key_info hw;
	struct crypto_shash *shash_tfm;
};
static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);

	return ctx_p->hw_key;
}
static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (size) {
		case CC_AES_128_BIT_KEY_SIZE:
		case CC_AES_192_BIT_KEY_SIZE:
			if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
			    ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
			    ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		case CC_AES_256_BIT_KEY_SIZE:
			return 0;
		case (CC_AES_192_BIT_KEY_SIZE * 2):
		case (CC_AES_256_BIT_KEY_SIZE * 2):
			if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
			    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
			    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}
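
/*
 * Note: XTS, ESSIV and BITLOCKER consume two AES keys concatenated in a
 * single buffer, which is why the doubled sizes (2 * 192 and 2 * 256 bit)
 * validate only for those modes; cc_setup_cipher_desc() below splits the
 * buffer into two key_len / 2 halves. E.g. a 64-byte xts(aes) key is a
 * 256-bit cipher key followed by a 256-bit XEX/tweak key.
 */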
static int validate_data_size(struct cc_cipher_ctx *ctx_p,
			      unsigned int size)
{
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		switch (ctx_p->cipher_mode) {
		case DRV_CIPHER_XTS:
			if (size >= AES_BLOCK_SIZE &&
			    IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		case DRV_CIPHER_CBC_CTS:
			if (size >= AES_BLOCK_SIZE)
				return 0;
			break;
		case DRV_CIPHER_OFB:
		case DRV_CIPHER_CTR:
			return 0;
		case DRV_CIPHER_ECB:
		case DRV_CIPHER_CBC:
		case DRV_CIPHER_ESSIV:
		case DRV_CIPHER_BITLOCKER:
			if (IS_ALIGNED(size, AES_BLOCK_SIZE))
				return 0;
			break;
		default:
			break;
		}
		break;
	case S_DIN_to_DES:
		if (IS_ALIGNED(size, DES_BLOCK_SIZE))
			return 0;
		break;
	default:
		break;
	}
	return -EINVAL;
}
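
/*
 * Note: the data-size rules mirror the mode semantics: CTR and OFB are
 * stream-like and accept any length, CBC/ECB/ESSIV/BITLOCKER need
 * block-aligned input, this XTS path additionally requires block
 * alignment, and CTS needs at least one full block to steal from.
 */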
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	int rc = 0;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		return -ENOMEM;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		return -ENOMEM;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	return rc;
}
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
		container_of(alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}
struct tdes_keys {
	u8 key1[DES_KEY_SIZE];
	u8 key2[DES_KEY_SIZE];
	u8 key3[DES_KEY_SIZE];
};
static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
{
	switch (slot_num) {
	case 0:
		return KFDE0_KEY;
	case 1:
		return KFDE1_KEY;
	case 2:
		return KFDE2_KEY;
	case 3:
		return KFDE3_KEY;
	}
	return END_OF_KEYS;
}
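
/*
 * Note: the hardware exposes four key slots (KFDE0..KFDE3) that can be
 * provisioned with key material outside the kernel's view;
 * cc_slot_to_hw_key() maps a slot index from the user-supplied key token
 * to the matching slot enum, with END_OF_KEYS doubling as the
 * "no such slot" error value.
 */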
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This checks the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}
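
/*
 * Note: for the "paes" (protected AES) variants the setkey payload is not
 * key material but a struct cc_hkey_info token naming one or two of the
 * hardware key slots above plus the effective key length, so the key bits
 * themselves need not pass through the kernel at all.
 */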
static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
			    unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	u32 tmp[DES3_EDE_EXPKEY_WORDS];
	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw_key = false;

	/*
	 * Verify DES weak keys
	 * Note that we're dropping the expanded key since the
	 * HW does the expansion on its own.
	 */
	if (ctx_p->flow_mode == S_DIN_to_DES) {
		if (keylen == DES3_EDE_KEY_SIZE &&
		    __des3_ede_setkey(tmp, &tfm->crt_flags, key,
				      DES3_EDE_KEY_SIZE)) {
			dev_dbg(dev, "weak 3DES key");
			return -EINVAL;
		} else if (!des_ekey(tmp, key) &&
			   (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
			tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
			dev_dbg(dev, "weak DES key");
			return -EINVAL;
		}
	}

	if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
	    xts_check_key(tfm, key, keylen)) {
		dev_dbg(dev, "weak XTS key");
		return -EINVAL;
	}

	/* STAT_PHASE_1: Copy key to ctx */
	dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
				max_key_buf_size, DMA_TO_DEVICE);

	memcpy(ctx_p->user.key, key, keylen);
	if (keylen == 24)
		memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* sha256 for key2 - use sw implementation */
		int key_len = keylen >> 1;
		int err;

		SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);

		desc->tfm = ctx_p->shash_tfm;

		err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
					  ctx_p->user.key + key_len);
		if (err) {
			dev_err(dev, "Failed to hash ESSIV key.\n");
			return err;
		}
	}
	dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
				   max_key_buf_size, DMA_TO_DEVICE);
	ctx_p->keylen = keylen;

	dev_dbg(dev, "return safely");
	return 0;
}
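
/*
 * Note: for ESSIV the second key must be the hash of the first. The code
 * above keeps K1 in the first half of the key buffer and overwrites the
 * second half with SHA-256(K1), computed via the software
 * "sha256-generic" shash allocated in cc_cipher_init(), so the hardware
 * then loads both halves like any other double-length key.
 */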
static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
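
/*
 * Note on the sequence shapes built above: CBC/CTS/CTR/OFB emit a state
 * (IV) descriptor followed by a key descriptor (ECB skips the state via
 * the fallthrough), while the XTS/ESSIV/BITLOCKER family emits three
 * setup descriptors: cipher key, XEX/tweak key (second half of the key
 * buffer or the second HW slot), then the 16-byte tweak/IV state.
 * Together with the data descriptors this stays within
 * MAX_ABLKCIPHER_SEQ_LEN entries.
 */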
static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}
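
/*
 * Note: CC_DMA_BUF_DLLI covers the contiguous case (one DIN/DOUT
 * descriptor straight from/to the scatterlist). Otherwise the MLLI
 * (linked-list) table is first copied into the engine's SRAM with a
 * BYPASS descriptor, and the processing descriptor then reads the
 * gathered input and, for out-of-place requests, writes through a second
 * MLLI table placed right after the input one.
 */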
/*
 * Update a CTR-AES 128 bit counter
 */
static void cc_update_ctr(u8 *ctr, unsigned int increment)
{
	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)ctr, 8)) {

		__be64 *high_be = (__be64 *)ctr;
		__be64 *low_be = high_be + 1;
		u64 orig_low = __be64_to_cpu(*low_be);
		u64 new_low = orig_low + (u64)increment;

		*low_be = __cpu_to_be64(new_low);

		if (new_low < orig_low)
			*high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
	} else {
		u8 *pos = (ctr + AES_BLOCK_SIZE);
		u8 val;
		unsigned int size;

		for (; increment; increment--)
			for (size = AES_BLOCK_SIZE; size; size--) {
				val = *--pos + 1;
				*pos = val;
				if (val)
					break;
			}
	}
}
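
/*
 * Note: the counter is one AES block interpreted as a 128-bit big-endian
 * integer. The fast path adds the increment to the low 64 bits and
 * propagates a single carry, e.g. (assuming an aligned buffer):
 *
 *	ctr = 00..00 ff ff ff ff ff ff ff ff, increment = 1
 *	  -> the low word wraps to 0, new_low < orig_low detects the
 *	     overflow, and the high word becomes 00..01.
 *
 * The byte-wise fallback ripples the same carry one byte at a time for
 * unaligned buffers on strict-alignment architectures.
 */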
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	unsigned int len;

	switch (ctx_p->cipher_mode) {
	case DRV_CIPHER_CBC:
		/*
		 * The crypto API expects us to set the req->iv to the last
		 * ciphertext block. For encrypt, simply copy from the result.
		 * For decrypt, we must copy from a saved buffer since this
		 * could be an in-place decryption operation and the src is
		 * lost by this point.
		 */
		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT) {
			memcpy(req->iv, req_ctx->backup_info, ivsize);
			kzfree(req_ctx->backup_info);
		} else if (!err) {
			len = req->cryptlen - ivsize;
			scatterwalk_map_and_copy(req->iv, req->dst, len,
						 ivsize, 0);
		}
		break;

	case DRV_CIPHER_CTR:
		/* Compute the counter of the last block */
		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
		cc_update_ctr((u8 *)req->iv, len);
		break;

	default:
		break;
	}

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	kzfree(req_ctx->iv);

	skcipher_request_complete(req, err);
}
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocated from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				   req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}
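
/*
 * Note: a return of -EINPROGRESS (or -EBUSY for a backlogged request)
 * from cc_send_request() means completion is asynchronous and
 * cc_cipher_complete() will do the unmapping and freeing later; any other
 * value is final, which is why exit_process releases backup_info and the
 * IV copy only in the synchronous cases.
 */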
static int cc_cipher_encrypt(struct skcipher_request *req)
{
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);

	memset(req_ctx, 0, sizeof(*req_ctx));

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
}
static int cc_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	gfp_t flags = cc_gfp_flags(&req->base);
	unsigned int len;

	memset(req_ctx, 0, sizeof(*req_ctx));

	if (ctx_p->cipher_mode == DRV_CIPHER_CBC) {

		/* Allocate and save the last IV sized bytes of the source,
		 * which will be lost in case of in-place decryption.
		 */
		req_ctx->backup_info = kzalloc(ivsize, flags);
		if (!req_ctx->backup_info)
			return -ENOMEM;

		len = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
					 ivsize, 0);
	}

	return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
}
/* Block cipher alg */
static const struct cc_alg_template skcipher_algs[] = {
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
854 .name = "essiv(paes)",
855 .driver_name = "essiv-paes-ccree",
856 .blocksize = AES_BLOCK_SIZE,
857 .template_skcipher = {
858 .setkey = cc_cipher_sethkey,
859 .encrypt = cc_cipher_encrypt,
860 .decrypt = cc_cipher_decrypt,
861 .min_keysize = CC_HW_KEY_SIZE,
862 .max_keysize = CC_HW_KEY_SIZE,
863 .ivsize = AES_BLOCK_SIZE,
865 .cipher_mode = DRV_CIPHER_ESSIV,
866 .flow_mode = S_DIN_to_AES,
867 .min_hw_rev = CC_HW_REV_712,
870 .name = "essiv512(paes)",
871 .driver_name = "essiv-paes-du512-ccree",
872 .blocksize = AES_BLOCK_SIZE,
873 .template_skcipher = {
874 .setkey = cc_cipher_sethkey,
875 .encrypt = cc_cipher_encrypt,
876 .decrypt = cc_cipher_decrypt,
877 .min_keysize = CC_HW_KEY_SIZE,
878 .max_keysize = CC_HW_KEY_SIZE,
879 .ivsize = AES_BLOCK_SIZE,
881 .cipher_mode = DRV_CIPHER_ESSIV,
882 .flow_mode = S_DIN_to_AES,
883 .data_unit = 512,
884 .min_hw_rev = CC_HW_REV_712,
887 .name = "essiv4096(paes)",
888 .driver_name = "essiv-paes-du4096-ccree",
889 .blocksize = AES_BLOCK_SIZE,
890 .template_skcipher = {
891 .setkey = cc_cipher_sethkey,
892 .encrypt = cc_cipher_encrypt,
893 .decrypt = cc_cipher_decrypt,
894 .min_keysize = CC_HW_KEY_SIZE,
895 .max_keysize = CC_HW_KEY_SIZE,
896 .ivsize = AES_BLOCK_SIZE,
898 .cipher_mode = DRV_CIPHER_ESSIV,
899 .flow_mode = S_DIN_to_AES,
900 .data_unit = 4096,
901 .min_hw_rev = CC_HW_REV_712,
904 .name = "bitlocker(paes)",
905 .driver_name = "bitlocker-paes-ccree",
906 .blocksize = AES_BLOCK_SIZE,
907 .template_skcipher = {
908 .setkey = cc_cipher_sethkey,
909 .encrypt = cc_cipher_encrypt,
910 .decrypt = cc_cipher_decrypt,
911 .min_keysize = CC_HW_KEY_SIZE,
912 .max_keysize = CC_HW_KEY_SIZE,
913 .ivsize = AES_BLOCK_SIZE,
915 .cipher_mode = DRV_CIPHER_BITLOCKER,
916 .flow_mode = S_DIN_to_AES,
917 .min_hw_rev = CC_HW_REV_712,
920 .name = "bitlocker512(paes)",
921 .driver_name = "bitlocker-paes-du512-ccree",
922 .blocksize = AES_BLOCK_SIZE,
923 .template_skcipher = {
924 .setkey = cc_cipher_sethkey,
925 .encrypt = cc_cipher_encrypt,
926 .decrypt = cc_cipher_decrypt,
927 .min_keysize = CC_HW_KEY_SIZE,
928 .max_keysize = CC_HW_KEY_SIZE,
929 .ivsize = AES_BLOCK_SIZE,
931 .cipher_mode = DRV_CIPHER_BITLOCKER,
932 .flow_mode = S_DIN_to_AES,
933 .data_unit = 512,
934 .min_hw_rev = CC_HW_REV_712,
937 .name = "bitlocker4096(paes)",
938 .driver_name = "bitlocker-paes-du4096-ccree",
939 .blocksize = AES_BLOCK_SIZE,
940 .template_skcipher = {
941 .setkey = cc_cipher_sethkey,
942 .encrypt = cc_cipher_encrypt,
943 .decrypt = cc_cipher_decrypt,
944 .min_keysize = CC_HW_KEY_SIZE,
945 .max_keysize = CC_HW_KEY_SIZE,
946 .ivsize = AES_BLOCK_SIZE,
948 .cipher_mode = DRV_CIPHER_BITLOCKER,
949 .flow_mode = S_DIN_to_AES,
950 .data_unit = 4096,
951 .min_hw_rev = CC_HW_REV_712,
954 .name = "ecb(paes)",
955 .driver_name = "ecb-paes-ccree",
956 .blocksize = AES_BLOCK_SIZE,
957 .template_skcipher = {
958 .setkey = cc_cipher_sethkey,
959 .encrypt = cc_cipher_encrypt,
960 .decrypt = cc_cipher_decrypt,
961 .min_keysize = CC_HW_KEY_SIZE,
962 .max_keysize = CC_HW_KEY_SIZE,
963 .ivsize = 0,
965 .cipher_mode = DRV_CIPHER_ECB,
966 .flow_mode = S_DIN_to_AES,
967 .min_hw_rev = CC_HW_REV_712,
970 .name = "cbc(paes)",
971 .driver_name = "cbc-paes-ccree",
972 .blocksize = AES_BLOCK_SIZE,
973 .template_skcipher = {
974 .setkey = cc_cipher_sethkey,
975 .encrypt = cc_cipher_encrypt,
976 .decrypt = cc_cipher_decrypt,
977 .min_keysize = CC_HW_KEY_SIZE,
978 .max_keysize = CC_HW_KEY_SIZE,
979 .ivsize = AES_BLOCK_SIZE,
981 .cipher_mode = DRV_CIPHER_CBC,
982 .flow_mode = S_DIN_to_AES,
983 .min_hw_rev = CC_HW_REV_712,
986 .name = "ofb(paes)",
987 .driver_name = "ofb-paes-ccree",
988 .blocksize = AES_BLOCK_SIZE,
989 .template_skcipher = {
990 .setkey = cc_cipher_sethkey,
991 .encrypt = cc_cipher_encrypt,
992 .decrypt = cc_cipher_decrypt,
993 .min_keysize = CC_HW_KEY_SIZE,
994 .max_keysize = CC_HW_KEY_SIZE,
995 .ivsize = AES_BLOCK_SIZE,
997 .cipher_mode = DRV_CIPHER_OFB,
998 .flow_mode = S_DIN_to_AES,
999 .min_hw_rev = CC_HW_REV_712,
1002 .name = "cts(cbc(paes))",
1003 .driver_name = "cts-cbc-paes-ccree",
1004 .blocksize = AES_BLOCK_SIZE,
1005 .template_skcipher = {
1006 .setkey = cc_cipher_sethkey,
1007 .encrypt = cc_cipher_encrypt,
1008 .decrypt = cc_cipher_decrypt,
1009 .min_keysize = CC_HW_KEY_SIZE,
1010 .max_keysize = CC_HW_KEY_SIZE,
1011 .ivsize = AES_BLOCK_SIZE,
1013 .cipher_mode = DRV_CIPHER_CBC_CTS,
1014 .flow_mode = S_DIN_to_AES,
1015 .min_hw_rev = CC_HW_REV_712,
1018 .name = "ctr(paes)",
1019 .driver_name = "ctr-paes-ccree",
1020 .blocksize = 1,
1021 .template_skcipher = {
1022 .setkey = cc_cipher_sethkey,
1023 .encrypt = cc_cipher_encrypt,
1024 .decrypt = cc_cipher_decrypt,
1025 .min_keysize = CC_HW_KEY_SIZE,
1026 .max_keysize = CC_HW_KEY_SIZE,
1027 .ivsize = AES_BLOCK_SIZE,
1029 .cipher_mode = DRV_CIPHER_CTR,
1030 .flow_mode = S_DIN_to_AES,
1031 .min_hw_rev = CC_HW_REV_712,
1034 .name = "xts(aes)",
1035 .driver_name = "xts-aes-ccree",
1036 .blocksize = AES_BLOCK_SIZE,
1037 .template_skcipher = {
1038 .setkey = cc_cipher_setkey,
1039 .encrypt = cc_cipher_encrypt,
1040 .decrypt = cc_cipher_decrypt,
1041 .min_keysize = AES_MIN_KEY_SIZE * 2,
1042 .max_keysize = AES_MAX_KEY_SIZE * 2,
1043 .ivsize = AES_BLOCK_SIZE,
1045 .cipher_mode = DRV_CIPHER_XTS,
1046 .flow_mode = S_DIN_to_AES,
1047 .min_hw_rev = CC_HW_REV_630,
1050 .name = "xts512(aes)",
1051 .driver_name = "xts-aes-du512-ccree",
1052 .blocksize = AES_BLOCK_SIZE,
1053 .template_skcipher = {
1054 .setkey = cc_cipher_setkey,
1055 .encrypt = cc_cipher_encrypt,
1056 .decrypt = cc_cipher_decrypt,
1057 .min_keysize = AES_MIN_KEY_SIZE * 2,
1058 .max_keysize = AES_MAX_KEY_SIZE * 2,
1059 .ivsize = AES_BLOCK_SIZE,
1061 .cipher_mode = DRV_CIPHER_XTS,
1062 .flow_mode = S_DIN_to_AES,
1063 .data_unit = 512,
1064 .min_hw_rev = CC_HW_REV_712,
1067 .name = "xts4096(aes)",
1068 .driver_name = "xts-aes-du4096-ccree",
1069 .blocksize = AES_BLOCK_SIZE,
1070 .template_skcipher = {
1071 .setkey = cc_cipher_setkey,
1072 .encrypt = cc_cipher_encrypt,
1073 .decrypt = cc_cipher_decrypt,
1074 .min_keysize = AES_MIN_KEY_SIZE * 2,
1075 .max_keysize = AES_MAX_KEY_SIZE * 2,
1076 .ivsize = AES_BLOCK_SIZE,
1078 .cipher_mode = DRV_CIPHER_XTS,
1079 .flow_mode = S_DIN_to_AES,
1080 .data_unit = 4096,
1081 .min_hw_rev = CC_HW_REV_712,
1084 .name = "essiv(aes)",
1085 .driver_name = "essiv-aes-ccree",
1086 .blocksize = AES_BLOCK_SIZE,
1087 .template_skcipher = {
1088 .setkey = cc_cipher_setkey,
1089 .encrypt = cc_cipher_encrypt,
1090 .decrypt = cc_cipher_decrypt,
1091 .min_keysize = AES_MIN_KEY_SIZE * 2,
1092 .max_keysize = AES_MAX_KEY_SIZE * 2,
1093 .ivsize = AES_BLOCK_SIZE,
1095 .cipher_mode = DRV_CIPHER_ESSIV,
1096 .flow_mode = S_DIN_to_AES,
1097 .min_hw_rev = CC_HW_REV_712,
1100 .name = "essiv512(aes)",
1101 .driver_name = "essiv-aes-du512-ccree",
1102 .blocksize = AES_BLOCK_SIZE,
1103 .template_skcipher = {
1104 .setkey = cc_cipher_setkey,
1105 .encrypt = cc_cipher_encrypt,
1106 .decrypt = cc_cipher_decrypt,
1107 .min_keysize = AES_MIN_KEY_SIZE * 2,
1108 .max_keysize = AES_MAX_KEY_SIZE * 2,
1109 .ivsize = AES_BLOCK_SIZE,
1111 .cipher_mode = DRV_CIPHER_ESSIV,
1112 .flow_mode = S_DIN_to_AES,
1113 .data_unit = 512,
1114 .min_hw_rev = CC_HW_REV_712,
1117 .name = "essiv4096(aes)",
1118 .driver_name = "essiv-aes-du4096-ccree",
1119 .blocksize = AES_BLOCK_SIZE,
1120 .template_skcipher = {
1121 .setkey = cc_cipher_setkey,
1122 .encrypt = cc_cipher_encrypt,
1123 .decrypt = cc_cipher_decrypt,
1124 .min_keysize = AES_MIN_KEY_SIZE * 2,
1125 .max_keysize = AES_MAX_KEY_SIZE * 2,
1126 .ivsize = AES_BLOCK_SIZE,
1128 .cipher_mode = DRV_CIPHER_ESSIV,
1129 .flow_mode = S_DIN_to_AES,
1130 .data_unit = 4096,
1131 .min_hw_rev = CC_HW_REV_712,
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
};
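
/*
 * Note: each mode above exists in an "aes" flavour taking key bytes via
 * cc_cipher_setkey() and a "paes" flavour taking a HW-slot token via
 * cc_cipher_sethkey(); the du512/du4096 variants pin the XEX data-unit
 * size instead of deriving it from the request length, and min_hw_rev
 * lets cc_cipher_alloc() skip entries the silicon revision cannot
 * support.
 */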
static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
					   struct device *dev)
{
	struct cc_crypto_alg *t_alg;
	struct skcipher_alg *alg;

	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	alg = &t_alg->skcipher_alg;

	memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 tmpl->driver_name);
	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CC_CRA_PRIO;
	alg->base.cra_blocksize = tmpl->blocksize;
	alg->base.cra_alignmask = 0;
	alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);

	alg->base.cra_init = cc_cipher_init;
	alg->base.cra_exit = cc_cipher_exit;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->cipher_mode = tmpl->cipher_mode;
	t_alg->flow_mode = tmpl->flow_mode;
	t_alg->data_unit = tmpl->data_unit;

	return t_alg;
}
int cc_cipher_free(struct cc_drvdata *drvdata)
{
	struct cc_crypto_alg *t_alg, *n;
	struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;

	if (cipher_handle) {
		/* Remove registered algs */
		list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
					 entry) {
			crypto_unregister_skcipher(&t_alg->skcipher_alg);
			list_del(&t_alg->entry);
			kfree(t_alg);
		}
		kfree(cipher_handle);
		drvdata->cipher_handle = NULL;
	}
	return 0;
}
int cc_cipher_alloc(struct cc_drvdata *drvdata)
{
	struct cc_cipher_handle *cipher_handle;
	struct cc_crypto_alg *t_alg;
	struct device *dev = drvdata_to_dev(drvdata);
	int rc = -ENOMEM;
	int alg;

	cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
	if (!cipher_handle)
		return -ENOMEM;

	INIT_LIST_HEAD(&cipher_handle->alg_list);
	drvdata->cipher_handle = cipher_handle;

	/* Linux crypto */
	dev_dbg(dev, "Number of algorithms = %zu\n",
		ARRAY_SIZE(skcipher_algs));
	for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
		if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
			continue;

		dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
		t_alg = cc_create_alg(&skcipher_algs[alg], dev);
		if (IS_ERR(t_alg)) {
			rc = PTR_ERR(t_alg);
			dev_err(dev, "%s alg allocation failed\n",
				skcipher_algs[alg].driver_name);
			goto fail0;
		}
		t_alg->drvdata = drvdata;

		dev_dbg(dev, "registering %s\n",
			skcipher_algs[alg].driver_name);
		rc = crypto_register_skcipher(&t_alg->skcipher_alg);
		dev_dbg(dev, "%s alg registration rc = %x\n",
			t_alg->skcipher_alg.base.cra_driver_name, rc);
		if (rc) {
			dev_err(dev, "%s alg registration failed\n",
				t_alg->skcipher_alg.base.cra_driver_name);
			kfree(t_alg);
			goto fail0;
		} else {
			list_add_tail(&t_alg->entry,
				      &cipher_handle->alg_list);
			dev_dbg(dev, "Registered %s\n",
				t_alg->skcipher_alg.base.cra_driver_name);
		}
	}

	return 0;

fail0:
	cc_cipher_free(drvdata);
	return rc;
}