// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ce-core.c - hardware cryptographic offloader for
 * Allwinner H3/A64/H5/H2+/H6/R40 SoC
 *
 * Copyright (C) 2015-2019 Corentin Labbe <clabbe.montjoie@gmail.com>
 *
 * Core file which registers crypto algorithms supported by the CryptoEngine.
 *
 * You can find a link for the datasheet in Documentation/arch/arm/sunxi.rst
 */
#include <crypto/engine.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/rng.h>
#include <crypto/internal/skcipher.h>
#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/irq.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include "sun8i-ce.h"
/*
 * The mod clock is lower on H3 than on other SoCs due to DMA timeouts that
 * occur at higher rates.
 * If you want to tune the mod clock, loading the driver and passing the
 * selftest is insufficient; you need to test with some LUKS test (mount and
 * write to it).
 */
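/*
 * Each entry in the .ce_clks tables below is { name, required rate, max rate }:
 * a non-zero second value makes the clock be reprogrammed to that rate, and a
 * non-zero third value only triggers a warning when the current rate exceeds
 * the datasheet recommendation (interpretation based on how
 * sun8i_ce_get_clks() below uses the .freq and .max_freq fields).
 */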
static const struct ce_variant ce_h3_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 50000000, 0 },
	},
	.trng = CE_ID_NOTSUPP,
};
static const struct ce_variant ce_h5_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.trng = CE_ID_NOTSUPP,
};
static const struct ce_variant ce_h6_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
	},
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};
static const struct ce_variant ce_h616_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.cipher_t_dlen_in_bytes = true,
	.hash_t_dlen_in_bits = true,
	.prng_t_dlen_in_bytes = true,
	.trng_t_dlen_in_bytes = true,
	.needs_word_addresses = true,
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
	},
	.prng = CE_ALG_PRNG_V2,
	.trng = CE_ALG_TRNG_V2,
};
static const struct ce_variant ce_a64_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.trng = CE_ID_NOTSUPP,
};
static const struct ce_variant ce_d1_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ALG_SHA384, CE_ALG_SHA512,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
		{ "ram", 0, 400000000 },
	},
};
static const struct ce_variant ce_r40_variant = {
	.alg_cipher = { CE_ALG_AES, CE_ALG_DES, CE_ALG_3DES,
	},
	.alg_hash = { CE_ALG_MD5, CE_ALG_SHA1, CE_ALG_SHA224, CE_ALG_SHA256,
		CE_ID_NOTSUPP, CE_ID_NOTSUPP,
	},
	.op_mode = { CE_OP_ECB, CE_OP_CBC,
	},
	.ce_clks = {
		{ "bus", 0, 200000000 },
		{ "mod", 300000000, 0 },
	},
	.trng = CE_ID_NOTSUPP,
};
/*
 * sun8i_ce_get_engine_number() gets the next channel slot.
 * This is a simple round-robin way of getting the next channel.
 * Flow 3 is reserved for xRNG operations.
 */
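/*
 * For example, assuming MAXFLOW is 4 (as the reserved flow 3 above implies),
 * the atomic counter is taken modulo MAXFLOW - 1 = 3, so successive callers
 * receive flows 0, 1, 2, 0, 1, 2, ... and flow 3 is never handed out here.
 */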
int sun8i_ce_get_engine_number(struct sun8i_ce_dev *ce)
{
	return atomic_inc_return(&ce->flow) % (MAXFLOW - 1);
}
int sun8i_ce_run_task(struct sun8i_ce_dev *ce, int flow, const char *name)
{
	u32 v;
	int err = 0;
	struct ce_task *cet = ce->chanlist[flow].tl;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	ce->chanlist[flow].stat_req++;
#endif

	mutex_lock(&ce->mlock);

	v = readl(ce->base + CE_ICR);
	v |= 1 << flow;
	writel(v, ce->base + CE_ICR);

	reinit_completion(&ce->chanlist[flow].complete);
	writel(desc_addr_val(ce, ce->chanlist[flow].t_phy), ce->base + CE_TDQ);

	ce->chanlist[flow].status = 0;
	/* Be sure all data is written before enabling the task */
	wmb();

	/* Only H6 needs to write a part of t_common_ctl along with "1", but since it is ignored
	 * on older SoCs, we have no reason to complicate things.
	 */
	v = 1 | ((le32_to_cpu(ce->chanlist[flow].tl->t_common_ctl) & 0x7F) << 8);
	writel(v, ce->base + CE_TLR);
	mutex_unlock(&ce->mlock);

	wait_for_completion_interruptible_timeout(&ce->chanlist[flow].complete,
						  msecs_to_jiffies(ce->chanlist[flow].timeout));

	if (ce->chanlist[flow].status == 0) {
		dev_err(ce->dev, "DMA timeout for %s (tm=%d) on flow %d\n", name,
			ce->chanlist[flow].timeout, flow);
		err = -EFAULT;
	}
	/* No need to lock for this read, the channel is locked so
	 * nothing could modify the error value for this channel
	 */
	v = readl(ce->base + CE_ESR);
	switch (ce->variant->esr) {
	case ESR_H3:
		/* Sadly, the error bit is not per flow */
		if (v) {
			err = -EFAULT;
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_A64:
	case ESR_D1:
	case ESR_H5:
	case ESR_R40:
		v >>= (flow * 4);
		v &= 0xF;
		if (v) {
			err = -EFAULT;
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		break;
	case ESR_H6:
		v >>= (flow * 8);
		v &= 0xFF;
		if (v) {
			err = -EFAULT;
			dev_err(ce->dev, "CE ERROR: %x for flow %x\n", v, flow);
			print_hex_dump(KERN_INFO, "TASK: ", DUMP_PREFIX_NONE, 16, 4,
				       cet, sizeof(struct ce_task), false);
		}
		if (v & CE_ERR_ALGO_NOTSUP)
			dev_err(ce->dev, "CE ERROR: algorithm not supported\n");
		if (v & CE_ERR_DATALEN)
			dev_err(ce->dev, "CE ERROR: data length error\n");
		if (v & CE_ERR_KEYSRAM)
			dev_err(ce->dev, "CE ERROR: keysram access error for AES\n");
		if (v & CE_ERR_ADDR_INVALID)
			dev_err(ce->dev, "CE ERROR: address invalid\n");
		if (v & CE_ERR_KEYLADDER)
			dev_err(ce->dev, "CE ERROR: key ladder configuration error\n");
		break;
	}

	return err;
}
static irqreturn_t ce_irq_handler(int irq, void *data)
{
	struct sun8i_ce_dev *ce = (struct sun8i_ce_dev *)data;
	int flow;
	u32 p;

	p = readl(ce->base + CE_ISR);
	for (flow = 0; flow < MAXFLOW; flow++) {
		if (p & (BIT(flow))) {
			writel(BIT(flow), ce->base + CE_ISR);
			ce->chanlist[flow].status = 1;
			complete(&ce->chanlist[flow].complete);
		}
	}

	return IRQ_HANDLED;
}
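/*
 * ce_algs[] below is the list of algorithm templates handled by this driver.
 * Each entry carries the crypto_alg definition plus the Crypto Engine
 * identifiers (ce_algo_id, ce_blockmode) that sun8i_ce_register_algs() checks
 * against the per-SoC capability tables of the ce_variant in use.
 */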
static struct sun8i_ce_alg_template ce_algs[] = {
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-sun8i-ce",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = sun8i_ce_aes_setkey,
		.encrypt = sun8i_ce_skencrypt,
		.decrypt = sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_AES,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-sun8i-ce",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = sun8i_ce_aes_setkey,
		.encrypt = sun8i_ce_skencrypt,
		.decrypt = sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_CBC,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-des3-sun8i-ce",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.setkey = sun8i_ce_des3_setkey,
		.encrypt = sun8i_ce_skencrypt,
		.decrypt = sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.ce_algo_id = CE_ID_CIPHER_DES3,
	.ce_blockmode = CE_ID_OP_ECB,
	.alg.skcipher.base = {
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-des3-sun8i-ce",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct sun8i_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = sun8i_ce_cipher_init,
			.cra_exit = sun8i_ce_cipher_exit,
		},
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.setkey = sun8i_ce_des3_setkey,
		.encrypt = sun8i_ce_skencrypt,
		.decrypt = sun8i_ce_skdecrypt,
	},
	.alg.skcipher.op = {
		.do_one_request = sun8i_ce_cipher_do_one,
	},
},
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_HASH
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_MD5,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-sun8i-ce",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			},
		},
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA1,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-sun8i-ce",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			},
		},
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA224,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-sun8i-ce",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			},
		},
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA256,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-sun8i-ce",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			},
		},
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA384,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-sun8i-ce",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			},
		},
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
{	.type = CRYPTO_ALG_TYPE_AHASH,
	.ce_algo_id = CE_ID_HASH_SHA512,
	.alg.hash.base = {
		.init = sun8i_ce_hash_init,
		.update = sun8i_ce_hash_update,
		.final = sun8i_ce_hash_final,
		.finup = sun8i_ce_hash_finup,
		.digest = sun8i_ce_hash_digest,
		.export = sun8i_ce_hash_export,
		.import = sun8i_ce_hash_import,
		.init_tfm = sun8i_ce_hash_init_tfm,
		.exit_tfm = sun8i_ce_hash_exit_tfm,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct sha512_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-sun8i-ce",
				.cra_flags = CRYPTO_ALG_TYPE_AHASH |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct sun8i_ce_hash_tfm_ctx),
				.cra_module = THIS_MODULE,
			},
		},
	},
	.alg.hash.op = {
		.do_one_request = sun8i_ce_hash_run,
	},
},
#endif
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_PRNG
{
	.type = CRYPTO_ALG_TYPE_RNG,
	.alg.rng = {
		.base = {
			.cra_name = "stdrng",
			.cra_driver_name = "sun8i-ce-prng",
			.cra_ctxsize = sizeof(struct sun8i_ce_rng_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_init = sun8i_ce_prng_init,
			.cra_exit = sun8i_ce_prng_exit,
		},
		.generate = sun8i_ce_prng_generate,
		.seed = sun8i_ce_prng_seed,
		.seedsize = PRNG_SEED_SIZE,
	},
},
#endif
};
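/*
 * The debugfs "stats" file (sun8i-ce/stats) created at probe time dumps the
 * per-channel request counts and, per algorithm, how many requests ran on
 * the hardware versus how many fell back to the software implementation,
 * together with the reason for each fallback.
 */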
static int sun8i_ce_debugfs_show(struct seq_file *seq, void *v)
{
	struct sun8i_ce_dev *ce __maybe_unused = seq->private;
	unsigned int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i,
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
			   ce->chanlist[i].stat_req);
#else
			   0ul);
#endif

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.skcipher.base.base.cra_driver_name,
				   ce_algs[i].alg.skcipher.base.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length !mod16: %lu\n",
				   ce_algs[i].stat_fb_mod16);
			seq_printf(seq, "\tFallback due to length < IV: %lu\n",
				   ce_algs[i].stat_fb_leniv);
			seq_printf(seq, "\tFallback due to source alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to dest alignment: %lu\n",
				   ce_algs[i].stat_fb_dstali);
			seq_printf(seq, "\tFallback due to source length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to dest length: %lu\n",
				   ce_algs[i].stat_fb_dstlen);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			seq_printf(seq, "%s %s reqs=%lu fallback=%lu\n",
				   ce_algs[i].alg.hash.base.halg.base.cra_driver_name,
				   ce_algs[i].alg.hash.base.halg.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_fb);
			seq_printf(seq, "\tLast fallback is: %s\n",
				   ce_algs[i].fbname);
			seq_printf(seq, "\tFallback due to 0 length: %lu\n",
				   ce_algs[i].stat_fb_len0);
			seq_printf(seq, "\tFallback due to length: %lu\n",
				   ce_algs[i].stat_fb_srclen);
			seq_printf(seq, "\tFallback due to alignment: %lu\n",
				   ce_algs[i].stat_fb_srcali);
			seq_printf(seq, "\tFallback due to SG numbers: %lu\n",
				   ce_algs[i].stat_fb_maxsg);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			seq_printf(seq, "%s %s reqs=%lu bytes=%lu\n",
				   ce_algs[i].alg.rng.base.cra_driver_name,
				   ce_algs[i].alg.rng.base.cra_name,
				   ce_algs[i].stat_req, ce_algs[i].stat_bytes);
			break;
		}
	}
#if defined(CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG) && \
    defined(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)
	seq_printf(seq, "HWRNG %lu %lu\n",
		   ce->hwrng_stat_req, ce->hwrng_stat_bytes);
#endif
	return 0;
}

DEFINE_SHOW_ATTRIBUTE(sun8i_ce_debugfs);
static void sun8i_ce_free_chanlist(struct sun8i_ce_dev *ce, int i)
{
	while (i >= 0) {
		crypto_engine_exit(ce->chanlist[i].engine);
		if (ce->chanlist[i].tl)
			dma_free_coherent(ce->dev, sizeof(struct ce_task),
					  ce->chanlist[i].tl,
					  ce->chanlist[i].t_phy);
		i--;
	}
}
/*
 * Allocate the channel list structure
 */
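/*
 * Each flow gets its own crypto_engine, a completion, a DMA-coherent ce_task
 * descriptor, a bounce_iv buffer allocated with GFP_DMA and a backup_iv
 * buffer (description inferred from the allocations below).
 */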
static int sun8i_ce_allocate_chanlist(struct sun8i_ce_dev *ce)
{
	int i, err;

	ce->chanlist = devm_kcalloc(ce->dev, MAXFLOW,
				    sizeof(struct sun8i_ce_flow), GFP_KERNEL);
	if (!ce->chanlist)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&ce->chanlist[i].complete);

		ce->chanlist[i].engine = crypto_engine_alloc_init(ce->dev, true);
		if (!ce->chanlist[i].engine) {
			dev_err(ce->dev, "Cannot allocate engine\n");
			i--;
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(ce->chanlist[i].engine);
		if (err) {
			dev_err(ce->dev, "Cannot start engine\n");
			goto error_engine;
		}
		ce->chanlist[i].tl = dma_alloc_coherent(ce->dev,
							sizeof(struct ce_task),
							&ce->chanlist[i].t_phy,
							GFP_KERNEL);
		if (!ce->chanlist[i].tl) {
			dev_err(ce->dev, "Cannot get DMA memory for task %d\n",
				i);
			err = -ENOMEM;
			goto error_engine;
		}
		ce->chanlist[i].bounce_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
							 GFP_KERNEL | GFP_DMA);
		if (!ce->chanlist[i].bounce_iv) {
			err = -ENOMEM;
			goto error_engine;
		}
		ce->chanlist[i].backup_iv = devm_kmalloc(ce->dev, AES_BLOCK_SIZE,
							 GFP_KERNEL);
		if (!ce->chanlist[i].backup_iv) {
			err = -ENOMEM;
			goto error_engine;
		}
	}
	return 0;
error_engine:
	sun8i_ce_free_chanlist(ce, i);
	return err;
}
/*
 * Power management strategy: The device is suspended unless a TFM exists for
 * one of the algorithms proposed by this driver.
 */
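/*
 * Runtime PM uses autosuspend with a 2 second delay (see sun8i_ce_pm_init()
 * below), so the engine is powered down shortly after the last user goes
 * away; presumably the per-algorithm init/exit paths take and drop the
 * runtime PM reference.
 */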
static int sun8i_ce_pm_suspend(struct device *dev)
{
	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
	int i;

	reset_control_assert(ce->reset);
	for (i = 0; i < CE_MAX_CLOCKS; i++)
		clk_disable_unprepare(ce->ceclks[i]);
	return 0;
}

static int sun8i_ce_pm_resume(struct device *dev)
{
	struct sun8i_ce_dev *ce = dev_get_drvdata(dev);
	int err, i;

	for (i = 0; i < CE_MAX_CLOCKS; i++) {
		if (!ce->variant->ce_clks[i].name)
			continue;
		err = clk_prepare_enable(ce->ceclks[i]);
		if (err) {
			dev_err(ce->dev, "Cannot prepare_enable %s\n",
				ce->variant->ce_clks[i].name);
			goto error;
		}
	}
	err = reset_control_deassert(ce->reset);
	if (err) {
		dev_err(ce->dev, "Cannot deassert reset control\n");
		goto error;
	}
	return 0;
error:
	sun8i_ce_pm_suspend(dev);
	return err;
}

static const struct dev_pm_ops sun8i_ce_pm_ops = {
	SET_RUNTIME_PM_OPS(sun8i_ce_pm_suspend, sun8i_ce_pm_resume, NULL)
};

static int sun8i_ce_pm_init(struct sun8i_ce_dev *ce)
{
	int err;

	pm_runtime_use_autosuspend(ce->dev);
	pm_runtime_set_autosuspend_delay(ce->dev, 2000);

	err = pm_runtime_set_suspended(ce->dev);
	if (err)
		return err;
	pm_runtime_enable(ce->dev);
	return err;
}

static void sun8i_ce_pm_exit(struct sun8i_ce_dev *ce)
{
	pm_runtime_disable(ce->dev);
}
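/*
 * sun8i_ce_get_clks() below grabs every clock named in the variant's ce_clks
 * table ("bus", "mod" and, on newer SoCs, "ram"), reprograms the rate when
 * the table requests a specific frequency, and warns when the current rate
 * exceeds the datasheet maximum.
 */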
static int sun8i_ce_get_clks(struct sun8i_ce_dev *ce)
{
	unsigned long cr;
	int err, i;

	for (i = 0; i < CE_MAX_CLOCKS; i++) {
		if (!ce->variant->ce_clks[i].name)
			continue;
		ce->ceclks[i] = devm_clk_get(ce->dev, ce->variant->ce_clks[i].name);
		if (IS_ERR(ce->ceclks[i])) {
			err = PTR_ERR(ce->ceclks[i]);
			dev_err(ce->dev, "Cannot get %s CE clock err=%d\n",
				ce->variant->ce_clks[i].name, err);
			return err;
		}
		cr = clk_get_rate(ce->ceclks[i]);
		if (ce->variant->ce_clks[i].freq > 0 &&
		    cr != ce->variant->ce_clks[i].freq) {
			dev_info(ce->dev, "Set %s clock to %lu (%lu MHz) from %lu (%lu MHz)\n",
				 ce->variant->ce_clks[i].name,
				 ce->variant->ce_clks[i].freq,
				 ce->variant->ce_clks[i].freq / 1000000,
				 cr, cr / 1000000);
			err = clk_set_rate(ce->ceclks[i], ce->variant->ce_clks[i].freq);
			if (err)
				dev_err(ce->dev, "Fail to set %s clk speed to %lu Hz\n",
					ce->variant->ce_clks[i].name,
					ce->variant->ce_clks[i].freq);
		}
		if (ce->variant->ce_clks[i].max_freq > 0 &&
		    cr > ce->variant->ce_clks[i].max_freq)
			dev_warn(ce->dev, "Frequency for %s (%lu Hz) is higher than datasheet's recommendation (%lu Hz)",
				 ce->variant->ce_clks[i].name, cr,
				 ce->variant->ce_clks[i].max_freq);
	}
	return 0;
}
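/*
 * sun8i_ce_register_algs() walks ce_algs[] and only registers an algorithm
 * when the current variant supports its CE algorithm id (alg_cipher/alg_hash)
 * and, for skciphers, its block mode (op_mode); unsupported entries get
 * .ce = NULL so they are skipped at unregister time and in debugfs.
 */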
static int sun8i_ce_register_algs(struct sun8i_ce_dev *ce)
{
	int ce_method, err, id;
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		ce_algs[i].ce = ce;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			id = ce_algs[i].ce_algo_id;
			ce_method = ce->variant->alg_cipher[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev,
					"DEBUG: Algo of %s not supported\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			id = ce_algs[i].ce_blockmode;
			ce_method = ce->variant->op_mode[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev, "DEBUG: Blockmode of %s not supported\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.skcipher.base.base.cra_name);
			err = crypto_engine_register_skcipher(&ce_algs[i].alg.skcipher);
			if (err) {
				dev_err(ce->dev, "ERROR: Fail to register %s\n",
					ce_algs[i].alg.skcipher.base.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			id = ce_algs[i].ce_algo_id;
			ce_method = ce->variant->alg_hash[id];
			if (ce_method == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev,
					"DEBUG: Algo of %s not supported\n",
					ce_algs[i].alg.hash.base.halg.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.hash.base.halg.base.cra_name);
			err = crypto_engine_register_ahash(&ce_algs[i].alg.hash);
			if (err) {
				dev_err(ce->dev, "ERROR: Fail to register %s\n",
					ce_algs[i].alg.hash.base.halg.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		case CRYPTO_ALG_TYPE_RNG:
			if (ce->variant->prng == CE_ID_NOTSUPP) {
				dev_dbg(ce->dev,
					"DEBUG: Algo of %s not supported\n",
					ce_algs[i].alg.rng.base.cra_name);
				ce_algs[i].ce = NULL;
				break;
			}
			dev_info(ce->dev, "Register %s\n",
				 ce_algs[i].alg.rng.base.cra_name);
			err = crypto_register_rng(&ce_algs[i].alg.rng);
			if (err) {
				dev_err(ce->dev, "Fail to register %s\n",
					ce_algs[i].alg.rng.base.cra_name);
				ce_algs[i].ce = NULL;
				return err;
			}
			break;
		default:
			ce_algs[i].ce = NULL;
			dev_err(ce->dev, "ERROR: tried to register an unknown algo\n");
		}
	}
	return 0;
}
static void sun8i_ce_unregister_algs(struct sun8i_ce_dev *ce)
{
	unsigned int i;

	for (i = 0; i < ARRAY_SIZE(ce_algs); i++) {
		if (!ce_algs[i].ce)
			continue;
		switch (ce_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.skcipher.base.base.cra_name);
			crypto_engine_unregister_skcipher(&ce_algs[i].alg.skcipher);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.hash.base.halg.base.cra_name);
			crypto_engine_unregister_ahash(&ce_algs[i].alg.hash);
			break;
		case CRYPTO_ALG_TYPE_RNG:
			dev_info(ce->dev, "Unregister %d %s\n", i,
				 ce_algs[i].alg.rng.base.cra_name);
			crypto_unregister_rng(&ce_algs[i].alg.rng);
			break;
		}
	}
}
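/*
 * Probe order, as implemented below: map registers and clocks, get the
 * Non-secure IRQ and reset line, allocate the per-flow channel list, set up
 * runtime PM, register the algorithms, then read the Die ID and (optionally)
 * create the debugfs statistics file.
 */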
static int sun8i_ce_probe(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce;
	int err, irq;
	u32 v;

	ce = devm_kzalloc(&pdev->dev, sizeof(*ce), GFP_KERNEL);
	if (!ce)
		return -ENOMEM;

	ce->dev = &pdev->dev;
	platform_set_drvdata(pdev, ce);

	ce->variant = of_device_get_match_data(&pdev->dev);
	if (!ce->variant) {
		dev_err(&pdev->dev, "Missing Crypto Engine variant\n");
		return -EINVAL;
	}

	ce->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(ce->base))
		return PTR_ERR(ce->base);

	err = sun8i_ce_get_clks(ce);
	if (err)
		return err;

	/* Get Non Secure IRQ */
	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ce->reset = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(ce->reset))
		return dev_err_probe(&pdev->dev, PTR_ERR(ce->reset),
				     "No reset control found\n");

	mutex_init(&ce->mlock);
	mutex_init(&ce->rnglock);

	err = sun8i_ce_allocate_chanlist(ce);
	if (err)
		return err;

	err = sun8i_ce_pm_init(ce);
	if (err)
		goto error_pm;

	err = devm_request_irq(&pdev->dev, irq, ce_irq_handler, 0,
			       "sun8i-ce-ns", ce);
	if (err) {
		dev_err(ce->dev, "Cannot request CryptoEngine Non-secure IRQ (err=%d)\n", err);
		goto error_irq;
	}

	err = sun8i_ce_register_algs(ce);
	if (err)
		goto error_alg;

	err = pm_runtime_resume_and_get(ce->dev);
	if (err < 0)
		goto error_alg;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_register(ce);
#endif

	v = readl(ce->base + CE_CTR);
	v >>= CE_DIE_ID_SHIFT;
	v &= CE_DIE_ID_MASK;
	dev_info(&pdev->dev, "CryptoEngine Die ID %x\n", v);

	pm_runtime_put_sync(ce->dev);

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG)) {
		struct dentry *dbgfs_dir __maybe_unused;
		struct dentry *dbgfs_stats __maybe_unused;

		/* Ignore error of debugfs */
		dbgfs_dir = debugfs_create_dir("sun8i-ce", NULL);
		dbgfs_stats = debugfs_create_file("stats", 0444,
						  dbgfs_dir, ce,
						  &sun8i_ce_debugfs_fops);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
		ce->dbgfs_dir = dbgfs_dir;
		ce->dbgfs_stats = dbgfs_stats;
#endif
	}

	return 0;

error_alg:
	sun8i_ce_unregister_algs(ce);
error_irq:
	sun8i_ce_pm_exit(ce);
error_pm:
	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);
	return err;
}
static void sun8i_ce_remove(struct platform_device *pdev)
{
	struct sun8i_ce_dev *ce = platform_get_drvdata(pdev);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_TRNG
	sun8i_ce_hwrng_unregister(ce);
#endif

	sun8i_ce_unregister_algs(ce);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	debugfs_remove_recursive(ce->dbgfs_dir);
#endif

	sun8i_ce_free_chanlist(ce, MAXFLOW - 1);

	sun8i_ce_pm_exit(ce);
}
static const struct of_device_id sun8i_ce_crypto_of_match_table[] = {
	{ .compatible = "allwinner,sun8i-h3-crypto",
	  .data = &ce_h3_variant },
	{ .compatible = "allwinner,sun8i-r40-crypto",
	  .data = &ce_r40_variant },
	{ .compatible = "allwinner,sun20i-d1-crypto",
	  .data = &ce_d1_variant },
	{ .compatible = "allwinner,sun50i-a64-crypto",
	  .data = &ce_a64_variant },
	{ .compatible = "allwinner,sun50i-h5-crypto",
	  .data = &ce_h5_variant },
	{ .compatible = "allwinner,sun50i-h6-crypto",
	  .data = &ce_h6_variant },
	{ .compatible = "allwinner,sun50i-h616-crypto",
	  .data = &ce_h616_variant },
	{}
};
MODULE_DEVICE_TABLE(of, sun8i_ce_crypto_of_match_table);
static struct platform_driver sun8i_ce_driver = {
	.probe = sun8i_ce_probe,
	.remove = sun8i_ce_remove,
	.driver = {
		.name = "sun8i-ce",
		.pm = &sun8i_ce_pm_ops,
		.of_match_table = sun8i_ce_crypto_of_match_table,
	},
};

module_platform_driver(sun8i_ce_driver);
MODULE_DESCRIPTION("Allwinner Crypto Engine cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <clabbe.montjoie@gmail.com>");