// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#define DRIVER_NAME             "stm32-cryp"
/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bit [31..16] status  */
#define FLG_CCM_PADDED_WA       BIT(16)
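/*
 * Note: the mode bits (algo, chaining, direction) travel with each request
 * through FLG_MODE_MASK and are merged into cryp->flags when the request is
 * prepared; bits above 15 carry transient device status such as the CCM
 * padding workaround flag.
 */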
/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070
/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)
/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY	50
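/*
 * _walked_in/_walked_out give the number of bytes already consumed in the
 * current scatterlist entry, i.e. the walk offset relative to the start of
 * that entry. They are only meaningful while in_sg/out_sg point at the entry
 * the corresponding scatter_walk is traversing.
 */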
struct stm32_cryp_caps {
	bool			swap_final;
	bool			padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp	*cryp;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long		flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head	list;
	struct device		*dev;
	void __iomem		*regs;
	struct clk		*clk;
	unsigned long		flags;
	u32			irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx	*ctx;

	struct crypto_engine	*engine;

	struct skcipher_request	*req;
	struct aead_request	*areq;

	size_t			authsize;
	size_t			hw_blocksize;

	size_t			total_in;
	size_t			total_in_save;
	size_t			total_out;
	size_t			total_out_save;

	struct scatterlist	*in_sg;
	struct scatterlist	*out_sg;
	struct scatterlist	*out_sg_save;

	struct scatterlist	in_sgl;
	struct scatterlist	out_sgl;
	bool			sgs_copied;

	int			in_sg_len;
	int			out_sg_len;

	struct scatter_walk	in_walk;
	struct scatter_walk	out_walk;

	u32			last_ctr[4];
	u32			gcm_ctr;
};
struct stm32_cryp_list {
	struct list_head dev_list;
	spinlock_t lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};
static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}
static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}
static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
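/*
 * A tfm is bound to the first CRYP instance found in the device list on its
 * very first request and keeps using that instance afterwards; dev_list
 * holds one entry per probed device.
 */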
static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}
static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}
static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}
static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}
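/*
 * The CRYP FIFOs are fed with full 32-bit words and whole hardware blocks.
 * When a request is not suitably aligned, the scatterlists are linearized
 * into bounce buffers below and the result is copied back in
 * stm32_cryp_finish_req().
 */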
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		cryp->sgs_copied = 0;
		return -EFAULT;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}
static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}
static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	u32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}
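/*
 * Key words are written in big-endian order starting from the last register
 * (CRYP_K3RR) downwards, which right-aligns 128/192/256-bit AES keys in the
 * key register bank. DES only uses the K1 register pair.
 */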
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}
static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}
static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}
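/*
 * GCM with a 96-bit IV: the block with counter value 1 is used by the
 * hardware for the final tag computation, so payload processing starts at
 * counter value 2. That is what GCM_CTR_INIT seeds below and what gcm_ctr
 * tracks afterwards.
 */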
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (gcm init)\n");

	return ret;
}
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		if (!cryp->caps->padding_wa)
			*d = cpu_to_be32(*d);
		stm32_cryp_write(cryp, CRYP_DIN, *d++);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (ccm init)\n");

	return ret;
}
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		/* Phase 2 : header (authenticated data) */
		if (cryp->areq->assoclen) {
			cfg |= CR_PH_HEADER;
		} else if (stm32_cryp_get_input_text_len(cryp)) {
			cfg |= CR_PH_PAYLOAD;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			cfg |= CR_PH_INIT;
		}
		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cryp->flags &= ~FLG_CCM_PADDED_WA;

	return 0;
}
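/*
 * End-of-request path: for AEAD modes the authentication tag is produced or
 * checked in the final phase; for plain chaining modes the updated IV is
 * read back so software can chain subsequent requests.
 */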
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp))))
		stm32_cryp_get_iv(cryp);

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req,
						 err);

	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
}
static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}
static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}
static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}
static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}
static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}
static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	return stm32_cryp_setkey(tfm, key, keylen);
}
static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}
static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
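/*
 * Request entry points below only tag the request with its mode flags and
 * queue it on the crypto engine; all actual processing happens from the
 * prepare/do_one_request callbacks.
 */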
static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}
static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->total_in = req->cryptlen;
		cryp->total_out = cryp->total_in;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData  ||   PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *          <------- total_in ----------->
		 *
		 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
		 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
		 *          <---------------- total_out ----------------->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData  ||  CipherText  ||  AuthTag
		 *          <- assoclen ->  <--------- cryptlen --------->
		 *                                          <- authsize ->
		 *          <---------------- total_in ------------------>
		 *
		 *  OUTPUT =   AssocData  ||   PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 *          <---------- total_out ----------------->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		cryp->total_in = areq->assoclen + areq->cryptlen;
		if (is_encrypt(cryp))
			/* Append auth tag to output */
			cryp->total_out = cryp->total_in + cryp->authsize;
		else
			/* No auth tag in output */
			cryp->total_out = cryp->total_in - cryp->authsize;
	}

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req ? req->src : areq->src;
	cryp->out_sg = req ? req->dst : areq->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		return ret;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		return ret;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		return ret;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
		cryp->total_out -= cryp->areq->assoclen;
	}

	ret = stm32_cryp_hw_init(cryp);

	return ret;
}
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);

	return stm32_cryp_prepare_req(req, NULL);
}
static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}
static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->areq->assoclen &&
		     !stm32_cryp_get_input_text_len(cryp))) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}
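/*
 * FIFO walk helpers: advance the scatter walk by n bytes and, when the
 * current scatterlist entry is exhausted, hop to the next one, returning a
 * virtual address usable for the next FIFO access.
 */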
static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}
static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit, *dst, d32;
	u8 *d8;
	unsigned int i, j;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - AES_BLOCK_SIZE;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u8 iv[AES_BLOCK_SIZE];
		u32 *iv32 = (u32 *)iv;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (!cryp->caps->padding_wa)
				*iv32 = cpu_to_be32(*iv32);
			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		/* Get and write tag */
		dst = sg_virt(cryp->out_sg) + _walked_out;

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (cryp->total_out >= sizeof(u32)) {
				/* Read a full u32 */
				*dst = stm32_cryp_read(cryp, CRYP_DOUT);

				dst = stm32_cryp_next_out(cryp, dst,
							  sizeof(u32));
				cryp->total_out -= sizeof(u32);
			} else if (!cryp->total_out) {
				/* Empty fifo out (data from input padding) */
				stm32_cryp_read(cryp, CRYP_DOUT);
			} else {
				/* Read less than an u32 */
				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
				d8 = (u8 *)&d32;

				for (j = 0; j < cryp->total_out; j++) {
					*((u8 *)dst) = *(d8++);
					dst = stm32_cryp_next_out(cryp, dst, 1);
				}
				cryp->total_out = 0;
			}
		}
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
					 cryp->total_in_save - cryp->authsize,
					 cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}
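/*
 * The hardware CTR counter register is only 32 bits wide, so the carry into
 * the upper IV words must be propagated by software before each block when
 * the low word is about to wrap.
 */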
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}
static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;
	size_t tag_size;

	/* Do not read tag now (if any) */
	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (cryp->total_out == tag_size) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out - tag_size; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = tag_size;
		}
	}

	return !(cryp->total_out - tag_size) || !cryp->total_in;
}
static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];
	size_t tag_size;

	/* Do not write tag (if any) */
	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (cryp->total_in == tag_size) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in - tag_size; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = tag_size;
		}
	}
}
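/*
 * The two "padded data" routines below implement the reference manual
 * workaround for final blocks shorter than 16 bytes on silicon without
 * NPBLB support: the last block is run through the engine in CTR mode and
 * the result is re-injected into the GCM/CCM final phase.
 */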
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, tmp[AES_BLOCK_32];
	size_t total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	stm32_cryp_irq_read_data(cryp);
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_in_save - total_in_ori,
				 total_in_ori, 0);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++) {
		if (cryp->total_in)
			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
		else
			stm32_cryp_write(cryp, CRYP_DIN, 0);

		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
	}

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg, payload_bytes;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
					   cryp->total_in;
	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
	size_t last_total_out, total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
	cryp->flags |= FLG_CCM_PADDED_WA;

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	last_total_out = cryp->total_out;
	stm32_cryp_irq_read_data(cryp);

	memset(tmp, 0, sizeof(tmp));
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_out_save - last_total_out,
				 last_total_out, 0);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
		tmp[i] ^= cstmp1[i];
		tmp[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa)
			/* Special case 1 */
			return stm32_cryp_irq_write_gcm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa)
			/* Special case 2 */
			return stm32_cryp_irq_write_ccm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}
static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}
*cryp
)
1585 unsigned int i
= 0, j
, k
;
1586 u32 alen
, cfg
, *src
;
1589 src
= sg_virt(cryp
->in_sg
) + _walked_in
;
1590 alen
= cryp
->areq
->assoclen
;
1593 if (cryp
->areq
->assoclen
<= 65280) {
1594 /* Write first u32 of B1 */
1595 d8
[0] = (alen
>> 8) & 0xFF;
1596 d8
[1] = alen
& 0xFF;
1597 d8
[2] = *((u8
*)src
);
1598 src
= stm32_cryp_next_in(cryp
, src
, 1);
1599 d8
[3] = *((u8
*)src
);
1600 src
= stm32_cryp_next_in(cryp
, src
, 1);
1602 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1605 cryp
->total_in
-= min_t(size_t, 2, cryp
->total_in
);
1607 /* Build the two first u32 of B1 */
1610 d8
[2] = alen
& 0xFF000000;
1611 d8
[3] = alen
& 0x00FF0000;
1613 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1616 d8
[0] = alen
& 0x0000FF00;
1617 d8
[1] = alen
& 0x000000FF;
1618 d8
[2] = *((u8
*)src
);
1619 src
= stm32_cryp_next_in(cryp
, src
, 1);
1620 d8
[3] = *((u8
*)src
);
1621 src
= stm32_cryp_next_in(cryp
, src
, 1);
1623 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1626 cryp
->total_in
-= min_t(size_t, 2, cryp
->total_in
);
1630 /* Write next u32 */
1631 for (; i
< AES_BLOCK_32
; i
++) {
1633 memset(d8
, 0, sizeof(u32
));
1634 for (k
= 0; k
< sizeof(u32
); k
++) {
1635 d8
[k
] = *((u8
*)src
);
1636 src
= stm32_cryp_next_in(cryp
, src
, 1);
1638 cryp
->total_in
-= min_t(size_t, 1, cryp
->total_in
);
1639 if ((cryp
->total_in_save
- cryp
->total_in
) == alen
)
1643 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1645 if ((cryp
->total_in_save
- cryp
->total_in
) == alen
) {
1646 /* Write padding if needed */
1647 for (j
= i
+ 1; j
< AES_BLOCK_32
; j
++)
1648 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1650 /* Wait for completion */
1651 err
= stm32_cryp_wait_busy(cryp
);
1653 dev_err(cryp
->dev
, "Timeout (ccm header)\n");
1654 return stm32_cryp_finish_req(cryp
, err
);
1657 if (stm32_cryp_get_input_text_len(cryp
)) {
1658 /* Phase 3 : payload */
1659 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1661 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1664 cfg
|= CR_PH_PAYLOAD
;
1666 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1669 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
1670 stm32_cryp_finish_req(cryp
, 0);
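/*
 * Interrupt handling: the hard IRQ handler only snapshots the masked status
 * register; all FIFO draining and refilling is done from the threaded
 * handler below.
 */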
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}
static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}
static struct skcipher_alg crypto_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "stm32-ecb-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ecb_encrypt,
	.decrypt		= stm32_cryp_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "stm32-cbc-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_cbc_encrypt,
	.decrypt		= stm32_cryp_aes_cbc_decrypt,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "stm32-ctr-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ctr_encrypt,
	.decrypt		= stm32_cryp_aes_ctr_decrypt,
},
{
	.base.cra_name		= "ecb(des)",
	.base.cra_driver_name	= "stm32-ecb-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_BLOCK_SIZE,
	.max_keysize		= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_ecb_encrypt,
	.decrypt		= stm32_cryp_des_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des)",
	.base.cra_driver_name	= "stm32-cbc-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_BLOCK_SIZE,
	.max_keysize		= DES_BLOCK_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_cbc_encrypt,
	.decrypt		= stm32_cryp_des_cbc_decrypt,
},
{
	.base.cra_name		= "ecb(des3_ede)",
	.base.cra_driver_name	= "stm32-ecb-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= 3 * DES_BLOCK_SIZE,
	.max_keysize		= 3 * DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_ecb_encrypt,
	.decrypt		= stm32_cryp_tdes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des3_ede)",
	.base.cra_driver_name	= "stm32-cbc-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= 3 * DES_BLOCK_SIZE,
	.max_keysize		= 3 * DES_BLOCK_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_cbc_encrypt,
	.decrypt		= stm32_cryp_tdes_cbc_decrypt,
},
};
static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};
static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}
static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_get_sync(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif
static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};
static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);
MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");