// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */
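/*
 * This driver exposes the STM32 CRYP block through the kernel crypto API:
 * AES/DES/3DES in ECB/CBC (plus AES-CTR) as skciphers and AES-GCM/CCM as
 * AEADs, all scheduled through the crypto engine framework and fed by the
 * CPU from the interrupt handlers.
 */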
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA       BIT(16)
/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Register values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY  50
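/*
 * AES_BLOCK_32 is the AES block size expressed in 32-bit words (4), the
 * granularity used for all FIFO accesses. GCM_CTR_INIT is the counter value
 * of the first payload block (counter 1 is used for the authentication tag).
 * _walked_in/_walked_out give the number of bytes already consumed in the
 * current input/output scatterlist entry.
 */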
struct stm32_cryp_caps {
	bool			swap_final;
	bool			padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp	*cryp;
	__be32			key[AES_KEYSIZE_256 / sizeof(u32)];
	/* ... */
};

struct stm32_cryp_reqctx {
	/* ... */
};

struct stm32_cryp {
	struct list_head	list;
	/* ... */
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx	*ctx;

	struct crypto_engine	*engine;

	struct skcipher_request	*req;
	struct aead_request	*areq;

	/* ... */
	size_t			total_in_save;
	/* ... */
	size_t			total_out_save;

	struct scatterlist	*in_sg;
	struct scatterlist	*out_sg;
	struct scatterlist	*out_sg_save;

	struct scatterlist	in_sgl;
	struct scatterlist	out_sgl;
	/* ... */

	struct scatter_walk	in_walk;
	struct scatter_walk	out_walk;
	/* ... */
};

struct stm32_cryp_list {
	struct list_head	dev_list;
	spinlock_t		lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};
static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
					  !(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
					  !(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
					  status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);

	list_for_each_entry(tmp, &cryp_list.dev_list, list) {
		/* ... */
	}
	/* ... */

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}
286 static int stm32_cryp_check_aligned(struct scatterlist
*sg
, size_t total
,
294 if (!IS_ALIGNED(total
, align
))
298 if (!IS_ALIGNED(sg
->offset
, sizeof(u32
)))
301 if (!IS_ALIGNED(sg
->length
, align
))
314 static int stm32_cryp_check_io_aligned(struct stm32_cryp
*cryp
)
318 ret
= stm32_cryp_check_aligned(cryp
->in_sg
, cryp
->total_in
,
323 ret
= stm32_cryp_check_aligned(cryp
->out_sg
, cryp
->total_out
,
329 static void sg_copy_buf(void *buf
, struct scatterlist
*sg
,
330 unsigned int start
, unsigned int nbytes
, int out
)
332 struct scatter_walk walk
;
337 scatterwalk_start(&walk
, sg
);
338 scatterwalk_advance(&walk
, start
);
339 scatterwalk_copychunks(buf
, &walk
, nbytes
, out
);
340 scatterwalk_done(&walk
, out
, 0);
343 static int stm32_cryp_copy_sgs(struct stm32_cryp
*cryp
)
345 void *buf_in
, *buf_out
;
346 int pages
, total_in
, total_out
;
348 if (!stm32_cryp_check_io_aligned(cryp
)) {
349 cryp
->sgs_copied
= 0;
353 total_in
= ALIGN(cryp
->total_in
, cryp
->hw_blocksize
);
354 pages
= total_in
? get_order(total_in
) : 1;
355 buf_in
= (void *)__get_free_pages(GFP_ATOMIC
, pages
);
357 total_out
= ALIGN(cryp
->total_out
, cryp
->hw_blocksize
);
358 pages
= total_out
? get_order(total_out
) : 1;
359 buf_out
= (void *)__get_free_pages(GFP_ATOMIC
, pages
);
361 if (!buf_in
|| !buf_out
) {
362 dev_err(cryp
->dev
, "Can't allocate pages when unaligned\n");
363 cryp
->sgs_copied
= 0;
367 sg_copy_buf(buf_in
, cryp
->in_sg
, 0, cryp
->total_in
, 0);
369 sg_init_one(&cryp
->in_sgl
, buf_in
, total_in
);
370 cryp
->in_sg
= &cryp
->in_sgl
;
373 sg_init_one(&cryp
->out_sgl
, buf_out
, total_out
);
374 cryp
->out_sg_save
= cryp
->out_sg
;
375 cryp
->out_sg
= &cryp
->out_sgl
;
376 cryp
->out_sg_len
= 1;
378 cryp
->sgs_copied
= 1;
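/*
 * IV handling: the 64-bit (DES/TDES) or 128-bit (AES) IV is loaded into the
 * IV0L/R (and IV1L/R) registers most-significant word first, and read back
 * the same way so that a chained request can resume from the updated IV.
 */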
static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
{
	/* ... */
	stm32_cryp_write(cryp, CRYP_IV0LR, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, be32_to_cpu(*iv++));
	/* ... */
	stm32_cryp_write(cryp, CRYP_IV1LR, be32_to_cpu(*iv++));
	stm32_cryp_write(cryp, CRYP_IV1RR, be32_to_cpu(*iv++));
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	__be32 *tmp = (void *)req->iv;

	/* ... */
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));
	/* ... */
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
}
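/*
 * Key loading: DES/TDES keys are written starting at K1L; AES keys are
 * written backwards, last 32-bit word first, so that 128/192/256-bit keys
 * all end up right-aligned in the key register bank.
 */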
414 static void stm32_cryp_hw_write_key(struct stm32_cryp
*c
)
420 stm32_cryp_write(c
, CRYP_K1LR
, be32_to_cpu(c
->ctx
->key
[0]));
421 stm32_cryp_write(c
, CRYP_K1RR
, be32_to_cpu(c
->ctx
->key
[1]));
424 for (i
= c
->ctx
->keylen
/ sizeof(u32
); i
> 0; i
--, r_id
-= 4)
425 stm32_cryp_write(c
, r_id
,
426 be32_to_cpu(c
->ctx
->key
[i
- 1]));
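/* Translate the request flags (algorithm + chaining mode) into the ALGOMODE value for CRYP_CR */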
static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}
463 static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp
*cryp
)
465 return is_encrypt(cryp
) ? cryp
->areq
->cryptlen
:
466 cryp
->areq
->cryptlen
- cryp
->authsize
;
469 static int stm32_cryp_gcm_init(struct stm32_cryp
*cryp
, u32 cfg
)
475 memcpy(iv
, cryp
->areq
->iv
, 12);
476 iv
[3] = cpu_to_be32(GCM_CTR_INIT
);
477 cryp
->gcm_ctr
= GCM_CTR_INIT
;
478 stm32_cryp_hw_write_iv(cryp
, iv
);
480 stm32_cryp_write(cryp
, CRYP_CR
, cfg
| CR_PH_INIT
| CR_CRYPEN
);
482 /* Wait for end of processing */
483 ret
= stm32_cryp_wait_enable(cryp
);
485 dev_err(cryp
->dev
, "Timeout (gcm init)\n");
490 static int stm32_cryp_ccm_init(struct stm32_cryp
*cryp
, u32 cfg
)
493 u8 iv
[AES_BLOCK_SIZE
], b0
[AES_BLOCK_SIZE
];
496 unsigned int i
, textlen
;
498 /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
499 memcpy(iv
, cryp
->areq
->iv
, AES_BLOCK_SIZE
);
500 memset(iv
+ AES_BLOCK_SIZE
- 1 - iv
[0], 0, iv
[0] + 1);
501 iv
[AES_BLOCK_SIZE
- 1] = 1;
502 stm32_cryp_hw_write_iv(cryp
, (__be32
*)iv
);
505 memcpy(b0
, iv
, AES_BLOCK_SIZE
);
507 b0
[0] |= (8 * ((cryp
->authsize
- 2) / 2));
509 if (cryp
->areq
->assoclen
)
512 textlen
= stm32_cryp_get_input_text_len(cryp
);
514 b0
[AES_BLOCK_SIZE
- 2] = textlen
>> 8;
515 b0
[AES_BLOCK_SIZE
- 1] = textlen
& 0xFF;
518 stm32_cryp_write(cryp
, CRYP_CR
, cfg
| CR_PH_INIT
| CR_CRYPEN
);
524 for (i
= 0; i
< AES_BLOCK_32
; i
++) {
527 if (!cryp
->caps
->padding_wa
)
528 xd
= be32_to_cpu(bd
[i
]);
529 stm32_cryp_write(cryp
, CRYP_DIN
, xd
);
532 /* Wait for end of processing */
533 ret
= stm32_cryp_wait_enable(cryp
);
535 dev_err(cryp
->dev
, "Timeout (ccm init)\n");
540 static int stm32_cryp_hw_init(struct stm32_cryp
*cryp
)
545 pm_runtime_get_sync(cryp
->dev
);
547 /* Disable interrupt */
548 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
551 stm32_cryp_hw_write_key(cryp
);
553 /* Set configuration */
554 cfg
= CR_DATA8
| CR_FFLUSH
;
556 switch (cryp
->ctx
->keylen
) {
557 case AES_KEYSIZE_128
:
561 case AES_KEYSIZE_192
:
566 case AES_KEYSIZE_256
:
571 hw_mode
= stm32_cryp_get_hw_mode(cryp
);
572 if (hw_mode
== CR_AES_UNKNOWN
)
575 /* AES ECB/CBC decrypt: run key preparation first */
576 if (is_decrypt(cryp
) &&
577 ((hw_mode
== CR_AES_ECB
) || (hw_mode
== CR_AES_CBC
))) {
578 stm32_cryp_write(cryp
, CRYP_CR
, cfg
| CR_AES_KP
| CR_CRYPEN
);
580 /* Wait for end of processing */
581 ret
= stm32_cryp_wait_busy(cryp
);
583 dev_err(cryp
->dev
, "Timeout (key preparation)\n");
590 if (is_decrypt(cryp
))
591 cfg
|= CR_DEC_NOT_ENC
;
593 /* Apply config and flush (valid when CRYPEN = 0) */
594 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
600 if (hw_mode
== CR_AES_CCM
)
601 ret
= stm32_cryp_ccm_init(cryp
, cfg
);
603 ret
= stm32_cryp_gcm_init(cryp
, cfg
);
608 /* Phase 2 : header (authenticated data) */
609 if (cryp
->areq
->assoclen
) {
611 } else if (stm32_cryp_get_input_text_len(cryp
)) {
612 cfg
|= CR_PH_PAYLOAD
;
613 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
624 stm32_cryp_hw_write_iv(cryp
, (__be32
*)cryp
->req
->iv
);
634 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
636 cryp
->flags
&= ~FLG_CCM_PADDED_WA
;
641 static void stm32_cryp_finish_req(struct stm32_cryp
*cryp
, int err
)
643 if (!err
&& (is_gcm(cryp
) || is_ccm(cryp
)))
644 /* Phase 4 : output tag */
645 err
= stm32_cryp_read_auth_tag(cryp
);
647 if (!err
&& (!(is_gcm(cryp
) || is_ccm(cryp
))))
648 stm32_cryp_get_iv(cryp
);
650 if (cryp
->sgs_copied
) {
651 void *buf_in
, *buf_out
;
654 buf_in
= sg_virt(&cryp
->in_sgl
);
655 buf_out
= sg_virt(&cryp
->out_sgl
);
657 sg_copy_buf(buf_out
, cryp
->out_sg_save
, 0,
658 cryp
->total_out_save
, 1);
660 len
= ALIGN(cryp
->total_in_save
, cryp
->hw_blocksize
);
661 pages
= len
? get_order(len
) : 1;
662 free_pages((unsigned long)buf_in
, pages
);
664 len
= ALIGN(cryp
->total_out_save
, cryp
->hw_blocksize
);
665 pages
= len
? get_order(len
) : 1;
666 free_pages((unsigned long)buf_out
, pages
);
669 pm_runtime_mark_last_busy(cryp
->dev
);
670 pm_runtime_put_autosuspend(cryp
->dev
);
672 if (is_gcm(cryp
) || is_ccm(cryp
))
673 crypto_finalize_aead_request(cryp
->engine
, cryp
->areq
, err
);
675 crypto_finalize_skcipher_request(cryp
->engine
, cryp
->req
,
678 memset(cryp
->ctx
->key
, 0, cryp
->ctx
->keylen
);
681 static int stm32_cryp_cpu_start(struct stm32_cryp
*cryp
)
683 /* Enable interrupt and let the IRQ handler do everything */
684 stm32_cryp_write(cryp
, CRYP_IMSCR
, IMSCR_IN
| IMSCR_OUT
);
689 static int stm32_cryp_cipher_one_req(struct crypto_engine
*engine
, void *areq
);
690 static int stm32_cryp_prepare_cipher_req(struct crypto_engine
*engine
,
693 static int stm32_cryp_init_tfm(struct crypto_skcipher
*tfm
)
695 struct stm32_cryp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
697 crypto_skcipher_set_reqsize(tfm
, sizeof(struct stm32_cryp_reqctx
));
699 ctx
->enginectx
.op
.do_one_request
= stm32_cryp_cipher_one_req
;
700 ctx
->enginectx
.op
.prepare_request
= stm32_cryp_prepare_cipher_req
;
701 ctx
->enginectx
.op
.unprepare_request
= NULL
;
705 static int stm32_cryp_aead_one_req(struct crypto_engine
*engine
, void *areq
);
706 static int stm32_cryp_prepare_aead_req(struct crypto_engine
*engine
,
709 static int stm32_cryp_aes_aead_init(struct crypto_aead
*tfm
)
711 struct stm32_cryp_ctx
*ctx
= crypto_aead_ctx(tfm
);
713 tfm
->reqsize
= sizeof(struct stm32_cryp_reqctx
);
715 ctx
->enginectx
.op
.do_one_request
= stm32_cryp_aead_one_req
;
716 ctx
->enginectx
.op
.prepare_request
= stm32_cryp_prepare_aead_req
;
717 ctx
->enginectx
.op
.unprepare_request
= NULL
;
722 static int stm32_cryp_crypt(struct skcipher_request
*req
, unsigned long mode
)
724 struct stm32_cryp_ctx
*ctx
= crypto_skcipher_ctx(
725 crypto_skcipher_reqtfm(req
));
726 struct stm32_cryp_reqctx
*rctx
= skcipher_request_ctx(req
);
727 struct stm32_cryp
*cryp
= stm32_cryp_find_dev(ctx
);
734 return crypto_transfer_skcipher_request_to_engine(cryp
->engine
, req
);
737 static int stm32_cryp_aead_crypt(struct aead_request
*req
, unsigned long mode
)
739 struct stm32_cryp_ctx
*ctx
= crypto_aead_ctx(crypto_aead_reqtfm(req
));
740 struct stm32_cryp_reqctx
*rctx
= aead_request_ctx(req
);
741 struct stm32_cryp
*cryp
= stm32_cryp_find_dev(ctx
);
748 return crypto_transfer_aead_request_to_engine(cryp
->engine
, req
);
751 static int stm32_cryp_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
754 struct stm32_cryp_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
756 memcpy(ctx
->key
, key
, keylen
);
757 ctx
->keylen
= keylen
;
762 static int stm32_cryp_aes_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
765 if (keylen
!= AES_KEYSIZE_128
&& keylen
!= AES_KEYSIZE_192
&&
766 keylen
!= AES_KEYSIZE_256
)
769 return stm32_cryp_setkey(tfm
, key
, keylen
);
772 static int stm32_cryp_des_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
775 return verify_skcipher_des_key(tfm
, key
) ?:
776 stm32_cryp_setkey(tfm
, key
, keylen
);
779 static int stm32_cryp_tdes_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
782 return verify_skcipher_des3_key(tfm
, key
) ?:
783 stm32_cryp_setkey(tfm
, key
, keylen
);
786 static int stm32_cryp_aes_aead_setkey(struct crypto_aead
*tfm
, const u8
*key
,
789 struct stm32_cryp_ctx
*ctx
= crypto_aead_ctx(tfm
);
791 if (keylen
!= AES_KEYSIZE_128
&& keylen
!= AES_KEYSIZE_192
&&
792 keylen
!= AES_KEYSIZE_256
)
795 memcpy(ctx
->key
, key
, keylen
);
796 ctx
->keylen
= keylen
;
801 static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead
*tfm
,
802 unsigned int authsize
)
804 return authsize
== AES_BLOCK_SIZE
? 0 : -EINVAL
;
807 static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead
*tfm
,
808 unsigned int authsize
)
826 static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request
*req
)
828 return stm32_cryp_crypt(req
, FLG_AES
| FLG_ECB
| FLG_ENCRYPT
);
831 static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request
*req
)
833 return stm32_cryp_crypt(req
, FLG_AES
| FLG_ECB
);
836 static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request
*req
)
838 return stm32_cryp_crypt(req
, FLG_AES
| FLG_CBC
| FLG_ENCRYPT
);
841 static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request
*req
)
843 return stm32_cryp_crypt(req
, FLG_AES
| FLG_CBC
);
846 static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request
*req
)
848 return stm32_cryp_crypt(req
, FLG_AES
| FLG_CTR
| FLG_ENCRYPT
);
851 static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request
*req
)
853 return stm32_cryp_crypt(req
, FLG_AES
| FLG_CTR
);
856 static int stm32_cryp_aes_gcm_encrypt(struct aead_request
*req
)
858 return stm32_cryp_aead_crypt(req
, FLG_AES
| FLG_GCM
| FLG_ENCRYPT
);
861 static int stm32_cryp_aes_gcm_decrypt(struct aead_request
*req
)
863 return stm32_cryp_aead_crypt(req
, FLG_AES
| FLG_GCM
);
866 static int stm32_cryp_aes_ccm_encrypt(struct aead_request
*req
)
868 return stm32_cryp_aead_crypt(req
, FLG_AES
| FLG_CCM
| FLG_ENCRYPT
);
871 static int stm32_cryp_aes_ccm_decrypt(struct aead_request
*req
)
873 return stm32_cryp_aead_crypt(req
, FLG_AES
| FLG_CCM
);
876 static int stm32_cryp_des_ecb_encrypt(struct skcipher_request
*req
)
878 return stm32_cryp_crypt(req
, FLG_DES
| FLG_ECB
| FLG_ENCRYPT
);
881 static int stm32_cryp_des_ecb_decrypt(struct skcipher_request
*req
)
883 return stm32_cryp_crypt(req
, FLG_DES
| FLG_ECB
);
886 static int stm32_cryp_des_cbc_encrypt(struct skcipher_request
*req
)
888 return stm32_cryp_crypt(req
, FLG_DES
| FLG_CBC
| FLG_ENCRYPT
);
891 static int stm32_cryp_des_cbc_decrypt(struct skcipher_request
*req
)
893 return stm32_cryp_crypt(req
, FLG_DES
| FLG_CBC
);
896 static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request
*req
)
898 return stm32_cryp_crypt(req
, FLG_TDES
| FLG_ECB
| FLG_ENCRYPT
);
901 static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request
*req
)
903 return stm32_cryp_crypt(req
, FLG_TDES
| FLG_ECB
);
906 static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request
*req
)
908 return stm32_cryp_crypt(req
, FLG_TDES
| FLG_CBC
| FLG_ENCRYPT
);
911 static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request
*req
)
913 return stm32_cryp_crypt(req
, FLG_TDES
| FLG_CBC
);
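/*
 * Common preparation for cipher and AEAD requests: resolve the device and
 * context, compute total_in/total_out, copy unaligned scatterlists into
 * bounce buffers and program the hardware before the engine runs the request.
 */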
916 static int stm32_cryp_prepare_req(struct skcipher_request
*req
,
917 struct aead_request
*areq
)
919 struct stm32_cryp_ctx
*ctx
;
920 struct stm32_cryp
*cryp
;
921 struct stm32_cryp_reqctx
*rctx
;
927 ctx
= req
? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req
)) :
928 crypto_aead_ctx(crypto_aead_reqtfm(areq
));
935 rctx
= req
? skcipher_request_ctx(req
) : aead_request_ctx(areq
);
936 rctx
->mode
&= FLG_MODE_MASK
;
940 cryp
->flags
= (cryp
->flags
& ~FLG_MODE_MASK
) | rctx
->mode
;
941 cryp
->hw_blocksize
= is_aes(cryp
) ? AES_BLOCK_SIZE
: DES_BLOCK_SIZE
;
947 cryp
->total_in
= req
->cryptlen
;
948 cryp
->total_out
= cryp
->total_in
;
	/*
	 * Length of input and output data:
	 *
	 * Encryption case:
	 *  INPUT  =   AssocData   ||    PlainText
	 *          <- assoclen ->  <- cryptlen ->
	 *          <------- total_in ----------->
	 *
	 *  OUTPUT =   AssocData   ||   CipherText   ||    AuthTag
	 *          <- assoclen ->  <- cryptlen ->   <- authsize ->
	 *          <---------------- total_out ----------------->
	 *
	 * Decryption case:
	 *  INPUT  =   AssocData   ||   CipherText   ||    AuthTag
	 *          <- assoclen ->  <--------- cryptlen --------->
	 *          <---------------- total_in ------------------>
	 *
	 *  OUTPUT =   AssocData   ||    PlainText
	 *          <- assoclen ->  <- cryptlen - authsize ->
	 *          <---------- total_out ----------------->
	 */
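	/*
	 * Worked example (hypothetical request): GCM encryption with
	 * assoclen = 20, cryptlen = 100 and authsize = 16 gives
	 * total_in = 120 and total_out = 136; the matching decryption has
	 * cryptlen = 116, so total_in = 136 and total_out = 120.
	 */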
973 cryp
->authsize
= crypto_aead_authsize(crypto_aead_reqtfm(areq
));
974 cryp
->total_in
= areq
->assoclen
+ areq
->cryptlen
;
975 if (is_encrypt(cryp
))
976 /* Append auth tag to output */
977 cryp
->total_out
= cryp
->total_in
+ cryp
->authsize
;
979 /* No auth tag in output */
980 cryp
->total_out
= cryp
->total_in
- cryp
->authsize
;
983 cryp
->total_in_save
= cryp
->total_in
;
984 cryp
->total_out_save
= cryp
->total_out
;
986 cryp
->in_sg
= req
? req
->src
: areq
->src
;
987 cryp
->out_sg
= req
? req
->dst
: areq
->dst
;
988 cryp
->out_sg_save
= cryp
->out_sg
;
990 cryp
->in_sg_len
= sg_nents_for_len(cryp
->in_sg
, cryp
->total_in
);
991 if (cryp
->in_sg_len
< 0) {
992 dev_err(cryp
->dev
, "Cannot get in_sg_len\n");
993 ret
= cryp
->in_sg_len
;
997 cryp
->out_sg_len
= sg_nents_for_len(cryp
->out_sg
, cryp
->total_out
);
998 if (cryp
->out_sg_len
< 0) {
999 dev_err(cryp
->dev
, "Cannot get out_sg_len\n");
1000 ret
= cryp
->out_sg_len
;
1004 ret
= stm32_cryp_copy_sgs(cryp
);
1008 scatterwalk_start(&cryp
->in_walk
, cryp
->in_sg
);
1009 scatterwalk_start(&cryp
->out_walk
, cryp
->out_sg
);
1011 if (is_gcm(cryp
) || is_ccm(cryp
)) {
1012 /* In output, jump after assoc data */
1013 scatterwalk_advance(&cryp
->out_walk
, cryp
->areq
->assoclen
);
1014 cryp
->total_out
-= cryp
->areq
->assoclen
;
1017 ret
= stm32_cryp_hw_init(cryp
);
1021 static int stm32_cryp_prepare_cipher_req(struct crypto_engine
*engine
,
1024 struct skcipher_request
*req
= container_of(areq
,
1025 struct skcipher_request
,
1028 return stm32_cryp_prepare_req(req
, NULL
);
1031 static int stm32_cryp_cipher_one_req(struct crypto_engine
*engine
, void *areq
)
1033 struct skcipher_request
*req
= container_of(areq
,
1034 struct skcipher_request
,
1036 struct stm32_cryp_ctx
*ctx
= crypto_skcipher_ctx(
1037 crypto_skcipher_reqtfm(req
));
1038 struct stm32_cryp
*cryp
= ctx
->cryp
;
1043 return stm32_cryp_cpu_start(cryp
);
1046 static int stm32_cryp_prepare_aead_req(struct crypto_engine
*engine
, void *areq
)
1048 struct aead_request
*req
= container_of(areq
, struct aead_request
,
1051 return stm32_cryp_prepare_req(NULL
, req
);
1054 static int stm32_cryp_aead_one_req(struct crypto_engine
*engine
, void *areq
)
1056 struct aead_request
*req
= container_of(areq
, struct aead_request
,
1058 struct stm32_cryp_ctx
*ctx
= crypto_aead_ctx(crypto_aead_reqtfm(req
));
1059 struct stm32_cryp
*cryp
= ctx
->cryp
;
1064 if (unlikely(!cryp
->areq
->assoclen
&&
1065 !stm32_cryp_get_input_text_len(cryp
))) {
1066 /* No input data to process: get tag and finish */
1067 stm32_cryp_finish_req(cryp
, 0);
1071 return stm32_cryp_cpu_start(cryp
);
1074 static u32
*stm32_cryp_next_out(struct stm32_cryp
*cryp
, u32
*dst
,
1077 scatterwalk_advance(&cryp
->out_walk
, n
);
1079 if (unlikely(cryp
->out_sg
->length
== _walked_out
)) {
1080 cryp
->out_sg
= sg_next(cryp
->out_sg
);
1082 scatterwalk_start(&cryp
->out_walk
, cryp
->out_sg
);
1083 return (sg_virt(cryp
->out_sg
) + _walked_out
);
1087 return (u32
*)((u8
*)dst
+ n
);
1090 static u32
*stm32_cryp_next_in(struct stm32_cryp
*cryp
, u32
*src
,
1093 scatterwalk_advance(&cryp
->in_walk
, n
);
1095 if (unlikely(cryp
->in_sg
->length
== _walked_in
)) {
1096 cryp
->in_sg
= sg_next(cryp
->in_sg
);
1098 scatterwalk_start(&cryp
->in_walk
, cryp
->in_sg
);
1099 return (sg_virt(cryp
->in_sg
) + _walked_in
);
1103 return (u32
*)((u8
*)src
+ n
);
1106 static int stm32_cryp_read_auth_tag(struct stm32_cryp
*cryp
)
1108 u32 cfg
, size_bit
, *dst
, d32
;
1114 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1118 cfg
&= ~CR_DEC_NOT_ENC
;
1121 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1124 /* GCM: write aad and payload size (in bits) */
1125 size_bit
= cryp
->areq
->assoclen
* 8;
1126 if (cryp
->caps
->swap_final
)
1127 size_bit
= (__force u32
)cpu_to_be32(size_bit
);
1129 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1130 stm32_cryp_write(cryp
, CRYP_DIN
, size_bit
);
1132 size_bit
= is_encrypt(cryp
) ? cryp
->areq
->cryptlen
:
1133 cryp
->areq
->cryptlen
- AES_BLOCK_SIZE
;
1135 if (cryp
->caps
->swap_final
)
1136 size_bit
= (__force u32
)cpu_to_be32(size_bit
);
1138 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1139 stm32_cryp_write(cryp
, CRYP_DIN
, size_bit
);
1141 /* CCM: write CTR0 */
1142 u8 iv
[AES_BLOCK_SIZE
];
1143 u32
*iv32
= (u32
*)iv
;
1148 memcpy(iv
, cryp
->areq
->iv
, AES_BLOCK_SIZE
);
1149 memset(iv
+ AES_BLOCK_SIZE
- 1 - iv
[0], 0, iv
[0] + 1);
1151 for (i
= 0; i
< AES_BLOCK_32
; i
++) {
1154 if (!cryp
->caps
->padding_wa
)
1155 xiv
= be32_to_cpu(biv
[i
]);
1156 stm32_cryp_write(cryp
, CRYP_DIN
, xiv
);
1160 /* Wait for output data */
1161 ret
= stm32_cryp_wait_output(cryp
);
1163 dev_err(cryp
->dev
, "Timeout (read tag)\n");
1167 if (is_encrypt(cryp
)) {
1168 /* Get and write tag */
1169 dst
= sg_virt(cryp
->out_sg
) + _walked_out
;
1171 for (i
= 0; i
< AES_BLOCK_32
; i
++) {
1172 if (cryp
->total_out
>= sizeof(u32
)) {
1173 /* Read a full u32 */
1174 *dst
= stm32_cryp_read(cryp
, CRYP_DOUT
);
1176 dst
= stm32_cryp_next_out(cryp
, dst
,
1178 cryp
->total_out
-= sizeof(u32
);
1179 } else if (!cryp
->total_out
) {
1180 /* Empty fifo out (data from input padding) */
1181 stm32_cryp_read(cryp
, CRYP_DOUT
);
1183 /* Read less than an u32 */
1184 d32
= stm32_cryp_read(cryp
, CRYP_DOUT
);
1187 for (j
= 0; j
< cryp
->total_out
; j
++) {
1188 *((u8
*)dst
) = *(d8
++);
1189 dst
= stm32_cryp_next_out(cryp
, dst
, 1);
1191 cryp
->total_out
= 0;
1195 /* Get and check tag */
1196 u32 in_tag
[AES_BLOCK_32
], out_tag
[AES_BLOCK_32
];
1198 scatterwalk_map_and_copy(in_tag
, cryp
->in_sg
,
1199 cryp
->total_in_save
- cryp
->authsize
,
1202 for (i
= 0; i
< AES_BLOCK_32
; i
++)
1203 out_tag
[i
] = stm32_cryp_read(cryp
, CRYP_DOUT
);
1205 if (crypto_memneq(in_tag
, out_tag
, cryp
->authsize
))
1211 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
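/*
 * The CRYP block only increments the least-significant 32-bit word of the
 * CTR counter in hardware; on a 32-bit rollover the full 128-bit counter is
 * propagated by software and written back through the IV registers.
 */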
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}
1243 static bool stm32_cryp_irq_read_data(struct stm32_cryp
*cryp
)
	/* Do not read the tag now (if any) */
1251 if (is_encrypt(cryp
) && (is_gcm(cryp
) || is_ccm(cryp
)))
1252 tag_size
= cryp
->authsize
;
1256 dst
= sg_virt(cryp
->out_sg
) + _walked_out
;
1258 for (i
= 0; i
< cryp
->hw_blocksize
/ sizeof(u32
); i
++) {
1259 if (likely(cryp
->total_out
- tag_size
>= sizeof(u32
))) {
1260 /* Read a full u32 */
1261 *dst
= stm32_cryp_read(cryp
, CRYP_DOUT
);
1263 dst
= stm32_cryp_next_out(cryp
, dst
, sizeof(u32
));
1264 cryp
->total_out
-= sizeof(u32
);
1265 } else if (cryp
->total_out
== tag_size
) {
1266 /* Empty fifo out (data from input padding) */
1267 d32
= stm32_cryp_read(cryp
, CRYP_DOUT
);
1269 /* Read less than an u32 */
1270 d32
= stm32_cryp_read(cryp
, CRYP_DOUT
);
1273 for (j
= 0; j
< cryp
->total_out
- tag_size
; j
++) {
1274 *((u8
*)dst
) = *(d8
++);
1275 dst
= stm32_cryp_next_out(cryp
, dst
, 1);
1277 cryp
->total_out
= tag_size
;
1281 return !(cryp
->total_out
- tag_size
) || !cryp
->total_in
;
1284 static void stm32_cryp_irq_write_block(struct stm32_cryp
*cryp
)
	/* Do not write the tag (if any) */
1292 if (is_decrypt(cryp
) && (is_gcm(cryp
) || is_ccm(cryp
)))
1293 tag_size
= cryp
->authsize
;
1297 src
= sg_virt(cryp
->in_sg
) + _walked_in
;
1299 for (i
= 0; i
< cryp
->hw_blocksize
/ sizeof(u32
); i
++) {
1300 if (likely(cryp
->total_in
- tag_size
>= sizeof(u32
))) {
1301 /* Write a full u32 */
1302 stm32_cryp_write(cryp
, CRYP_DIN
, *src
);
1304 src
= stm32_cryp_next_in(cryp
, src
, sizeof(u32
));
1305 cryp
->total_in
-= sizeof(u32
);
1306 } else if (cryp
->total_in
== tag_size
) {
1307 /* Write padding data */
1308 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1310 /* Write less than an u32 */
1311 memset(d8
, 0, sizeof(u32
));
1312 for (j
= 0; j
< cryp
->total_in
- tag_size
; j
++) {
1313 d8
[j
] = *((u8
*)src
);
1314 src
= stm32_cryp_next_in(cryp
, src
, 1);
1317 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1318 cryp
->total_in
= tag_size
;
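/*
 * Workaround for AES-GCM encryption when the payload is not a multiple of
 * the block size and the IP cannot pad by itself (padding_wa): the last
 * block is ciphered in CTR mode with the current counter, the result is
 * read back, and the zero-padded ciphertext is then replayed into the GCM
 * final phase so that the authentication tag stays correct.
 */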
1323 static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp
*cryp
)
1326 u32 cfg
, tmp
[AES_BLOCK_32
];
1327 size_t total_in_ori
= cryp
->total_in
;
1328 struct scatterlist
*out_sg_ori
= cryp
->out_sg
;
1331 /* 'Special workaround' procedure described in the datasheet */
1334 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
1335 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1337 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1339 /* b) Update IV1R */
1340 stm32_cryp_write(cryp
, CRYP_IV1RR
, cryp
->gcm_ctr
- 2);
1342 /* c) change mode to CTR */
1343 cfg
&= ~CR_ALGO_MASK
;
1345 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1349 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1351 /* b) pad and write the last block */
1352 stm32_cryp_irq_write_block(cryp
);
1353 cryp
->total_in
= total_in_ori
;
1354 err
= stm32_cryp_wait_output(cryp
);
1356 dev_err(cryp
->dev
, "Timeout (write gcm header)\n");
1357 return stm32_cryp_finish_req(cryp
, err
);
1360 /* c) get and store encrypted data */
1361 stm32_cryp_irq_read_data(cryp
);
1362 scatterwalk_map_and_copy(tmp
, out_sg_ori
,
1363 cryp
->total_in_save
- total_in_ori
,
1366 /* d) change mode back to AES GCM */
1367 cfg
&= ~CR_ALGO_MASK
;
1369 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1371 /* e) change phase to Final */
1374 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1376 /* f) write padded data */
1377 for (i
= 0; i
< AES_BLOCK_32
; i
++) {
1379 stm32_cryp_write(cryp
, CRYP_DIN
, tmp
[i
]);
1381 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1383 cryp
->total_in
-= min_t(size_t, sizeof(u32
), cryp
->total_in
);
1386 /* g) Empty fifo out */
1387 err
= stm32_cryp_wait_output(cryp
);
1389 dev_err(cryp
->dev
, "Timeout (write gcm header)\n");
1390 return stm32_cryp_finish_req(cryp
, err
);
1393 for (i
= 0; i
< AES_BLOCK_32
; i
++)
1394 stm32_cryp_read(cryp
, CRYP_DOUT
);
	/* h) run the normal Final phase */
1397 stm32_cryp_finish_req(cryp
, 0);
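/*
 * Newer IP revisions expose the NPBLB field: the number of padding bytes in
 * the last block is programmed in CR so the hardware masks them itself and
 * the CTR-based software workaround is not needed.
 */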
1400 static void stm32_cryp_irq_set_npblb(struct stm32_cryp
*cryp
)
1402 u32 cfg
, payload_bytes
;
	/* Disable the peripheral, set NPBLB and re-enable the peripheral */
1405 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1407 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1409 payload_bytes
= is_decrypt(cryp
) ? cryp
->total_in
- cryp
->authsize
:
1411 cfg
|= (cryp
->hw_blocksize
- payload_bytes
) << CR_NBPBL_SHIFT
;
1413 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1416 static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp
*cryp
)
1420 u32 cstmp1
[AES_BLOCK_32
], cstmp2
[AES_BLOCK_32
], tmp
[AES_BLOCK_32
];
1421 size_t last_total_out
, total_in_ori
= cryp
->total_in
;
1422 struct scatterlist
*out_sg_ori
= cryp
->out_sg
;
1425 /* 'Special workaround' procedure described in the datasheet */
1426 cryp
->flags
|= FLG_CCM_PADDED_WA
;
1429 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
1431 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1433 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1435 /* b) get IV1 from CRYP_CSGCMCCM7 */
1436 iv1tmp
= stm32_cryp_read(cryp
, CRYP_CSGCMCCM0R
+ 7 * 4);
1438 /* c) Load CRYP_CSGCMCCMxR */
1439 for (i
= 0; i
< ARRAY_SIZE(cstmp1
); i
++)
1440 cstmp1
[i
] = stm32_cryp_read(cryp
, CRYP_CSGCMCCM0R
+ i
* 4);
1443 stm32_cryp_write(cryp
, CRYP_IV1RR
, iv1tmp
);
1445 /* e) change mode to CTR */
1446 cfg
&= ~CR_ALGO_MASK
;
1448 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1452 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1454 /* b) pad and write the last block */
1455 stm32_cryp_irq_write_block(cryp
);
1456 cryp
->total_in
= total_in_ori
;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}
1463 /* c) get and store decrypted data */
1464 last_total_out
= cryp
->total_out
;
1465 stm32_cryp_irq_read_data(cryp
);
1467 memset(tmp
, 0, sizeof(tmp
));
1468 scatterwalk_map_and_copy(tmp
, out_sg_ori
,
1469 cryp
->total_out_save
- last_total_out
,
1472 /* d) Load again CRYP_CSGCMCCMxR */
1473 for (i
= 0; i
< ARRAY_SIZE(cstmp2
); i
++)
1474 cstmp2
[i
] = stm32_cryp_read(cryp
, CRYP_CSGCMCCM0R
+ i
* 4);
1476 /* e) change mode back to AES CCM */
1477 cfg
&= ~CR_ALGO_MASK
;
1479 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1481 /* f) change phase to header */
1483 cfg
|= CR_PH_HEADER
;
1484 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1486 /* g) XOR and write padded data */
1487 for (i
= 0; i
< ARRAY_SIZE(tmp
); i
++) {
1488 tmp
[i
] ^= cstmp1
[i
];
1489 tmp
[i
] ^= cstmp2
[i
];
1490 stm32_cryp_write(cryp
, CRYP_DIN
, tmp
[i
]);
1493 /* h) wait for completion */
1494 err
= stm32_cryp_wait_busy(cryp
);
1496 dev_err(cryp
->dev
, "Timeout (wite ccm padded data)\n");
1498 /* i) run the he normal Final phase */
1499 stm32_cryp_finish_req(cryp
, err
);
1502 static void stm32_cryp_irq_write_data(struct stm32_cryp
*cryp
)
1504 if (unlikely(!cryp
->total_in
)) {
1505 dev_warn(cryp
->dev
, "No more data to process\n");
1509 if (unlikely(cryp
->total_in
< AES_BLOCK_SIZE
&&
1510 (stm32_cryp_get_hw_mode(cryp
) == CR_AES_GCM
) &&
1511 is_encrypt(cryp
))) {
1512 /* Padding for AES GCM encryption */
1513 if (cryp
->caps
->padding_wa
)
1514 /* Special case 1 */
1515 return stm32_cryp_irq_write_gcm_padded_data(cryp
);
		/* Set padding bytes (NPBLB) */
1518 stm32_cryp_irq_set_npblb(cryp
);
1521 if (unlikely((cryp
->total_in
- cryp
->authsize
< AES_BLOCK_SIZE
) &&
1522 (stm32_cryp_get_hw_mode(cryp
) == CR_AES_CCM
) &&
1523 is_decrypt(cryp
))) {
1524 /* Padding for AES CCM decryption */
1525 if (cryp
->caps
->padding_wa
)
1526 /* Special case 2 */
1527 return stm32_cryp_irq_write_ccm_padded_data(cryp
);
		/* Set padding bytes (NPBLB) */
1530 stm32_cryp_irq_set_npblb(cryp
);
1533 if (is_aes(cryp
) && is_ctr(cryp
))
1534 stm32_cryp_check_ctr_counter(cryp
);
1536 stm32_cryp_irq_write_block(cryp
);
1539 static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp
*cryp
)
1545 src
= sg_virt(cryp
->in_sg
) + _walked_in
;
1547 for (i
= 0; i
< AES_BLOCK_32
; i
++) {
1548 stm32_cryp_write(cryp
, CRYP_DIN
, *src
);
1550 src
= stm32_cryp_next_in(cryp
, src
, sizeof(u32
));
1551 cryp
->total_in
-= min_t(size_t, sizeof(u32
), cryp
->total_in
);
1553 /* Check if whole header written */
1554 if ((cryp
->total_in_save
- cryp
->total_in
) ==
1555 cryp
->areq
->assoclen
) {
1556 /* Write padding if needed */
1557 for (j
= i
+ 1; j
< AES_BLOCK_32
; j
++)
1558 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1560 /* Wait for completion */
1561 err
= stm32_cryp_wait_busy(cryp
);
1563 dev_err(cryp
->dev
, "Timeout (gcm header)\n");
1564 return stm32_cryp_finish_req(cryp
, err
);
1567 if (stm32_cryp_get_input_text_len(cryp
)) {
1568 /* Phase 3 : payload */
1569 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1571 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1574 cfg
|= CR_PH_PAYLOAD
;
1576 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1579 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
1580 stm32_cryp_finish_req(cryp
, 0);
1586 if (!cryp
->total_in
)
1591 static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp
*cryp
)
1594 unsigned int i
= 0, j
, k
;
1595 u32 alen
, cfg
, *src
;
1598 src
= sg_virt(cryp
->in_sg
) + _walked_in
;
1599 alen
= cryp
->areq
->assoclen
;
1602 if (cryp
->areq
->assoclen
<= 65280) {
1603 /* Write first u32 of B1 */
1604 d8
[0] = (alen
>> 8) & 0xFF;
1605 d8
[1] = alen
& 0xFF;
1606 d8
[2] = *((u8
*)src
);
1607 src
= stm32_cryp_next_in(cryp
, src
, 1);
1608 d8
[3] = *((u8
*)src
);
1609 src
= stm32_cryp_next_in(cryp
, src
, 1);
1611 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1614 cryp
->total_in
-= min_t(size_t, 2, cryp
->total_in
);
1616 /* Build the two first u32 of B1 */
1619 d8
[2] = alen
& 0xFF000000;
1620 d8
[3] = alen
& 0x00FF0000;
1622 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1625 d8
[0] = alen
& 0x0000FF00;
1626 d8
[1] = alen
& 0x000000FF;
1627 d8
[2] = *((u8
*)src
);
1628 src
= stm32_cryp_next_in(cryp
, src
, 1);
1629 d8
[3] = *((u8
*)src
);
1630 src
= stm32_cryp_next_in(cryp
, src
, 1);
1632 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1635 cryp
->total_in
-= min_t(size_t, 2, cryp
->total_in
);
1639 /* Write next u32 */
1640 for (; i
< AES_BLOCK_32
; i
++) {
1642 memset(d8
, 0, sizeof(u32
));
1643 for (k
= 0; k
< sizeof(u32
); k
++) {
1644 d8
[k
] = *((u8
*)src
);
1645 src
= stm32_cryp_next_in(cryp
, src
, 1);
1647 cryp
->total_in
-= min_t(size_t, 1, cryp
->total_in
);
1648 if ((cryp
->total_in_save
- cryp
->total_in
) == alen
)
1652 stm32_cryp_write(cryp
, CRYP_DIN
, *(u32
*)d8
);
1654 if ((cryp
->total_in_save
- cryp
->total_in
) == alen
) {
1655 /* Write padding if needed */
1656 for (j
= i
+ 1; j
< AES_BLOCK_32
; j
++)
1657 stm32_cryp_write(cryp
, CRYP_DIN
, 0);
1659 /* Wait for completion */
1660 err
= stm32_cryp_wait_busy(cryp
);
1662 dev_err(cryp
->dev
, "Timeout (ccm header)\n");
1663 return stm32_cryp_finish_req(cryp
, err
);
1666 if (stm32_cryp_get_input_text_len(cryp
)) {
1667 /* Phase 3 : payload */
1668 cfg
= stm32_cryp_read(cryp
, CRYP_CR
);
1670 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1673 cfg
|= CR_PH_PAYLOAD
;
1675 stm32_cryp_write(cryp
, CRYP_CR
, cfg
);
1678 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
1679 stm32_cryp_finish_req(cryp
, 0);
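/*
 * Threaded IRQ handling: the hard handler only latches MISR, the thread
 * drains the output FIFO and refills the input FIFO (header or payload)
 * until the whole request has been processed.
 */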
1687 static irqreturn_t
stm32_cryp_irq_thread(int irq
, void *arg
)
1689 struct stm32_cryp
*cryp
= arg
;
1692 if (cryp
->irq_status
& MISR_OUT
)
1693 /* Output FIFO IRQ: read data */
1694 if (unlikely(stm32_cryp_irq_read_data(cryp
))) {
1695 /* All bytes processed, finish */
1696 stm32_cryp_write(cryp
, CRYP_IMSCR
, 0);
1697 stm32_cryp_finish_req(cryp
, 0);
1701 if (cryp
->irq_status
& MISR_IN
) {
1703 ph
= stm32_cryp_read(cryp
, CRYP_CR
) & CR_PH_MASK
;
1704 if (unlikely(ph
== CR_PH_HEADER
))
1706 stm32_cryp_irq_write_gcm_header(cryp
);
1708 /* Input FIFO IRQ: write data */
1709 stm32_cryp_irq_write_data(cryp
);
1711 } else if (is_ccm(cryp
)) {
1712 ph
= stm32_cryp_read(cryp
, CRYP_CR
) & CR_PH_MASK
;
1713 if (unlikely(ph
== CR_PH_HEADER
))
1715 stm32_cryp_irq_write_ccm_header(cryp
);
1717 /* Input FIFO IRQ: write data */
1718 stm32_cryp_irq_write_data(cryp
);
1720 /* Input FIFO IRQ: write data */
1721 stm32_cryp_irq_write_data(cryp
);
1728 static irqreturn_t
stm32_cryp_irq(int irq
, void *arg
)
1730 struct stm32_cryp
*cryp
= arg
;
1732 cryp
->irq_status
= stm32_cryp_read(cryp
, CRYP_MISR
);
1734 return IRQ_WAKE_THREAD
;
1737 static struct skcipher_alg crypto_algs
[] = {
1739 .base
.cra_name
= "ecb(aes)",
1740 .base
.cra_driver_name
= "stm32-ecb-aes",
1741 .base
.cra_priority
= 200,
1742 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1743 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
1744 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1745 .base
.cra_alignmask
= 0xf,
1746 .base
.cra_module
= THIS_MODULE
,
1748 .init
= stm32_cryp_init_tfm
,
1749 .min_keysize
= AES_MIN_KEY_SIZE
,
1750 .max_keysize
= AES_MAX_KEY_SIZE
,
1751 .setkey
= stm32_cryp_aes_setkey
,
1752 .encrypt
= stm32_cryp_aes_ecb_encrypt
,
1753 .decrypt
= stm32_cryp_aes_ecb_decrypt
,
1756 .base
.cra_name
= "cbc(aes)",
1757 .base
.cra_driver_name
= "stm32-cbc-aes",
1758 .base
.cra_priority
= 200,
1759 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1760 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
1761 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1762 .base
.cra_alignmask
= 0xf,
1763 .base
.cra_module
= THIS_MODULE
,
1765 .init
= stm32_cryp_init_tfm
,
1766 .min_keysize
= AES_MIN_KEY_SIZE
,
1767 .max_keysize
= AES_MAX_KEY_SIZE
,
1768 .ivsize
= AES_BLOCK_SIZE
,
1769 .setkey
= stm32_cryp_aes_setkey
,
1770 .encrypt
= stm32_cryp_aes_cbc_encrypt
,
1771 .decrypt
= stm32_cryp_aes_cbc_decrypt
,
1774 .base
.cra_name
= "ctr(aes)",
1775 .base
.cra_driver_name
= "stm32-ctr-aes",
1776 .base
.cra_priority
= 200,
1777 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1778 .base
.cra_blocksize
= 1,
1779 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1780 .base
.cra_alignmask
= 0xf,
1781 .base
.cra_module
= THIS_MODULE
,
1783 .init
= stm32_cryp_init_tfm
,
1784 .min_keysize
= AES_MIN_KEY_SIZE
,
1785 .max_keysize
= AES_MAX_KEY_SIZE
,
1786 .ivsize
= AES_BLOCK_SIZE
,
1787 .setkey
= stm32_cryp_aes_setkey
,
1788 .encrypt
= stm32_cryp_aes_ctr_encrypt
,
1789 .decrypt
= stm32_cryp_aes_ctr_decrypt
,
1792 .base
.cra_name
= "ecb(des)",
1793 .base
.cra_driver_name
= "stm32-ecb-des",
1794 .base
.cra_priority
= 200,
1795 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1796 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
1797 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1798 .base
.cra_alignmask
= 0xf,
1799 .base
.cra_module
= THIS_MODULE
,
1801 .init
= stm32_cryp_init_tfm
,
1802 .min_keysize
= DES_BLOCK_SIZE
,
1803 .max_keysize
= DES_BLOCK_SIZE
,
1804 .setkey
= stm32_cryp_des_setkey
,
1805 .encrypt
= stm32_cryp_des_ecb_encrypt
,
1806 .decrypt
= stm32_cryp_des_ecb_decrypt
,
1809 .base
.cra_name
= "cbc(des)",
1810 .base
.cra_driver_name
= "stm32-cbc-des",
1811 .base
.cra_priority
= 200,
1812 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1813 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
1814 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1815 .base
.cra_alignmask
= 0xf,
1816 .base
.cra_module
= THIS_MODULE
,
1818 .init
= stm32_cryp_init_tfm
,
1819 .min_keysize
= DES_BLOCK_SIZE
,
1820 .max_keysize
= DES_BLOCK_SIZE
,
1821 .ivsize
= DES_BLOCK_SIZE
,
1822 .setkey
= stm32_cryp_des_setkey
,
1823 .encrypt
= stm32_cryp_des_cbc_encrypt
,
1824 .decrypt
= stm32_cryp_des_cbc_decrypt
,
1827 .base
.cra_name
= "ecb(des3_ede)",
1828 .base
.cra_driver_name
= "stm32-ecb-des3",
1829 .base
.cra_priority
= 200,
1830 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1831 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
1832 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1833 .base
.cra_alignmask
= 0xf,
1834 .base
.cra_module
= THIS_MODULE
,
1836 .init
= stm32_cryp_init_tfm
,
1837 .min_keysize
= 3 * DES_BLOCK_SIZE
,
1838 .max_keysize
= 3 * DES_BLOCK_SIZE
,
1839 .setkey
= stm32_cryp_tdes_setkey
,
1840 .encrypt
= stm32_cryp_tdes_ecb_encrypt
,
1841 .decrypt
= stm32_cryp_tdes_ecb_decrypt
,
1844 .base
.cra_name
= "cbc(des3_ede)",
1845 .base
.cra_driver_name
= "stm32-cbc-des3",
1846 .base
.cra_priority
= 200,
1847 .base
.cra_flags
= CRYPTO_ALG_ASYNC
,
1848 .base
.cra_blocksize
= DES_BLOCK_SIZE
,
1849 .base
.cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1850 .base
.cra_alignmask
= 0xf,
1851 .base
.cra_module
= THIS_MODULE
,
1853 .init
= stm32_cryp_init_tfm
,
1854 .min_keysize
= 3 * DES_BLOCK_SIZE
,
1855 .max_keysize
= 3 * DES_BLOCK_SIZE
,
1856 .ivsize
= DES_BLOCK_SIZE
,
1857 .setkey
= stm32_cryp_tdes_setkey
,
1858 .encrypt
= stm32_cryp_tdes_cbc_encrypt
,
1859 .decrypt
= stm32_cryp_tdes_cbc_decrypt
,
1863 static struct aead_alg aead_algs
[] = {
1865 .setkey
= stm32_cryp_aes_aead_setkey
,
1866 .setauthsize
= stm32_cryp_aes_gcm_setauthsize
,
1867 .encrypt
= stm32_cryp_aes_gcm_encrypt
,
1868 .decrypt
= stm32_cryp_aes_gcm_decrypt
,
1869 .init
= stm32_cryp_aes_aead_init
,
1871 .maxauthsize
= AES_BLOCK_SIZE
,
1874 .cra_name
= "gcm(aes)",
1875 .cra_driver_name
= "stm32-gcm-aes",
1876 .cra_priority
= 200,
1877 .cra_flags
= CRYPTO_ALG_ASYNC
,
1879 .cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1880 .cra_alignmask
= 0xf,
1881 .cra_module
= THIS_MODULE
,
1885 .setkey
= stm32_cryp_aes_aead_setkey
,
1886 .setauthsize
= stm32_cryp_aes_ccm_setauthsize
,
1887 .encrypt
= stm32_cryp_aes_ccm_encrypt
,
1888 .decrypt
= stm32_cryp_aes_ccm_decrypt
,
1889 .init
= stm32_cryp_aes_aead_init
,
1890 .ivsize
= AES_BLOCK_SIZE
,
1891 .maxauthsize
= AES_BLOCK_SIZE
,
1894 .cra_name
= "ccm(aes)",
1895 .cra_driver_name
= "stm32-ccm-aes",
1896 .cra_priority
= 200,
1897 .cra_flags
= CRYPTO_ALG_ASYNC
,
1899 .cra_ctxsize
= sizeof(struct stm32_cryp_ctx
),
1900 .cra_alignmask
= 0xf,
1901 .cra_module
= THIS_MODULE
,
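/*
 * Per-SoC capabilities: swap_final and padding_wa select the software
 * workarounds needed by older revisions of the IP; the STM32MP1 version
 * needs neither.
 */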
static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data },
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data },
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);
1923 static int stm32_cryp_probe(struct platform_device
*pdev
)
1925 struct device
*dev
= &pdev
->dev
;
1926 struct stm32_cryp
*cryp
;
1927 struct reset_control
*rst
;
1930 cryp
= devm_kzalloc(dev
, sizeof(*cryp
), GFP_KERNEL
);
1934 cryp
->caps
= of_device_get_match_data(dev
);
1940 cryp
->regs
= devm_platform_ioremap_resource(pdev
, 0);
1941 if (IS_ERR(cryp
->regs
))
1942 return PTR_ERR(cryp
->regs
);
1944 irq
= platform_get_irq(pdev
, 0);
1948 ret
= devm_request_threaded_irq(dev
, irq
, stm32_cryp_irq
,
1949 stm32_cryp_irq_thread
, IRQF_ONESHOT
,
1950 dev_name(dev
), cryp
);
1952 dev_err(dev
, "Cannot grab IRQ\n");
1956 cryp
->clk
= devm_clk_get(dev
, NULL
);
1957 if (IS_ERR(cryp
->clk
)) {
1958 dev_err(dev
, "Could not get clock\n");
1959 return PTR_ERR(cryp
->clk
);
1962 ret
= clk_prepare_enable(cryp
->clk
);
1964 dev_err(cryp
->dev
, "Failed to enable clock\n");
1968 pm_runtime_set_autosuspend_delay(dev
, CRYP_AUTOSUSPEND_DELAY
);
1969 pm_runtime_use_autosuspend(dev
);
1971 pm_runtime_get_noresume(dev
);
1972 pm_runtime_set_active(dev
);
1973 pm_runtime_enable(dev
);
1975 rst
= devm_reset_control_get(dev
, NULL
);
1977 reset_control_assert(rst
);
1979 reset_control_deassert(rst
);
1982 platform_set_drvdata(pdev
, cryp
);
1984 spin_lock(&cryp_list
.lock
);
1985 list_add(&cryp
->list
, &cryp_list
.dev_list
);
1986 spin_unlock(&cryp_list
.lock
);
1988 /* Initialize crypto engine */
1989 cryp
->engine
= crypto_engine_alloc_init(dev
, 1);
1990 if (!cryp
->engine
) {
1991 dev_err(dev
, "Could not init crypto engine\n");
1996 ret
= crypto_engine_start(cryp
->engine
);
1998 dev_err(dev
, "Could not start crypto engine\n");
2002 ret
= crypto_register_skciphers(crypto_algs
, ARRAY_SIZE(crypto_algs
));
2004 dev_err(dev
, "Could not register algs\n");
2008 ret
= crypto_register_aeads(aead_algs
, ARRAY_SIZE(aead_algs
));
2012 dev_info(dev
, "Initialized\n");
2014 pm_runtime_put_sync(dev
);
2019 crypto_unregister_skciphers(crypto_algs
, ARRAY_SIZE(crypto_algs
));
2022 crypto_engine_exit(cryp
->engine
);
2024 spin_lock(&cryp_list
.lock
);
2025 list_del(&cryp
->list
);
2026 spin_unlock(&cryp_list
.lock
);
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);
2033 clk_disable_unprepare(cryp
->clk
);
2038 static int stm32_cryp_remove(struct platform_device
*pdev
)
2040 struct stm32_cryp
*cryp
= platform_get_drvdata(pdev
);
2046 ret
= pm_runtime_get_sync(cryp
->dev
);
2050 crypto_unregister_aeads(aead_algs
, ARRAY_SIZE(aead_algs
));
2051 crypto_unregister_skciphers(crypto_algs
, ARRAY_SIZE(crypto_algs
));
2053 crypto_engine_exit(cryp
->engine
);
2055 spin_lock(&cryp_list
.lock
);
2056 list_del(&cryp
->list
);
2057 spin_unlock(&cryp_list
.lock
);
2059 pm_runtime_disable(cryp
->dev
);
2060 pm_runtime_put_noidle(cryp
->dev
);
2062 clk_disable_unprepare(cryp
->clk
);
2068 static int stm32_cryp_runtime_suspend(struct device
*dev
)
2070 struct stm32_cryp
*cryp
= dev_get_drvdata(dev
);
2072 clk_disable_unprepare(cryp
->clk
);
2077 static int stm32_cryp_runtime_resume(struct device
*dev
)
2079 struct stm32_cryp
*cryp
= dev_get_drvdata(dev
);
2082 ret
= clk_prepare_enable(cryp
->clk
);
2084 dev_err(cryp
->dev
, "Failed to prepare_enable clock\n");
2092 static const struct dev_pm_ops stm32_cryp_pm_ops
= {
2093 SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend
,
2094 pm_runtime_force_resume
)
2095 SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend
,
2096 stm32_cryp_runtime_resume
, NULL
)
static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm             = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");