/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
#define FLG_GCM                 BIT(7)
#define FLG_CCM                 BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA       BIT(16)

/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C
#define CRYP_CSGCMCCM0R         0x00000050
#define CRYP_CSGCM0R            0x00000070

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_GCM              0x00080000
#define CR_AES_CCM              0x00080008
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000
#define CR_PH_INIT              0x00000000
#define CR_PH_HEADER            0x00010000
#define CR_PH_PAYLOAD           0x00020000
#define CR_PH_FINAL             0x00030000
#define CR_PH_MASK              0x00030000
#define CR_NBPBL_SHIFT          20

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT            2
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY  50

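/*
 * Note: _walked_in/_walked_out give the number of bytes already consumed in
 * the current scatterlist entry, i.e. the walk offset relative to the start
 * of that entry. They are only meaningful while in_walk/out_walk point into
 * in_sg/out_sg respectively.
 */
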
struct stm32_cryp_caps {
	bool                    swap_final;
	bool                    padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp       *cryp;
	int                     keylen;
	u32                     key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long           flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head        list;
	struct device           *dev;
	void __iomem            *regs;
	struct clk              *clk;
	unsigned long           flags;
	u32                     irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx   *ctx;

	struct crypto_engine    *engine;

	struct ablkcipher_request *req;
	struct aead_request     *areq;

	size_t                  authsize;
	size_t                  hw_blocksize;

	size_t                  total_in;
	size_t                  total_in_save;
	size_t                  total_out;
	size_t                  total_out_save;

	struct scatterlist      *in_sg;
	struct scatterlist      *out_sg;
	struct scatterlist      *out_sg_save;

	struct scatterlist      in_sgl;
	struct scatterlist      out_sgl;
	bool                    sgs_copied;

	int                     in_sg_len;
	int                     out_sg_len;

	struct scatter_walk     in_walk;
	struct scatter_walk     out_walk;

	u32                     last_ctr[4];
	u32                     gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head        dev_list;
	spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}

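/*
 * The three helpers above poll the hardware every 10 us and give up after
 * 100 ms: wait_busy waits for SR_BUSY to drop, wait_enable for the IP to
 * clear CR_CRYPEN at the end of an init phase, and wait_output for the
 * output FIFO to become non-empty (SR_OFNE).
 */
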
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}

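/*
 * Note: 'out' follows the scatterwalk_copychunks() convention: 0 copies from
 * the scatterlist into buf, non-zero copies from buf back into the
 * scatterlist.
 */
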
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		cryp->sgs_copied = 0;
		return -EFAULT;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct ablkcipher_request *req = cryp->req;
	u32 *tmp = req->info;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}

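/*
 * Key layout: DES only uses K1L/K1R. For AES/TDES the key words are written
 * big-endian, last word first, walking backwards from CRYP_K3RR, so that a
 * shorter key naturally ends up in the highest-numbered key registers as the
 * IP expects (e.g. an AES-128 key occupies K2L..K3R).
 */
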
static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (gcm init)\n");

	return ret;
}

static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		if (!cryp->caps->padding_wa)
			*d = cpu_to_be32(*d);
		stm32_cryp_write(cryp, CRYP_DIN, *d++);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (ccm init)\n");

	return ret;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		/* Phase 2 : header (authenticated data) */
		if (cryp->areq->assoclen) {
			cfg |= CR_PH_HEADER;
		} else if (stm32_cryp_get_input_text_len(cryp)) {
			cfg |= CR_PH_PAYLOAD;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		}
		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cryp->flags &= ~FLG_CCM_PADDED_WA;

	return 0;
}

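/*
 * GCM/CCM phase sequencing, as driven by the CR_PH_* values above: INIT does
 * the hash-key/counter setup, HEADER absorbs the associated data, PAYLOAD
 * en/decrypts the text and FINAL produces the authentication tag. The header
 * and payload phases are fed block by block from the interrupt handlers
 * further below.
 */
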
static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp))))
		stm32_cryp_get_iv(cryp);

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_ablkcipher_request(cryp->engine, cryp->req,
						   err);

	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_ablkcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	u32 tmp[DES_EXPKEY_WORDS];

	if (keylen != DES_KEY_SIZE)
		return -EINVAL;

	if ((crypto_ablkcipher_get_flags(tfm) &
	     CRYPTO_TFM_REQ_FORBID_WEAK_KEYS) &&
	    unlikely(!des_ekey(tmp, key))) {
		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_WEAK_KEY);
		return -EINVAL;
	}

	return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	u32 flags;
	int err;

	flags = crypto_ablkcipher_get_flags(tfm);
	err = __des3_verify_key(&flags, key);
	if (unlikely(err)) {
		crypto_ablkcipher_set_flags(tfm, flags);
		return err;
	}

	return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

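/*
 * CCM only allows the even tag sizes 4, 6, 8, 10, 12, 14 and 16 bytes
 * (NIST SP 800-38C), hence the whitelist above; GCM is restricted by this
 * driver to the full 16-byte tag.
 */
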
static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct ablkcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;
	if (!cryp)
		return -ENODEV;

	rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->total_in = req->nbytes;
		cryp->total_out = cryp->total_in;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData  ||   PlainText
		 *          <- assoclen ->  <- cryptlen ->
		 *          <------- total_in ----------->
		 *
		 *  OUTPUT =   AssocData  ||  CipherText  ||   AuthTag
		 *          <- assoclen ->  <- cryptlen ->  <- authsize ->
		 *          <---------------- total_out ----------------->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData  ||  CipherText  ||  AuthTag
		 *          <- assoclen ->  <--------- cryptlen --------->
		 *                                          <- authsize ->
		 *          <---------------- total_in ------------------>
		 *
		 *  OUTPUT =   AssocData  ||   PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 *          <---------- total_out ----------------->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		cryp->total_in = areq->assoclen + areq->cryptlen;
		if (is_encrypt(cryp))
			/* Append auth tag to output */
			cryp->total_out = cryp->total_in + cryp->authsize;
		else
			/* No auth tag in output */
			cryp->total_out = cryp->total_in - cryp->authsize;
	}

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req ? req->src : areq->src;
	cryp->out_sg = req ? req->dst : areq->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		return ret;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		return ret;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		return ret;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
		cryp->total_out -= cryp->areq->assoclen;
	}

	ret = stm32_cryp_hw_init(cryp);

	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct ablkcipher_request *req = container_of(areq,
						      struct ablkcipher_request,
						      base);
	struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
			crypto_ablkcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->areq->assoclen &&
		     !stm32_cryp_get_input_text_len(cryp))) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit, *dst, d32;
	u8 *d8;
	unsigned int i, j;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - AES_BLOCK_SIZE;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u8 iv[AES_BLOCK_SIZE];
		u32 *iv32 = (u32 *)iv;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (!cryp->caps->padding_wa)
				*iv32 = cpu_to_be32(*iv32);
			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		/* Get and write tag */
		dst = sg_virt(cryp->out_sg) + _walked_out;

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (cryp->total_out >= sizeof(u32)) {
				/* Read a full u32 */
				*dst = stm32_cryp_read(cryp, CRYP_DOUT);

				dst = stm32_cryp_next_out(cryp, dst,
							  sizeof(u32));
				cryp->total_out -= sizeof(u32);
			} else if (!cryp->total_out) {
				/* Empty fifo out (data from input padding) */
				stm32_cryp_read(cryp, CRYP_DOUT);
			} else {
				/* Read less than an u32 */
				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
				d8 = (u8 *)&d32;

				for (j = 0; j < cryp->total_out; j++) {
					*((u8 *)dst) = *(d8++);
					dst = stm32_cryp_next_out(cryp, dst, 1);
				}
				cryp->total_out = 0;
			}
		}
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
					 cryp->total_in_save - cryp->authsize,
					 cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

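/*
 * The hardware only increments the low 32 bits of the CTR counter, so the
 * carry into the upper words is propagated in software: when IV word 3 is
 * about to wrap, the IP is briefly disabled and the full 128-bit counter is
 * written back.
 */
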
static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;
	size_t tag_size;

	/* Do not read the tag now (if any) */
	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (cryp->total_out == tag_size) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out - tag_size; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = tag_size;
		}
	}

	return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];
	size_t tag_size;

	/* Do not write the tag (if any) */
	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (cryp->total_in == tag_size) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in - tag_size; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = tag_size;
		}
	}
}

static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, tmp[AES_BLOCK_32];
	size_t total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	stm32_cryp_irq_read_data(cryp);
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_in_save - total_in_ori,
				 total_in_ori, 0);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++) {
		if (cryp->total_in)
			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
		else
			stm32_cryp_write(cryp, CRYP_DIN, 0);

		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
	}

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm header)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg, payload_bytes;

	/* disable ip, set NPBLB and re-enable ip */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
					   cryp->total_in;
	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

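/*
 * NPBLB holds the number of padding bytes in the last block of payload, so
 * the IP can exclude them from the GCM/CCM tag computation. The code updates
 * it with CRYPEN cleared, hence the disable/re-enable sequence above.
 */
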
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
	size_t last_total_out, total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
	cryp->flags |= FLG_CCM_PADDED_WA;

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	last_total_out = cryp->total_out;
	stm32_cryp_irq_read_data(cryp);

	memset(tmp, 0, sizeof(tmp));
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_out_save - last_total_out,
				 last_total_out, 0);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
		tmp[i] ^= cstmp1[i];
		tmp[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa)
			/* Special case 1 */
			return stm32_cryp_irq_write_gcm_padded_data(cryp);

		/* Setting padding bytes (NBBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa)
			/* Special case 2 */
			return stm32_cryp_irq_write_ccm_padded_data(cryp);

		/* Setting padding bytes (NBBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}

static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i = 0, j, k;
	u32 alen, cfg, *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;
	alen = cryp->areq->assoclen;

	if (!_walked_in) {
		if (cryp->areq->assoclen <= 65280) {
			/* Write first u32 of B1 */
			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		} else {
			/* Build the two first u32 of B1 */
			d8[0] = 0xFF;
			d8[1] = 0xFE;
			d8[2] = alen & 0xFF000000;
			d8[3] = alen & 0x00FF0000;

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			d8[0] = alen & 0x0000FF00;
			d8[1] = alen & 0x000000FF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		}
	}

	/* Write next u32 */
	for (; i < AES_BLOCK_32; i++) {
		/* Build an u32 */
		memset(d8, 0, sizeof(u32));
		for (k = 0; k < sizeof(u32); k++) {
			d8[k] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
			if ((cryp->total_in_save - cryp->total_in) == alen)
				break;
		}

		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);

		if ((cryp->total_in_save - cryp->total_in) == alen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (ccm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}
	}
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct crypto_alg crypto_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "stm32-ecb-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ecb_encrypt,
		.decrypt	= stm32_cryp_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "stm32-cbc-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_cbc_encrypt,
		.decrypt	= stm32_cryp_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "stm32-ctr-aes",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= stm32_cryp_aes_setkey,
		.encrypt	= stm32_cryp_aes_ctr_encrypt,
		.decrypt	= stm32_cryp_aes_ctr_decrypt,
	}
},
{
	.cra_name		= "ecb(des)",
	.cra_driver_name	= "stm32-ecb-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_BLOCK_SIZE,
		.max_keysize	= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_ecb_encrypt,
		.decrypt	= stm32_cryp_des_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des)",
	.cra_driver_name	= "stm32-cbc-des",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= DES_BLOCK_SIZE,
		.max_keysize	= DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_des_setkey,
		.encrypt	= stm32_cryp_des_cbc_encrypt,
		.decrypt	= stm32_cryp_des_cbc_decrypt,
	}
},
{
	.cra_name		= "ecb(des3_ede)",
	.cra_driver_name	= "stm32-ecb-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_BLOCK_SIZE,
		.max_keysize	= 3 * DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_ecb_encrypt,
		.decrypt	= stm32_cryp_tdes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(des3_ede)",
	.cra_driver_name	= "stm32-cbc-des3",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER |
				  CRYPTO_ALG_ASYNC,
	.cra_blocksize		= DES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= stm32_cryp_cra_init,
	.cra_ablkcipher = {
		.min_keysize	= 3 * DES_BLOCK_SIZE,
		.max_keysize	= 3 * DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.setkey		= stm32_cryp_tdes_setkey,
		.encrypt	= stm32_cryp_tdes_cbc_encrypt,
		.decrypt	= stm32_cryp_tdes_cbc_decrypt,
	}
},
};

static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};

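/*
 * Usage sketch (not part of the driver): once these algs are registered, any
 * kernel user can reach the accelerator through the regular AEAD API. The
 * set-up below is illustrative only; error handling and the asynchronous
 * completion callback are elided.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_aead_setauthsize(tfm, 16);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	crypto_aead_encrypt(req);	(completes asynchronously)
 */
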
static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

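/*
 * Device tree sketch (illustrative only - the node name, address and clock /
 * reset phandles below are assumptions; consult the st,stm32-cryp binding
 * for the authoritative properties):
 *
 *	cryp: cryp@54001000 {
 *		compatible = "st,stm32mp1-cryp";
 *		reg = <0x54001000 0x400>;
 *		interrupts = <GIC_SPI 79 IRQ_TYPE_LEVEL_HIGH>;
 *		clocks = <&rcc CRYP1>;
 *		resets = <&rcc CRYP1_R>;
 *	};
 */
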
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct resource *res;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	cryp->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0) {
		dev_err(dev, "Cannot get IRQ resource\n");
		return irq;
	}

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_get_sync(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe  = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name           = DRIVER_NAME,
		.pm		= &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");