/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 * License terms: GNU General Public License (GPL), version 2
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>

#define DRIVER_NAME             "stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT             BIT(0)
/* Bits [8..1] algo & operation mode */
#define FLG_AES                 BIT(1)
#define FLG_DES                 BIT(2)
#define FLG_TDES                BIT(3)
#define FLG_ECB                 BIT(4)
#define FLG_CBC                 BIT(5)
#define FLG_CTR                 BIT(6)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK           GENMASK(15, 0)
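/*
 * Example: a "cbc(aes)" encryption request is queued with
 * mode = FLG_AES | FLG_CBC | FLG_ENCRYPT, while the matching decryption
 * request simply omits FLG_ENCRYPT.
 */
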
/* Registers */
#define CRYP_CR                 0x00000000
#define CRYP_SR                 0x00000004
#define CRYP_DIN                0x00000008
#define CRYP_DOUT               0x0000000C
#define CRYP_DMACR              0x00000010
#define CRYP_IMSCR              0x00000014
#define CRYP_RISR               0x00000018
#define CRYP_MISR               0x0000001C
#define CRYP_K0LR               0x00000020
#define CRYP_K0RR               0x00000024
#define CRYP_K1LR               0x00000028
#define CRYP_K1RR               0x0000002C
#define CRYP_K2LR               0x00000030
#define CRYP_K2RR               0x00000034
#define CRYP_K3LR               0x00000038
#define CRYP_K3RR               0x0000003C
#define CRYP_IV0LR              0x00000040
#define CRYP_IV0RR              0x00000044
#define CRYP_IV1LR              0x00000048
#define CRYP_IV1RR              0x0000004C

/* Registers values */
#define CR_DEC_NOT_ENC          0x00000004
#define CR_TDES_ECB             0x00000000
#define CR_TDES_CBC             0x00000008
#define CR_DES_ECB              0x00000010
#define CR_DES_CBC              0x00000018
#define CR_AES_ECB              0x00000020
#define CR_AES_CBC              0x00000028
#define CR_AES_CTR              0x00000030
#define CR_AES_KP               0x00000038
#define CR_AES_UNKNOWN          0xFFFFFFFF
#define CR_ALGO_MASK            0x00080038
#define CR_DATA32               0x00000000
#define CR_DATA16               0x00000040
#define CR_DATA8                0x00000080
#define CR_DATA1                0x000000C0
#define CR_KEY128               0x00000000
#define CR_KEY192               0x00000100
#define CR_KEY256               0x00000200
#define CR_FFLUSH               0x00004000
#define CR_CRYPEN               0x00008000

#define SR_BUSY                 0x00000010
#define SR_OFNE                 0x00000004

#define IMSCR_IN                BIT(0)
#define IMSCR_OUT               BIT(1)

#define MISR_IN                 BIT(0)
#define MISR_OUT                BIT(1)

/* Misc */
#define AES_BLOCK_32            (AES_BLOCK_SIZE / sizeof(u32))
#define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out             (cryp->out_walk.offset - cryp->out_sg->offset)
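/*
 * _walked_in / _walked_out evaluate to the number of bytes already consumed
 * in the current scatterlist entry, i.e. the scatter walk offset relative to
 * the start of that entry.
 */
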
struct stm32_cryp_ctx {
        struct stm32_cryp       *cryp;
        int                     keylen;
        u32                     key[AES_KEYSIZE_256 / sizeof(u32)];
};

struct stm32_cryp_reqctx {
        unsigned long mode;
};

struct stm32_cryp {
        struct list_head        list;
        struct device           *dev;
        void __iomem            *regs;
        struct clk              *clk;
        unsigned long           flags;
        u32                     irq_status;
        struct stm32_cryp_ctx   *ctx;

        struct crypto_engine    *engine;

        struct mutex            lock; /* protects req */
        struct ablkcipher_request *req;

        size_t                  hw_blocksize;

        size_t                  total_in;
        size_t                  total_in_save;
        size_t                  total_out;
        size_t                  total_out_save;

        struct scatterlist      *in_sg;
        struct scatterlist      *out_sg;
        struct scatterlist      *out_sg_save;

        struct scatterlist      in_sgl;
        struct scatterlist      out_sgl;
        bool                    sgs_copied;

        int                     in_sg_len;
        int                     out_sg_len;

        u32                     last_ctr[4];

        struct scatter_walk     in_walk;
        struct scatter_walk     out_walk;
};

struct stm32_cryp_list {
        struct list_head        dev_list;
        spinlock_t              lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
        .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
        .lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_CTR;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
        return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
        return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
        return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
        writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
        u32 status;

        return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
                        !(status & SR_BUSY), 10, 100000);
}

static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
        struct stm32_cryp *tmp, *cryp = NULL;

        spin_lock_bh(&cryp_list.lock);
        if (!ctx->cryp) {
                list_for_each_entry(tmp, &cryp_list.dev_list, list) {
                        cryp = tmp;
                        break;
                }
                ctx->cryp = cryp;
        } else {
                cryp = ctx->cryp;
        }

        spin_unlock_bh(&cryp_list.lock);

        return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
                                    size_t align)
{
        int len = 0;

        if (!total)
                return 0;

        if (!IS_ALIGNED(total, align))
                return -EINVAL;

        while (sg) {
                if (!IS_ALIGNED(sg->offset, sizeof(u32)))
                        return -EINVAL;

                if (!IS_ALIGNED(sg->length, align))
                        return -EINVAL;

                len += sg->length;
                sg = sg_next(sg);
        }

        if (len != total)
                return -EINVAL;

        return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
        int ret;

        ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
                                       cryp->hw_blocksize);
        if (ret)
                return ret;

        ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
                                       cryp->hw_blocksize);

        return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
                        unsigned int start, unsigned int nbytes, int out)
{
        struct scatter_walk walk;

        if (!nbytes)
                return;

        scatterwalk_start(&walk, sg);
        scatterwalk_advance(&walk, start);
        scatterwalk_copychunks(buf, &walk, nbytes, out);
        scatterwalk_done(&walk, out, 0);
}

static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
        void *buf_in, *buf_out;
        int pages, total_in, total_out;

        if (!stm32_cryp_check_io_aligned(cryp)) {
                cryp->sgs_copied = 0;
                return 0;
        }

        total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
        pages = total_in ? get_order(total_in) : 1;
        buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

        total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
        pages = total_out ? get_order(total_out) : 1;
        buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

        if (!buf_in || !buf_out) {
                dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
                cryp->sgs_copied = 0;
                return -EFAULT;
        }

        sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

        sg_init_one(&cryp->in_sgl, buf_in, total_in);
        cryp->in_sg = &cryp->in_sgl;
        cryp->in_sg_len = 1;

        sg_init_one(&cryp->out_sgl, buf_out, total_out);
        cryp->out_sg_save = cryp->out_sg;
        cryp->out_sg = &cryp->out_sgl;
        cryp->out_sg_len = 1;

        cryp->sgs_copied = 1;

        return 0;
}
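/*
 * Note: when a request is not suitably aligned, the whole payload is bounced
 * through the pages allocated above, and the result is copied back to the
 * real destination in stm32_cryp_finish_req() once processing completes.
 */
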
static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
        if (!iv)
                return;

        stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
        stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

        if (is_aes(cryp)) {
                stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
                stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
        }
}

static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
        unsigned int i;
        int r_id;

        if (is_des(c)) {
                stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
                stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
        } else {
                r_id = CRYP_K3RR;
                for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
                        stm32_cryp_write(c, r_id,
                                         cpu_to_be32(c->ctx->key[i - 1]));
        }
}
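/*
 * Key layout example: a 128-bit AES key (four u32 words) is written from
 * CRYP_K3RR backwards, i.e. key[3] lands in K3RR and key[0] in K2LR, so
 * shorter keys occupy the highest-numbered key registers.
 */
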
static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
        if (is_aes(cryp) && is_ecb(cryp))
                return CR_AES_ECB;

        if (is_aes(cryp) && is_cbc(cryp))
                return CR_AES_CBC;

        if (is_aes(cryp) && is_ctr(cryp))
                return CR_AES_CTR;

        if (is_des(cryp) && is_ecb(cryp))
                return CR_DES_ECB;

        if (is_des(cryp) && is_cbc(cryp))
                return CR_DES_CBC;

        if (is_tdes(cryp) && is_ecb(cryp))
                return CR_TDES_ECB;

        if (is_tdes(cryp) && is_cbc(cryp))
                return CR_TDES_CBC;

        dev_err(cryp->dev, "Unknown mode\n");
        return CR_AES_UNKNOWN;
}

static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
        int ret;
        u32 cfg, hw_mode;

        /* Disable interrupt */
        stm32_cryp_write(cryp, CRYP_IMSCR, 0);

        /* Set key */
        stm32_cryp_hw_write_key(cryp);

        /* Set configuration */
        cfg = CR_DATA8 | CR_FFLUSH;

        switch (cryp->ctx->keylen) {
        case AES_KEYSIZE_128:
                cfg |= CR_KEY128;
                break;

        case AES_KEYSIZE_192:
                cfg |= CR_KEY192;
                break;

        default:
        case AES_KEYSIZE_256:
                cfg |= CR_KEY256;
                break;
        }

        hw_mode = stm32_cryp_get_hw_mode(cryp);
        if (hw_mode == CR_AES_UNKNOWN)
                return -EINVAL;

        /* AES ECB/CBC decrypt: run key preparation first */
        if (is_decrypt(cryp) &&
            ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
                stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

                /* Wait for end of processing */
                ret = stm32_cryp_wait_busy(cryp);
                if (ret) {
                        dev_err(cryp->dev, "Timeout (key preparation)\n");
                        return ret;
                }
        }

        cfg |= hw_mode;

        if (is_decrypt(cryp))
                cfg |= CR_DEC_NOT_ENC;

        /* Apply config and flush (valid when CRYPEN = 0) */
        stm32_cryp_write(cryp, CRYP_CR, cfg);

        switch (hw_mode) {
        case CR_DES_CBC:
        case CR_TDES_CBC:
        case CR_AES_CBC:
        case CR_AES_CTR:
                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->info);
                break;

        default:
                break;
        }

        /* Enable now */
        cfg |= CR_CRYPEN;

        stm32_cryp_write(cryp, CRYP_CR, cfg);

        return 0;
}
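/*
 * Worked example: for an AES-256-CBC decrypt the value built above is
 *   cfg = CR_DATA8 | CR_FFLUSH | CR_KEY256 | CR_AES_CBC | CR_DEC_NOT_ENC
 *       = 0x0080 | 0x4000 | 0x0200 | 0x0028 | 0x0004 = 0x42AC
 * and CR_CRYPEN (0x8000) is OR'ed in once the IV has been loaded.
 */
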
static void stm32_cryp_finish_req(struct stm32_cryp *cryp)
{
        int err = 0;

        if (cryp->sgs_copied) {
                void *buf_in, *buf_out;
                int pages, len;

                buf_in = sg_virt(&cryp->in_sgl);
                buf_out = sg_virt(&cryp->out_sgl);

                sg_copy_buf(buf_out, cryp->out_sg_save, 0,
                            cryp->total_out_save, 1);

                len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_in, pages);

                len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
                pages = len ? get_order(len) : 1;
                free_pages((unsigned long)buf_out, pages);
        }

        crypto_finalize_cipher_request(cryp->engine, cryp->req, err);
        cryp->req = NULL;

        memset(cryp->ctx->key, 0, cryp->ctx->keylen);

        mutex_unlock(&cryp->lock);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
        /* Enable interrupt and let the IRQ handler do everything */
        stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

        return 0;
}

static int stm32_cryp_cra_init(struct crypto_tfm *tfm)
{
        tfm->crt_ablkcipher.reqsize = sizeof(struct stm32_cryp_reqctx);

        return 0;
}

static int stm32_cryp_crypt(struct ablkcipher_request *req, unsigned long mode)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
                        crypto_ablkcipher_reqtfm(req));
        struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
        struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

        if (!cryp)
                return -ENODEV;

        rctx->mode = mode;

        return crypto_transfer_cipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                             unsigned int keylen)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        memcpy(ctx->key, key, keylen);
        ctx->keylen = keylen;

        return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256)
                return -EINVAL;

        return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                 unsigned int keylen)
{
        if (keylen != DES_KEY_SIZE)
                return -EINVAL;

        return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                                  unsigned int keylen)
{
        if (keylen != (3 * DES_KEY_SIZE))
                return -EINVAL;

        return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_des_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct ablkcipher_request *req)
{
        return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

static int stm32_cryp_prepare_req(struct crypto_engine *engine,
                                  struct ablkcipher_request *req)
{
        struct stm32_cryp_ctx *ctx;
        struct stm32_cryp *cryp;
        struct stm32_cryp_reqctx *rctx;
        int ret;

        if (!req)
                return -EINVAL;

        ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));

        cryp = ctx->cryp;
        if (!cryp)
                return -ENODEV;

        mutex_lock(&cryp->lock);

        rctx = ablkcipher_request_ctx(req);
        rctx->mode &= FLG_MODE_MASK;

        ctx->cryp = cryp;

        cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
        cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
        cryp->ctx = ctx;

        cryp->req = req;
        cryp->total_in = req->nbytes;
        cryp->total_out = cryp->total_in;

        cryp->total_in_save = cryp->total_in;
        cryp->total_out_save = cryp->total_out;

        cryp->in_sg = req->src;
        cryp->out_sg = req->dst;
        cryp->out_sg_save = cryp->out_sg;

        cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
        if (cryp->in_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get in_sg_len\n");
                ret = cryp->in_sg_len;
                goto out;
        }

        cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
        if (cryp->out_sg_len < 0) {
                dev_err(cryp->dev, "Cannot get out_sg_len\n");
                ret = cryp->out_sg_len;
                goto out;
        }

        ret = stm32_cryp_copy_sgs(cryp);
        if (ret)
                goto out;

        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
        scatterwalk_start(&cryp->out_walk, cryp->out_sg);

        ret = stm32_cryp_hw_init(cryp);
out:
        if (ret)
                mutex_unlock(&cryp->lock);

        return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
                                         struct ablkcipher_request *req)
{
        return stm32_cryp_prepare_req(engine, req);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine,
                                     struct ablkcipher_request *req)
{
        struct stm32_cryp_ctx *ctx = crypto_ablkcipher_ctx(
                        crypto_ablkcipher_reqtfm(req));
        struct stm32_cryp *cryp = ctx->cryp;

        if (!cryp)
                return -ENODEV;

        return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
                                unsigned int n)
{
        scatterwalk_advance(&cryp->out_walk, n);

        if (unlikely(cryp->out_sg->length == _walked_out)) {
                cryp->out_sg = sg_next(cryp->out_sg);
                if (cryp->out_sg) {
                        scatterwalk_start(&cryp->out_walk, cryp->out_sg);
                        return (sg_virt(cryp->out_sg) + _walked_out);
                }
        }

        return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
                               unsigned int n)
{
        scatterwalk_advance(&cryp->in_walk, n);

        if (unlikely(cryp->in_sg->length == _walked_in)) {
                cryp->in_sg = sg_next(cryp->in_sg);
                if (cryp->in_sg) {
                        scatterwalk_start(&cryp->in_walk, cryp->in_sg);
                        return (sg_virt(cryp->in_sg) + _walked_in);
                }
        }

        return (u32 *)((u8 *)src + n);
}

static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
        u32 cr;

        if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
                cryp->last_ctr[3] = 0;
                cryp->last_ctr[2]++;
                if (!cryp->last_ctr[2]) {
                        cryp->last_ctr[1]++;
                        if (!cryp->last_ctr[1])
                                cryp->last_ctr[0]++;
                }

                cr = stm32_cryp_read(cryp, CRYP_CR);
                stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

                stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

                stm32_cryp_write(cryp, CRYP_CR, cr);
        }

        cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
        cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
        cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
        cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}
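/*
 * Example: with last_ctr = { a, b, c, 0xFFFFFFFF } the next block would wrap
 * the low counter word, so the code above zeroes last_ctr[3], carries into
 * last_ctr[2] (and upward on further wraps), then reloads the full 128-bit
 * counter through the IV registers by hand.
 */
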
static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 d32, *dst;
        u8 *d8;

        dst = sg_virt(cryp->out_sg) + _walked_out;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_out >= sizeof(u32))) {
                        /* Read a full u32 */
                        *dst = stm32_cryp_read(cryp, CRYP_DOUT);

                        dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
                        cryp->total_out -= sizeof(u32);
                } else if (!cryp->total_out) {
                        /* Empty fifo out (data from input padding) */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                } else {
                        /* Read less than an u32 */
                        d32 = stm32_cryp_read(cryp, CRYP_DOUT);
                        d8 = (u8 *)&d32;

                        for (j = 0; j < cryp->total_out; j++) {
                                *((u8 *)dst) = *(d8++);
                                dst = stm32_cryp_next_out(cryp, dst, 1);
                        }
                        cryp->total_out = 0;
                }
        }

        return !cryp->total_out || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
        unsigned int i, j;
        u32 *src;
        u8 d8[4];

        src = sg_virt(cryp->in_sg) + _walked_in;

        for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
                if (likely(cryp->total_in >= sizeof(u32))) {
                        /* Write a full u32 */
                        stm32_cryp_write(cryp, CRYP_DIN, *src);

                        src = stm32_cryp_next_in(cryp, src, sizeof(u32));
                        cryp->total_in -= sizeof(u32);
                } else if (!cryp->total_in) {
                        /* Write padding data */
                        stm32_cryp_write(cryp, CRYP_DIN, 0);
                } else {
                        /* Write less than an u32 */
                        memset(d8, 0, sizeof(u32));
                        for (j = 0; j < cryp->total_in; j++) {
                                d8[j] = *((u8 *)src);
                                src = stm32_cryp_next_in(cryp, src, 1);
                        }

                        stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
                        cryp->total_in = 0;
                }
        }
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
        if (unlikely(!cryp->total_in)) {
                dev_warn(cryp->dev, "No more data to process\n");
                return;
        }

        if (is_aes(cryp) && is_ctr(cryp))
                stm32_cryp_check_ctr_counter(cryp);

        stm32_cryp_irq_write_block(cryp);
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
        struct stm32_cryp *cryp = arg;

        if (cryp->irq_status & MISR_OUT)
                /* Output FIFO IRQ: read data */
                if (unlikely(stm32_cryp_irq_read_data(cryp))) {
                        /* All bytes processed, finish */
                        stm32_cryp_write(cryp, CRYP_IMSCR, 0);
                        stm32_cryp_finish_req(cryp);
                        return IRQ_HANDLED;
                }

        if (cryp->irq_status & MISR_IN) {
                /* Input FIFO IRQ: write data */
                stm32_cryp_irq_write_data(cryp);
        }

        return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
        struct stm32_cryp *cryp = arg;

        cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

        return IRQ_WAKE_THREAD;
}

static struct crypto_alg crypto_algs[] = {
{
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "stm32-ecb-aes",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .setkey         = stm32_cryp_aes_setkey,
                .encrypt        = stm32_cryp_aes_ecb_encrypt,
                .decrypt        = stm32_cryp_aes_ecb_decrypt,
        }
},
{
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "stm32-cbc-aes",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = stm32_cryp_aes_setkey,
                .encrypt        = stm32_cryp_aes_cbc_encrypt,
                .decrypt        = stm32_cryp_aes_cbc_decrypt,
        }
},
{
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "stm32-ctr-aes",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = stm32_cryp_aes_setkey,
                .encrypt        = stm32_cryp_aes_ctr_encrypt,
                .decrypt        = stm32_cryp_aes_ctr_decrypt,
        }
},
{
        .cra_name               = "ecb(des)",
        .cra_driver_name        = "stm32-ecb-des",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = DES_BLOCK_SIZE,
                .max_keysize    = DES_BLOCK_SIZE,
                .setkey         = stm32_cryp_des_setkey,
                .encrypt        = stm32_cryp_des_ecb_encrypt,
                .decrypt        = stm32_cryp_des_ecb_decrypt,
        }
},
{
        .cra_name               = "cbc(des)",
        .cra_driver_name        = "stm32-cbc-des",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = DES_BLOCK_SIZE,
                .max_keysize    = DES_BLOCK_SIZE,
                .ivsize         = DES_BLOCK_SIZE,
                .setkey         = stm32_cryp_des_setkey,
                .encrypt        = stm32_cryp_des_cbc_encrypt,
                .decrypt        = stm32_cryp_des_cbc_decrypt,
        }
},
{
        .cra_name               = "ecb(des3_ede)",
        .cra_driver_name        = "stm32-ecb-des3",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = 3 * DES_BLOCK_SIZE,
                .max_keysize    = 3 * DES_BLOCK_SIZE,
                .setkey         = stm32_cryp_tdes_setkey,
                .encrypt        = stm32_cryp_tdes_ecb_encrypt,
                .decrypt        = stm32_cryp_tdes_ecb_decrypt,
        }
},
{
        .cra_name               = "cbc(des3_ede)",
        .cra_driver_name        = "stm32-cbc-des3",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                  CRYPTO_ALG_ASYNC,
        .cra_blocksize          = DES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct stm32_cryp_ctx),
        .cra_alignmask          = 0xf,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = stm32_cryp_cra_init,
        .cra_ablkcipher = {
                .min_keysize    = 3 * DES_BLOCK_SIZE,
                .max_keysize    = 3 * DES_BLOCK_SIZE,
                .ivsize         = DES_BLOCK_SIZE,
                .setkey         = stm32_cryp_tdes_setkey,
                .encrypt        = stm32_cryp_tdes_cbc_encrypt,
                .decrypt        = stm32_cryp_tdes_cbc_decrypt,
        }
},
};

static const struct of_device_id stm32_dt_ids[] = {
        { .compatible = "st,stm32f756-cryp", },
        {},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
        struct device *dev = &pdev->dev;
        struct stm32_cryp *cryp;
        struct resource *res;
        struct reset_control *rst;
        int irq, ret;

        cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
        if (!cryp)
                return -ENOMEM;

        cryp->dev = dev;

        mutex_init(&cryp->lock);

        res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
        cryp->regs = devm_ioremap_resource(dev, res);
        if (IS_ERR(cryp->regs))
                return PTR_ERR(cryp->regs);

        irq = platform_get_irq(pdev, 0);
        if (irq < 0) {
                dev_err(dev, "Cannot get IRQ resource\n");
                return irq;
        }

        ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
                                        stm32_cryp_irq_thread, IRQF_ONESHOT,
                                        dev_name(dev), cryp);
        if (ret) {
                dev_err(dev, "Cannot grab IRQ\n");
                return ret;
        }

        cryp->clk = devm_clk_get(dev, NULL);
        if (IS_ERR(cryp->clk)) {
                dev_err(dev, "Could not get clock\n");
                return PTR_ERR(cryp->clk);
        }

        ret = clk_prepare_enable(cryp->clk);
        if (ret) {
                dev_err(cryp->dev, "Failed to enable clock\n");
                return ret;
        }

        rst = devm_reset_control_get(dev, NULL);
        if (!IS_ERR(rst)) {
                reset_control_assert(rst);
                udelay(2);
                reset_control_deassert(rst);
        }

        platform_set_drvdata(pdev, cryp);

        spin_lock(&cryp_list.lock);
        list_add(&cryp->list, &cryp_list.dev_list);
        spin_unlock(&cryp_list.lock);

        /* Initialize crypto engine */
        cryp->engine = crypto_engine_alloc_init(dev, 1);
        if (!cryp->engine) {
                dev_err(dev, "Could not init crypto engine\n");
                ret = -ENOMEM;
                goto err_engine1;
        }

        cryp->engine->prepare_cipher_request = stm32_cryp_prepare_cipher_req;
        cryp->engine->cipher_one_request = stm32_cryp_cipher_one_req;

        ret = crypto_engine_start(cryp->engine);
        if (ret) {
                dev_err(dev, "Could not start crypto engine\n");
                goto err_engine2;
        }

        ret = crypto_register_algs(crypto_algs, ARRAY_SIZE(crypto_algs));
        if (ret) {
                dev_err(dev, "Could not register algs\n");
                goto err_algs;
        }

        dev_info(dev, "Initialized\n");

        return 0;

err_algs:
err_engine2:
        crypto_engine_exit(cryp->engine);
err_engine1:
        spin_lock(&cryp_list.lock);
        list_del(&cryp->list);
        spin_unlock(&cryp_list.lock);

        clk_disable_unprepare(cryp->clk);

        return ret;
}

static int stm32_cryp_remove(struct platform_device *pdev)
{
        struct stm32_cryp *cryp = platform_get_drvdata(pdev);

        if (!cryp)
                return -ENODEV;

        crypto_unregister_algs(crypto_algs, ARRAY_SIZE(crypto_algs));

        crypto_engine_exit(cryp->engine);

        spin_lock(&cryp_list.lock);
        list_del(&cryp->list);
        spin_unlock(&cryp_list.lock);

        clk_disable_unprepare(cryp->clk);

        return 0;
}

static struct platform_driver stm32_cryp_driver = {
        .probe  = stm32_cryp_probe,
        .remove = stm32_cryp_remove,
        .driver = {
                .name           = DRIVER_NAME,
                .of_match_table = stm32_dt_ids,
        },
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");
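
/*
 * Usage sketch (illustrative only, not part of the driver): once "cbc(aes)"
 * is registered above, userspace can reach it through the generic AF_ALG
 * socket interface, assuming this implementation wins priority selection.
 * A minimal, hedged example with a hypothetical all-zero test key:
 *
 *      #include <sys/socket.h>
 *      #include <linux/if_alg.h>
 *
 *      int tfmfd, opfd;
 *      struct sockaddr_alg sa = {
 *              .salg_family = AF_ALG,
 *              .salg_type   = "skcipher",
 *              .salg_name   = "cbc(aes)",
 *      };
 *      unsigned char key[16] = { 0 };
 *
 *      tfmfd = socket(AF_ALG, SOCK_SEQPACKET, 0);
 *      bind(tfmfd, (struct sockaddr *)&sa, sizeof(sa));
 *      setsockopt(tfmfd, SOL_ALG, ALG_SET_KEY, key, sizeof(key));
 *      opfd = accept(tfmfd, NULL, 0);
 *
 * followed by sendmsg() with ALG_SET_OP / ALG_SET_IV control data and a
 * read() to collect the ciphertext.
 */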