// SPDX-License-Identifier: GPL-2.0-only
/*
 * This file is part of STM32 Crypto driver for Linux.
 *
 * Copyright (C) 2017, STMicroelectronics - All Rights Reserved
 * Author(s): Lionel DEBIEVE <lionel.debieve@st.com> for STMicroelectronics.
 */
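/*
 * This driver exposes the HASH peripheral through the kernel crypto API as
 * ahash transforms ("md5", "sha1", "sha224", "sha256" and their hmac()
 * variants). As a rough usage sketch (standard crypto API, not specific to
 * this driver): allocate a tfm with crypto_alloc_ahash("sha256", 0, 0),
 * attach data with ahash_request_set_crypt() and call crypto_ahash_digest().
 * Requests are serialized per device through a crypto_engine queue.
 */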
#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/engine.h>
#include <crypto/hash.h>
#include <crypto/md5.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/internal/hash.h>
#define HASH_CR			0x00
#define HASH_DIN		0x04
#define HASH_STR		0x08
#define HASH_IMR		0x20
#define HASH_SR			0x24
#define HASH_CSR(x)		(0x0F8 + ((x) * 0x04))
#define HASH_HREG(x)		(0x310 + ((x) * 0x04))
#define HASH_HWCFGR		0x3F0
#define HASH_VER		0x3F4
/* Control Register */
#define HASH_CR_INIT			BIT(2)
#define HASH_CR_DMAE			BIT(3)
#define HASH_CR_DATATYPE_POS		4
#define HASH_CR_MODE			BIT(6)
#define HASH_CR_MDMAT			BIT(13)
#define HASH_CR_DMAA			BIT(14)
#define HASH_CR_LKEY			BIT(16)

#define HASH_CR_ALGO_SHA1		0x0
#define HASH_CR_ALGO_MD5		0x80
#define HASH_CR_ALGO_SHA224		0x40000
#define HASH_CR_ALGO_SHA256		0x40080

/* Interrupt */
#define HASH_DINIE			BIT(0)
#define HASH_DCIE			BIT(1)

/* Interrupt Mask */
#define HASH_MASK_CALC_COMPLETION	BIT(0)
#define HASH_MASK_DATA_INPUT		BIT(1)
/* Context swap register */
#define HASH_CSR_REGISTER_NUMBER	53

/* Status Flags */
#define HASH_SR_DATA_INPUT_READY	BIT(0)
#define HASH_SR_OUTPUT_READY		BIT(1)
#define HASH_SR_DMA_ACTIVE		BIT(2)
#define HASH_SR_BUSY			BIT(3)

/* STR Register */
#define HASH_STR_NBLW_MASK		GENMASK(4, 0)
#define HASH_STR_DCAL			BIT(8)
#define HASH_FLAGS_INIT			BIT(0)
#define HASH_FLAGS_OUTPUT_READY		BIT(1)
#define HASH_FLAGS_CPU			BIT(2)
#define HASH_FLAGS_DMA_READY		BIT(3)
#define HASH_FLAGS_DMA_ACTIVE		BIT(4)
#define HASH_FLAGS_HMAC_INIT		BIT(5)
#define HASH_FLAGS_HMAC_FINAL		BIT(6)
#define HASH_FLAGS_HMAC_KEY		BIT(7)

#define HASH_FLAGS_FINAL		BIT(15)
#define HASH_FLAGS_FINUP		BIT(16)
#define HASH_FLAGS_ALGO_MASK		GENMASK(21, 18)
#define HASH_FLAGS_MD5			BIT(18)
#define HASH_FLAGS_SHA1			BIT(19)
#define HASH_FLAGS_SHA224		BIT(20)
#define HASH_FLAGS_SHA256		BIT(21)
#define HASH_FLAGS_ERRORS		BIT(22)
#define HASH_FLAGS_HMAC			BIT(23)

#define HASH_OP_UPDATE			1
#define HASH_OP_FINAL			2
enum stm32_hash_data_format {
	HASH_DATA_32_BITS		= 0x0,
	HASH_DATA_16_BITS		= 0x1,
	HASH_DATA_8_BITS		= 0x2,
	HASH_DATA_1_BIT			= 0x3
};
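/*
 * The DATATYPE field of HASH_CR tells the peripheral how to reorder the
 * bytes of each 32-bit word written to HASH_DIN. This driver always feeds
 * data as a byte stream (HASH_DATA_8_BITS, set in stm32_hash_init()).
 */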
#define HASH_BUFLEN			256
#define HASH_LONG_KEY			64
#define HASH_MAX_KEY_SIZE		(SHA256_BLOCK_SIZE * 8)
#define HASH_QUEUE_LENGTH		16
#define HASH_DMA_THRESHOLD		50

#define HASH_AUTOSUSPEND_DELAY		50
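/*
 * stm32_hash_ctx is the per-transform state (device binding and HMAC key);
 * stm32_hash_request_ctx below is the per-request state (flags, staging
 * buffer, scatterlist walk position and saved hardware context).
 */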
struct stm32_hash_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_hash_dev	*hdev;
	unsigned long		flags;

	u8			key[HASH_MAX_KEY_SIZE];
	int			keylen;
};
struct stm32_hash_request_ctx {
	struct stm32_hash_dev	*hdev;
	unsigned long		flags;
	unsigned long		op;

	u8 digest[SHA256_DIGEST_SIZE] __aligned(sizeof(u32));
	size_t			digcnt;
	size_t			bufcnt;
	size_t			buflen;

	/* DMA */
	struct scatterlist	*sg;
	unsigned int		offset;
	unsigned int		total;
	struct scatterlist	sg_key;

	dma_addr_t		dma_addr;
	size_t			dma_ct;
	int			nents;

	u8			data_type;

	u8 buffer[HASH_BUFLEN] __aligned(sizeof(u32));

	/* Export Context */
	u32			*hw_context;
};
struct stm32_hash_algs_info {
	struct ahash_alg	*algs_list;
	size_t			size;
};

struct stm32_hash_pdata {
	struct stm32_hash_algs_info	*algs_info;
	size_t				algs_info_size;
};
struct stm32_hash_dev {
	struct list_head	list;
	struct device		*dev;
	struct clk		*clk;
	struct reset_control	*rst;
	void __iomem		*io_base;
	phys_addr_t		phys_base;
	u32			dma_mode;
	u32			dma_maxburst;

	struct ahash_request	*req;
	struct crypto_engine	*engine;

	int			err;
	unsigned long		flags;

	struct dma_chan		*dma_lch;
	struct completion	dma_completion;

	const struct stm32_hash_pdata	*pdata;
};

struct stm32_hash_drv {
	struct list_head	dev_list;
	spinlock_t		lock; /* List protection access */
};
static struct stm32_hash_drv stm32_hash = {
	.dev_list = LIST_HEAD_INIT(stm32_hash.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(stm32_hash.lock),
};

static void stm32_hash_dma_callback(void *param);
static inline u32 stm32_hash_read(struct stm32_hash_dev *hdev, u32 offset)
{
	return readl_relaxed(hdev->io_base + offset);
}

static inline void stm32_hash_write(struct stm32_hash_dev *hdev,
				    u32 offset, u32 value)
{
	writel_relaxed(value, hdev->io_base + offset);
}

static inline int stm32_hash_wait_busy(struct stm32_hash_dev *hdev)
{
	u32 status;

	return readl_relaxed_poll_timeout(hdev->io_base + HASH_SR, status,
					  !(status & HASH_SR_BUSY), 10, 10000);
}
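/*
 * NBLW (in HASH_STR) holds the number of valid bits in the last 32-bit
 * word written to HASH_DIN: 8 * (length % 4), with 0 meaning the whole
 * word is valid.
 */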
static void stm32_hash_set_nblw(struct stm32_hash_dev *hdev, int length)
{
	u32 reg;

	reg = stm32_hash_read(hdev, HASH_STR);
	reg &= ~(HASH_STR_NBLW_MASK);
	reg |= (8U * ((length) % 4U));
	stm32_hash_write(hdev, HASH_STR, reg);
}
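/*
 * The HMAC key is written through HASH_DIN like ordinary data; setting
 * DCAL afterwards tells the peripheral to start processing it.
 */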
static int stm32_hash_write_key(struct stm32_hash_dev *hdev)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	u32 reg;
	int keylen = ctx->keylen;
	void *key = ctx->key;

	if (keylen) {
		stm32_hash_set_nblw(hdev, keylen);

		while (keylen > 0) {
			stm32_hash_write(hdev, HASH_DIN, *(u32 *)key);
			keylen -= 4;
			key += 4;
		}

		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);

		return -EINPROGRESS;
	}

	return 0;
}
static void stm32_hash_write_ctrl(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	u32 reg = HASH_CR_INIT;

	if (!(hdev->flags & HASH_FLAGS_INIT)) {
		switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
		case HASH_FLAGS_MD5:
			reg |= HASH_CR_ALGO_MD5;
			break;
		case HASH_FLAGS_SHA1:
			reg |= HASH_CR_ALGO_SHA1;
			break;
		case HASH_FLAGS_SHA224:
			reg |= HASH_CR_ALGO_SHA224;
			break;
		case HASH_FLAGS_SHA256:
			reg |= HASH_CR_ALGO_SHA256;
			break;
		default:
			reg |= HASH_CR_ALGO_MD5;
		}

		reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);

		if (rctx->flags & HASH_FLAGS_HMAC) {
			hdev->flags |= HASH_FLAGS_HMAC;
			reg |= HASH_CR_MODE;
			if (ctx->keylen > HASH_LONG_KEY)
				reg |= HASH_CR_LKEY;
		}

		stm32_hash_write(hdev, HASH_IMR, HASH_DCIE);

		stm32_hash_write(hdev, HASH_CR, reg);

		hdev->flags |= HASH_FLAGS_INIT;

		dev_dbg(hdev->dev, "Write Control %x\n", reg);
	}
}
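/*
 * Copy request data from the scatterlist into the 256-byte staging buffer,
 * advancing sg/offset/total, until the buffer is full or input runs out.
 */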
static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
{
	size_t count;

	while ((rctx->bufcnt < rctx->buflen) && rctx->total) {
		count = min(rctx->sg->length - rctx->offset, rctx->total);
		count = min(count, rctx->buflen - rctx->bufcnt);

		if (count <= 0) {
			if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
				rctx->sg = sg_next(rctx->sg);
				continue;
			} else {
				break;
			}
		}

		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg,
					 rctx->offset, count, 0);

		rctx->bufcnt += count;
		rctx->offset += count;
		rctx->total -= count;

		if (rctx->offset == rctx->sg->length) {
			rctx->sg = sg_next(rctx->sg);
			if (rctx->sg)
				rctx->offset = 0;
			else
				rctx->total = 0;
		}
	}
}
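/*
 * CPU (polling) transmit path: write the buffer word by word to HASH_DIN,
 * sending the HMAC key first when needed. On the final chunk, program NBLW
 * and set DCAL to trigger the digest computation.
 */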
static int stm32_hash_xmit_cpu(struct stm32_hash_dev *hdev,
			       const u8 *buf, size_t length, int final)
{
	unsigned int count, len32;
	const u32 *buffer = (const u32 *)buf;
	u32 reg;

	if (final)
		hdev->flags |= HASH_FLAGS_FINAL;

	len32 = DIV_ROUND_UP(length, sizeof(u32));

	dev_dbg(hdev->dev, "%s: length: %zd, final: %x len32 %i\n",
		__func__, length, final, len32);

	hdev->flags |= HASH_FLAGS_CPU;

	stm32_hash_write_ctrl(hdev);

	if (stm32_hash_wait_busy(hdev))
		return -ETIMEDOUT;

	if ((hdev->flags & HASH_FLAGS_HMAC) &&
	    (!(hdev->flags & HASH_FLAGS_HMAC_KEY))) {
		hdev->flags |= HASH_FLAGS_HMAC_KEY;
		stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	}

	for (count = 0; count < len32; count++)
		stm32_hash_write(hdev, HASH_DIN, buffer[count]);

	if (final) {
		stm32_hash_set_nblw(hdev, length);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		if (hdev->flags & HASH_FLAGS_HMAC) {
			if (stm32_hash_wait_busy(hdev))
				return -ETIMEDOUT;
			stm32_hash_write_key(hdev);
		}
		return -EINPROGRESS;
	}

	return 0;
}
static int stm32_hash_update_cpu(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	int bufcnt, err = 0, final;

	dev_dbg(hdev->dev, "%s flags %lx\n", __func__, rctx->flags);

	final = (rctx->flags & HASH_FLAGS_FINUP);

	while ((rctx->total >= rctx->buflen) ||
	       (rctx->bufcnt + rctx->total >= rctx->buflen)) {
		stm32_hash_append_sg(rctx);
		bufcnt = rctx->bufcnt;
		rctx->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt, 0);
	}

	stm32_hash_append_sg(rctx);

	if (final) {
		bufcnt = rctx->bufcnt;
		rctx->bufcnt = 0;
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt,
					  (rctx->flags & HASH_FLAGS_FINUP));
	}

	return err;
}
static int stm32_hash_xmit_dma(struct stm32_hash_dev *hdev,
			       struct scatterlist *sg, int length, int mdma)
{
	struct dma_async_tx_descriptor *in_desc;
	dma_cookie_t cookie;
	u32 reg;
	int err;

	in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
					  DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT |
					  DMA_CTRL_ACK);
	if (!in_desc) {
		dev_err(hdev->dev, "dmaengine_prep_slave error\n");
		return -ENOMEM;
	}

	reinit_completion(&hdev->dma_completion);
	in_desc->callback = stm32_hash_dma_callback;
	in_desc->callback_param = hdev;

	hdev->flags |= HASH_FLAGS_FINAL;
	hdev->flags |= HASH_FLAGS_DMA_ACTIVE;

	reg = stm32_hash_read(hdev, HASH_CR);

	if (mdma)
		reg |= HASH_CR_MDMAT;
	else
		reg &= ~HASH_CR_MDMAT;

	reg |= HASH_CR_DMAE;

	stm32_hash_write(hdev, HASH_CR, reg);

	stm32_hash_set_nblw(hdev, length);

	cookie = dmaengine_submit(in_desc);
	err = dma_submit_error(cookie);
	if (err)
		return -ENOMEM;

	dma_async_issue_pending(hdev->dma_lch);

	if (!wait_for_completion_timeout(&hdev->dma_completion,
					 msecs_to_jiffies(100)))
		err = -ETIMEDOUT;

	if (dma_async_is_tx_complete(hdev->dma_lch, cookie,
				     NULL, NULL) != DMA_COMPLETE)
		err = -ETIMEDOUT;

	if (err) {
		dev_err(hdev->dev, "DMA Error %i\n", err);
		dmaengine_terminate_all(hdev->dma_lch);
		return err;
	}

	return -EINPROGRESS;
}
static void stm32_hash_dma_callback(void *param)
{
	struct stm32_hash_dev *hdev = param;

	complete(&hdev->dma_completion);

	hdev->flags |= HASH_FLAGS_DMA_READY;
}
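/*
 * HMAC key injection for the DMA path: keys below the DMA threshold, or
 * hardware reporting dma_mode == 1 (value read from HASH_HWCFGR at probe),
 * are written by the CPU; otherwise the key is DMA-mapped and streamed
 * like ordinary data.
 */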
static int stm32_hash_hmac_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(hdev->req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	int err;

	if (ctx->keylen < HASH_DMA_THRESHOLD || (hdev->dma_mode == 1)) {
		err = stm32_hash_write_key(hdev);
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
	} else {
		if (!(hdev->flags & HASH_FLAGS_HMAC_KEY))
			sg_init_one(&rctx->sg_key, ctx->key,
				    ALIGN(ctx->keylen, sizeof(u32)));

		rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);

		dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
	}

	return err;
}
static int stm32_hash_dma_init(struct stm32_hash_dev *hdev)
{
	struct dma_slave_config dma_conf;
	struct dma_chan *chan;
	int err;

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_MEM_TO_DEV;
	dma_conf.dst_addr = hdev->phys_base + HASH_DIN;
	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.src_maxburst = hdev->dma_maxburst;
	dma_conf.dst_maxburst = hdev->dma_maxburst;
	dma_conf.device_fc = false;

	chan = dma_request_chan(hdev->dev, "in");
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	hdev->dma_lch = chan;

	err = dmaengine_slave_config(hdev->dma_lch, &dma_conf);
	if (err) {
		dma_release_channel(hdev->dma_lch);
		hdev->dma_lch = NULL;
		dev_err(hdev->dev, "Couldn't configure DMA slave.\n");
		return err;
	}

	init_completion(&hdev->dma_completion);

	return 0;
}
static int stm32_hash_dma_send(struct stm32_hash_dev *hdev)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
	struct scatterlist sg[1], *tsg;
	int err = 0, len = 0, reg, ncp = 0;
	unsigned int i;
	u32 *buffer = (void *)rctx->buffer;

	rctx->sg = hdev->req->src;
	rctx->total = hdev->req->nbytes;

	rctx->nents = sg_nents(rctx->sg);

	if (rctx->nents < 0)
		return -EINVAL;

	stm32_hash_write_ctrl(hdev);

	if (hdev->flags & HASH_FLAGS_HMAC) {
		err = stm32_hash_hmac_dma_send(hdev);
		if (err != -EINPROGRESS)
			return err;
	}

	for_each_sg(rctx->sg, tsg, rctx->nents, i) {
		sg[0] = *tsg;
		len = sg->length;

		if (sg_is_last(sg)) {
			if (hdev->dma_mode == 1) {
				len = (ALIGN(sg->length, 16) - 16);

				ncp = sg_pcopy_to_buffer(
					rctx->sg, rctx->nents,
					rctx->buffer, sg->length - len,
					rctx->total - sg->length + len);

				sg->length = len;
			} else {
				if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
					len = sg->length;
					sg->length = ALIGN(sg->length,
							   sizeof(u32));
				}
			}
		}

		rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
					  DMA_TO_DEVICE);
		if (rctx->dma_ct == 0) {
			dev_err(hdev->dev, "dma_map_sg error\n");
			return -ENOMEM;
		}

		err = stm32_hash_xmit_dma(hdev, sg, len,
					  !sg_is_last(sg));

		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);

		if (err == -ENOMEM)
			return err;
	}

	if (hdev->dma_mode == 1) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
		reg = stm32_hash_read(hdev, HASH_CR);
		reg &= ~HASH_CR_DMAE;
		reg |= HASH_CR_DMAA;
		stm32_hash_write(hdev, HASH_CR, reg);

		if (ncp) {
			memset(buffer + ncp, 0,
			       DIV_ROUND_UP(ncp, sizeof(u32)) - ncp);
			writesl(hdev->io_base + HASH_DIN, buffer,
				DIV_ROUND_UP(ncp, sizeof(u32)));
		}
		stm32_hash_set_nblw(hdev, ncp);
		reg = stm32_hash_read(hdev, HASH_STR);
		reg |= HASH_STR_DCAL;
		stm32_hash_write(hdev, HASH_STR, reg);
		err = -EINPROGRESS;
	}

	if (hdev->flags & HASH_FLAGS_HMAC) {
		if (stm32_hash_wait_busy(hdev))
			return -ETIMEDOUT;
		err = stm32_hash_hmac_dma_send(hdev);
	}

	return err;
}
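/* Bind a tfm to the first registered HASH device and cache the binding. */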
static struct stm32_hash_dev *stm32_hash_find_dev(struct stm32_hash_ctx *ctx)
{
	struct stm32_hash_dev *hdev = NULL, *tmp;

	spin_lock_bh(&stm32_hash.lock);
	if (!ctx->hdev) {
		list_for_each_entry(tmp, &stm32_hash.dev_list, list) {
			hdev = tmp;
			break;
		}
		ctx->hdev = hdev;
	} else {
		hdev = ctx->hdev;
	}

	spin_unlock_bh(&stm32_hash.lock);

	return hdev;
}
static bool stm32_hash_dma_aligned_data(struct ahash_request *req)
{
	struct scatterlist *sg;
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	int i;

	if (req->nbytes <= HASH_DMA_THRESHOLD)
		return false;

	if (sg_nents(req->src) > 1) {
		if (hdev->dma_mode == 1)
			return false;
		for_each_sg(req->src, sg, sg_nents(req->src), i) {
			if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
			    (!sg_is_last(sg)))
				return false;
		}
	}

	if (req->src->offset % 4)
		return false;

	return true;
}
static int stm32_hash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);

	rctx->hdev = hdev;

	rctx->flags = HASH_FLAGS_CPU;

	rctx->digcnt = crypto_ahash_digestsize(tfm);
	switch (rctx->digcnt) {
	case MD5_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_MD5;
		break;
	case SHA1_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA1;
		break;
	case SHA224_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA224;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->flags |= HASH_FLAGS_SHA256;
		break;
	default:
		return -EINVAL;
	}

	rctx->bufcnt = 0;
	rctx->buflen = HASH_BUFLEN;
	rctx->total = 0;
	rctx->offset = 0;
	rctx->data_type = HASH_DATA_8_BITS;

	memset(rctx->buffer, 0, HASH_BUFLEN);

	if (ctx->flags & HASH_FLAGS_HMAC)
		rctx->flags |= HASH_FLAGS_HMAC;

	dev_dbg(hdev->dev, "%s Flags %lx\n", __func__, rctx->flags);

	return 0;
}
static int stm32_hash_update_req(struct stm32_hash_dev *hdev)
{
	return stm32_hash_update_cpu(hdev);
}

static int stm32_hash_final_req(struct stm32_hash_dev *hdev)
{
	struct ahash_request *req = hdev->req;
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	int err;
	int buflen = rctx->bufcnt;

	rctx->bufcnt = 0;

	if (!(rctx->flags & HASH_FLAGS_CPU))
		err = stm32_hash_dma_send(hdev);
	else
		err = stm32_hash_xmit_cpu(hdev, rctx->buffer, buflen, 1);

	return err;
}
static void stm32_hash_copy_hash(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	__be32 *hash = (void *)rctx->digest;
	unsigned int i, hashsize;

	switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
	case HASH_FLAGS_MD5:
		hashsize = MD5_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA1:
		hashsize = SHA1_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA224:
		hashsize = SHA224_DIGEST_SIZE;
		break;
	case HASH_FLAGS_SHA256:
		hashsize = SHA256_DIGEST_SIZE;
		break;
	default:
		return;
	}

	for (i = 0; i < hashsize / sizeof(u32); i++)
		hash[i] = cpu_to_be32(stm32_hash_read(rctx->hdev,
						      HASH_HREG(i)));
}
static int stm32_hash_finish(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (!req->result)
		return -EINVAL;

	memcpy(req->result, rctx->digest, rctx->digcnt);

	return 0;
}
static void stm32_hash_finish_req(struct ahash_request *req, int err)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_dev *hdev = rctx->hdev;

	if (!err && (HASH_FLAGS_FINAL & hdev->flags)) {
		stm32_hash_copy_hash(req);
		err = stm32_hash_finish(req);
		hdev->flags &= ~(HASH_FLAGS_FINAL | HASH_FLAGS_CPU |
				 HASH_FLAGS_INIT | HASH_FLAGS_DMA_READY |
				 HASH_FLAGS_OUTPUT_READY | HASH_FLAGS_HMAC |
				 HASH_FLAGS_HMAC_INIT | HASH_FLAGS_HMAC_FINAL |
				 HASH_FLAGS_HMAC_KEY);
	} else {
		rctx->flags |= HASH_FLAGS_ERRORS;
	}

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	crypto_finalize_hash_request(hdev->engine, req, err);
}
static int stm32_hash_hw_init(struct stm32_hash_dev *hdev,
			      struct stm32_hash_request_ctx *rctx)
{
	pm_runtime_get_sync(hdev->dev);

	if (!(HASH_FLAGS_INIT & hdev->flags)) {
		stm32_hash_write(hdev, HASH_CR, HASH_CR_INIT);
		stm32_hash_write(hdev, HASH_STR, 0);
		stm32_hash_write(hdev, HASH_DIN, 0);
		stm32_hash_write(hdev, HASH_IMR, 0);
		hdev->err = 0;
	}

	return 0;
}
static int stm32_hash_one_request(struct crypto_engine *engine, void *areq);
static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq);

static int stm32_hash_handle_queue(struct stm32_hash_dev *hdev,
				   struct ahash_request *req)
{
	return crypto_transfer_hash_request_to_engine(hdev->engine, req);
}
static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_request_ctx *rctx;

	if (!hdev)
		return -ENODEV;

	hdev->req = req;

	rctx = ahash_request_ctx(req);

	dev_dbg(hdev->dev, "processing new req, op: %lu, nbytes %d\n",
		rctx->op, req->nbytes);

	return stm32_hash_hw_init(hdev, rctx);
}
static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = container_of(areq, struct ahash_request,
						 base);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	struct stm32_hash_request_ctx *rctx;
	int err = 0;

	if (!hdev)
		return -ENODEV;

	hdev->req = req;

	rctx = ahash_request_ctx(req);

	if (rctx->op == HASH_OP_UPDATE)
		err = stm32_hash_update_req(hdev);
	else if (rctx->op == HASH_OP_FINAL)
		err = stm32_hash_final_req(hdev);

	if (err != -EINPROGRESS)
		/* done task will not finish it, so do it here */
		stm32_hash_finish_req(req, err);

	return 0;
}
static int stm32_hash_enqueue(struct ahash_request *req, unsigned int op)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct stm32_hash_dev *hdev = ctx->hdev;

	rctx->op = op;

	return stm32_hash_handle_queue(hdev, req);
}
static int stm32_hash_update(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	if (!req->nbytes || !(rctx->flags & HASH_FLAGS_CPU))
		return 0;

	rctx->total = req->nbytes;
	rctx->sg = req->src;
	rctx->offset = 0;

	if ((rctx->bufcnt + rctx->total < rctx->buflen)) {
		stm32_hash_append_sg(rctx);
		return 0;
	}

	return stm32_hash_enqueue(req, HASH_OP_UPDATE);
}
static int stm32_hash_final(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);

	rctx->flags |= HASH_FLAGS_FINUP;

	return stm32_hash_enqueue(req, HASH_OP_FINAL);
}
static int stm32_hash_finup(struct ahash_request *req)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	int err1, err2;

	rctx->flags |= HASH_FLAGS_FINUP;

	if (hdev->dma_lch && stm32_hash_dma_aligned_data(req))
		rctx->flags &= ~HASH_FLAGS_CPU;

	err1 = stm32_hash_update(req);

	if (err1 == -EINPROGRESS || err1 == -EBUSY)
		return err1;

	/*
	 * final() has to be always called to cleanup resources
	 * even if update() failed, except EINPROGRESS
	 */
	err2 = stm32_hash_final(req);

	return err1 ?: err2;
}

static int stm32_hash_digest(struct ahash_request *req)
{
	return stm32_hash_init(req) ?: stm32_hash_finup(req);
}
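/*
 * export/import save and restore the hardware state (IMR, STR, CR plus the
 * 53 context-swap registers) so a partially-hashed request can be suspended
 * and resumed later. The buffer is allocated in export() and freed once
 * consumed in import().
 */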
static int stm32_hash_export(struct ahash_request *req, void *out)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	u32 *preg;
	unsigned int i;

	rctx->hw_context = kmalloc_array(3 + HASH_CSR_REGISTER_NUMBER,
					 sizeof(u32), GFP_KERNEL);
	if (!rctx->hw_context)
		return -ENOMEM;

	pm_runtime_get_sync(hdev->dev);

	while ((stm32_hash_read(hdev, HASH_SR) & HASH_SR_BUSY))
		cpu_relax();

	preg = rctx->hw_context;

	*preg++ = stm32_hash_read(hdev, HASH_IMR);
	*preg++ = stm32_hash_read(hdev, HASH_STR);
	*preg++ = stm32_hash_read(hdev, HASH_CR);
	for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++)
		*preg++ = stm32_hash_read(hdev, HASH_CSR(i));

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}
static int stm32_hash_import(struct ahash_request *req, const void *in)
{
	struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct stm32_hash_dev *hdev = stm32_hash_find_dev(ctx);
	const u32 *preg = in;
	u32 reg;
	unsigned int i;

	memcpy(rctx, in, sizeof(*rctx));

	preg = rctx->hw_context;

	pm_runtime_get_sync(hdev->dev);

	stm32_hash_write(hdev, HASH_IMR, *preg++);
	stm32_hash_write(hdev, HASH_STR, *preg++);
	stm32_hash_write(hdev, HASH_CR, *preg);
	reg = *preg++ | HASH_CR_INIT;
	stm32_hash_write(hdev, HASH_CR, reg);

	for (i = 0; i < HASH_CSR_REGISTER_NUMBER; i++)
		stm32_hash_write(hdev, HASH_CSR(i), *preg++);

	pm_runtime_mark_last_busy(hdev->dev);
	pm_runtime_put_autosuspend(hdev->dev);

	kfree(rctx->hw_context);

	return 0;
}
static int stm32_hash_setkey(struct crypto_ahash *tfm,
			     const u8 *key, unsigned int keylen)
{
	struct stm32_hash_ctx *ctx = crypto_ahash_ctx(tfm);

	if (keylen <= HASH_MAX_KEY_SIZE) {
		memcpy(ctx->key, key, keylen);
		ctx->keylen = keylen;
	} else {
		return -ENOMEM;
	}

	return 0;
}
static int stm32_hash_cra_init_algs(struct crypto_tfm *tfm,
				    const char *algs_hmac_name)
{
	struct stm32_hash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct stm32_hash_request_ctx));

	ctx->keylen = 0;

	if (algs_hmac_name)
		ctx->flags |= HASH_FLAGS_HMAC;

	ctx->enginectx.op.do_one_request = stm32_hash_one_request;
	ctx->enginectx.op.prepare_request = stm32_hash_prepare_req;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}

static int stm32_hash_cra_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, NULL);
}

static int stm32_hash_cra_md5_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "md5");
}

static int stm32_hash_cra_sha1_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha1");
}

static int stm32_hash_cra_sha224_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha224");
}

static int stm32_hash_cra_sha256_init(struct crypto_tfm *tfm)
{
	return stm32_hash_cra_init_algs(tfm, "sha256");
}
static irqreturn_t stm32_hash_irq_thread(int irq, void *dev_id)
{
	struct stm32_hash_dev *hdev = dev_id;

	if (HASH_FLAGS_CPU & hdev->flags) {
		if (HASH_FLAGS_OUTPUT_READY & hdev->flags) {
			hdev->flags &= ~HASH_FLAGS_OUTPUT_READY;
			goto finish;
		}
	} else if (HASH_FLAGS_DMA_READY & hdev->flags) {
		if (HASH_FLAGS_DMA_ACTIVE & hdev->flags) {
			hdev->flags &= ~HASH_FLAGS_DMA_ACTIVE;
			goto finish;
		}
	}

	return IRQ_HANDLED;

finish:
	/* Finish current request */
	stm32_hash_finish_req(hdev->req, 0);

	return IRQ_HANDLED;
}

static irqreturn_t stm32_hash_irq_handler(int irq, void *dev_id)
{
	struct stm32_hash_dev *hdev = dev_id;
	u32 reg;

	reg = stm32_hash_read(hdev, HASH_SR);
	if (reg & HASH_SR_OUTPUT_READY) {
		reg &= ~HASH_SR_OUTPUT_READY;
		stm32_hash_write(hdev, HASH_SR, reg);
		hdev->flags |= HASH_FLAGS_OUTPUT_READY;
		/* Disable IT */
		stm32_hash_write(hdev, HASH_IMR, 0);
		return IRQ_WAKE_THREAD;
	}

	return IRQ_NONE;
}
static struct ahash_alg algs_md5_sha1[] = {
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "stm32-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "stm32-hmac-md5",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_md5_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "stm32-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "stm32-hmac-sha1",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha1_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
static struct ahash_alg algs_sha224_sha256[] = {
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "stm32-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.setkey = stm32_hash_setkey,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "stm32-hmac-sha224",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha224_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "stm32-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
	{
		.init = stm32_hash_init,
		.update = stm32_hash_update,
		.final = stm32_hash_final,
		.finup = stm32_hash_finup,
		.digest = stm32_hash_digest,
		.export = stm32_hash_export,
		.import = stm32_hash_import,
		.setkey = stm32_hash_setkey,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct stm32_hash_request_ctx),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "stm32-hmac-sha256",
				.cra_priority = 200,
				.cra_flags = CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct stm32_hash_ctx),
				.cra_alignmask = 3,
				.cra_init = stm32_hash_cra_sha256_init,
				.cra_module = THIS_MODULE,
			}
		}
	},
};
static int stm32_hash_register_algs(struct stm32_hash_dev *hdev)
{
	unsigned int i, j;
	int err;

	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
		for (j = 0; j < hdev->pdata->algs_info[i].size; j++) {
			err = crypto_register_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
			if (err)
				goto err_algs;
		}
	}

	return 0;

err_algs:
	dev_err(hdev->dev, "Algo %d : %d failed\n", i, j);
	for (; i--; ) {
		for (; j--;)
			crypto_unregister_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
	}

	return err;
}
static int stm32_hash_unregister_algs(struct stm32_hash_dev *hdev)
{
	unsigned int i, j;

	for (i = 0; i < hdev->pdata->algs_info_size; i++) {
		for (j = 0; j < hdev->pdata->algs_info[i].size; j++)
			crypto_unregister_ahash(
				&hdev->pdata->algs_info[i].algs_list[j]);
	}

	return 0;
}
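/* The STM32F4 variant registers MD5/SHA-1 only; STM32F7 adds SHA-224/256. */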
static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f4[] = {
	{
		.algs_list	= algs_md5_sha1,
		.size		= ARRAY_SIZE(algs_md5_sha1),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f4 = {
	.algs_info	= stm32_hash_algs_info_stm32f4,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f4),
};

static struct stm32_hash_algs_info stm32_hash_algs_info_stm32f7[] = {
	{
		.algs_list	= algs_md5_sha1,
		.size		= ARRAY_SIZE(algs_md5_sha1),
	},
	{
		.algs_list	= algs_sha224_sha256,
		.size		= ARRAY_SIZE(algs_sha224_sha256),
	},
};

static const struct stm32_hash_pdata stm32_hash_pdata_stm32f7 = {
	.algs_info	= stm32_hash_algs_info_stm32f7,
	.algs_info_size	= ARRAY_SIZE(stm32_hash_algs_info_stm32f7),
};

static const struct of_device_id stm32_hash_of_match[] = {
	{
		.compatible = "st,stm32f456-hash",
		.data = &stm32_hash_pdata_stm32f4,
	},
	{
		.compatible = "st,stm32f756-hash",
		.data = &stm32_hash_pdata_stm32f7,
	},
	{},
};

MODULE_DEVICE_TABLE(of, stm32_hash_of_match);
static int stm32_hash_get_of_match(struct stm32_hash_dev *hdev,
				   struct device *dev)
{
	hdev->pdata = of_device_get_match_data(dev);
	if (!hdev->pdata) {
		dev_err(dev, "no compatible OF match\n");
		return -EINVAL;
	}

	if (of_property_read_u32(dev->of_node, "dma-maxburst",
				 &hdev->dma_maxburst)) {
		dev_info(dev, "dma-maxburst not specified, using 0\n");
		hdev->dma_maxburst = 0;
	}

	return 0;
}
static int stm32_hash_probe(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	struct device *dev = &pdev->dev;
	struct resource *res;
	int ret, irq;

	hdev = devm_kzalloc(dev, sizeof(*hdev), GFP_KERNEL);
	if (!hdev)
		return -ENOMEM;

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	hdev->io_base = devm_ioremap_resource(dev, res);
	if (IS_ERR(hdev->io_base))
		return PTR_ERR(hdev->io_base);

	hdev->phys_base = res->start;

	ret = stm32_hash_get_of_match(hdev, dev);
	if (ret)
		return ret;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_hash_irq_handler,
					stm32_hash_irq_thread, IRQF_ONESHOT,
					dev_name(dev), hdev);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	hdev->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(hdev->clk))
		return dev_err_probe(dev, PTR_ERR(hdev->clk),
				     "failed to get clock for hash\n");

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(dev, "failed to enable hash clock (%d)\n", ret);
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, HASH_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	hdev->rst = devm_reset_control_get(&pdev->dev, NULL);
	if (IS_ERR(hdev->rst)) {
		if (PTR_ERR(hdev->rst) == -EPROBE_DEFER) {
			ret = -EPROBE_DEFER;
			goto err_reset;
		}
	} else {
		reset_control_assert(hdev->rst);
		udelay(2);
		reset_control_deassert(hdev->rst);
	}

	hdev->dev = dev;

	platform_set_drvdata(pdev, hdev);

	ret = stm32_hash_dma_init(hdev);
	switch (ret) {
	case 0:
		break;
	case -ENOENT:
		dev_dbg(dev, "DMA mode not available\n");
		break;
	default:
		goto err_dma;
	}

	spin_lock(&stm32_hash.lock);
	list_add_tail(&hdev->list, &stm32_hash.dev_list);
	spin_unlock(&stm32_hash.lock);

	/* Initialize crypto engine */
	hdev->engine = crypto_engine_alloc_init(dev, 1);
	if (!hdev->engine) {
		ret = -ENOMEM;
		goto err_engine;
	}

	ret = crypto_engine_start(hdev->engine);
	if (ret)
		goto err_engine_start;

	hdev->dma_mode = stm32_hash_read(hdev, HASH_HWCFGR);

	/* Register algos */
	ret = stm32_hash_register_algs(hdev);
	if (ret)
		goto err_algs;

	dev_info(dev, "Init HASH done HW ver %x DMA mode %u\n",
		 stm32_hash_read(hdev, HASH_VER), hdev->dma_mode);

	pm_runtime_put_sync(dev);

	return 0;

err_algs:
err_engine_start:
	crypto_engine_exit(hdev->engine);
err_engine:
	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);
err_dma:
	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);
err_reset:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(hdev->clk);

	return ret;
}
static int stm32_hash_remove(struct platform_device *pdev)
{
	struct stm32_hash_dev *hdev;
	int ret;

	hdev = platform_get_drvdata(pdev);
	if (!hdev)
		return -ENODEV;

	ret = pm_runtime_get_sync(hdev->dev);
	if (ret < 0)
		return ret;

	stm32_hash_unregister_algs(hdev);

	crypto_engine_exit(hdev->engine);

	spin_lock(&stm32_hash.lock);
	list_del(&hdev->list);
	spin_unlock(&stm32_hash.lock);

	if (hdev->dma_lch)
		dma_release_channel(hdev->dma_lch);

	pm_runtime_disable(hdev->dev);
	pm_runtime_put_noidle(hdev->dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}
#ifdef CONFIG_PM
static int stm32_hash_runtime_suspend(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);

	clk_disable_unprepare(hdev->clk);

	return 0;
}

static int stm32_hash_runtime_resume(struct device *dev)
{
	struct stm32_hash_dev *hdev = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(hdev->clk);
	if (ret) {
		dev_err(hdev->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif
static const struct dev_pm_ops stm32_hash_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_hash_runtime_suspend,
			   stm32_hash_runtime_resume, NULL)
};

static struct platform_driver stm32_hash_driver = {
	.probe		= stm32_hash_probe,
	.remove		= stm32_hash_remove,
	.driver		= {
		.name	= "stm32-hash",
		.pm = &stm32_hash_pm_ops,
		.of_match_table	= stm32_hash_of_match,
	}
};

module_platform_driver(stm32_hash_driver);
MODULE_DESCRIPTION("STM32 SHA1/224/256 & MD5 (HMAC) hw accelerator driver");
MODULE_AUTHOR("Lionel Debieve <lionel.debieve@st.com>");
MODULE_LICENSE("GPL v2");