// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-hash.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2015-2020 Corentin Labbe <clabbe@baylibre.com>
 *
 * This file adds support for MD5 and SHA1/SHA224/SHA256.
 *
 * You can find the datasheet in Documentation/arm/sunxi.rst
 */
#include <linux/dma-mapping.h>
#include <linux/pm_runtime.h>
#include <linux/scatterlist.h>
#include <crypto/internal/hash.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/md5.h>
#include "sun8i-ss.h"
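
/*
 * Note: the hardware is used only for one-shot digest() requests that
 * sun8i_ss_hash_need_fallback() accepts; init/update/final/finup and
 * export/import are all delegated to a software fallback ahash, as the
 * driver never reads a partial hash state back from the SS.
 */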

int sun8i_ss_hash_crainit(struct crypto_tfm *tfm)
{
	struct sun8i_ss_hash_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct sun8i_ss_alg_template *algt;
	int err;

	memset(op, 0, sizeof(struct sun8i_ss_hash_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
	op->ss = algt->ss;

	op->enginectx.op.do_one_request = sun8i_ss_hash_run;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	op->fallback_tfm = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					      CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(algt->ss->dev, "Fallback driver could not be loaded\n");
		return PTR_ERR(op->fallback_tfm);
	}

	if (algt->alg.hash.halg.statesize < crypto_ahash_statesize(op->fallback_tfm))
		algt->alg.hash.halg.statesize = crypto_ahash_statesize(op->fallback_tfm);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct sun8i_ss_hash_reqctx) +
				 crypto_ahash_reqsize(op->fallback_tfm));

	dev_info(op->ss->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(tfm),
		 crypto_tfm_alg_driver_name(&op->fallback_tfm->base));

	err = pm_runtime_get_sync(op->ss->dev);
	if (err < 0)
		goto error_pm;

	return 0;

error_pm:
	pm_runtime_put_noidle(op->ss->dev);
	crypto_free_ahash(op->fallback_tfm);
	return err;
}
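
/*
 * Note on the statesize adjustment above: export() and import() below
 * serialize the fallback's state, so the advertised statesize must be
 * at least as large as the fallback's statesize.
 */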

void sun8i_ss_hash_craexit(struct crypto_tfm *tfm)
{
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(tfmctx->fallback_tfm);
	pm_runtime_put_sync_suspend(tfmctx->ss->dev);
}

int sun8i_ss_hash_init(struct ahash_request *areq)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);

	memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx));

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}
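
/*
 * Each fallback-delegating helper below follows the same pattern:
 * re-target the request at the fallback tfm and forward only the
 * CRYPTO_TFM_REQ_MAY_SLEEP flag from the original request.
 */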

int sun8i_ss_hash_export(struct ahash_request *areq, void *out)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}

int sun8i_ss_hash_import(struct ahash_request *areq, const void *in)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

int sun8i_ss_hash_final(struct ahash_request *areq)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
	struct sun8i_ss_alg_template *algt;
#endif

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = areq->result;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
	algt->stat_fb++;
#endif

	return crypto_ahash_final(&rctx->fallback_req);
}

int sun8i_ss_hash_update(struct ahash_request *areq)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = areq->nbytes;
	rctx->fallback_req.src = areq->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

int sun8i_ss_hash_finup(struct ahash_request *areq)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
	struct sun8i_ss_alg_template *algt;
#endif

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = areq->nbytes;
	rctx->fallback_req.src = areq->src;
	rctx->fallback_req.result = areq->result;
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
	algt->stat_fb++;
#endif

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int sun8i_ss_hash_digest_fb(struct ahash_request *areq)
{
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
	struct sun8i_ss_alg_template *algt;
#endif

	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
	rctx->fallback_req.base.flags = areq->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = areq->nbytes;
	rctx->fallback_req.src = areq->src;
	rctx->fallback_req.result = areq->result;
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
	algt->stat_fb++;
#endif

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int sun8i_ss_run_hash_task(struct sun8i_ss_dev *ss,
				  struct sun8i_ss_hash_reqctx *rctx,
				  const char *name)
{
	int flow = rctx->flow;
	u32 v = SS_START;
	int i;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	ss->flows[flow].stat_req++;
#endif

	/* choose between stream0/stream1 */
	if (flow)
		v |= SS_FLOW1;
	else
		v |= SS_FLOW0;

	v |= rctx->method;

	for (i = 0; i < MAX_SG; i++) {
		if (!rctx->t_dst[i].addr)
			break;

		mutex_lock(&ss->mlock);
		if (i > 0) {
			/* chain with the previous digest, reused as key/IV */
			v |= BIT(17);
			writel(rctx->t_dst[i - 1].addr, ss->base + SS_KEY_ADR_REG);
			writel(rctx->t_dst[i - 1].addr, ss->base + SS_IV_ADR_REG);
		}

		dev_dbg(ss->dev,
			"Processing SG %d on flow %d %s ctl=%x %d to %d method=%x src=%x dst=%x\n",
			i, flow, name, v,
			rctx->t_src[i].len, rctx->t_dst[i].len,
			rctx->method, rctx->t_src[i].addr, rctx->t_dst[i].addr);

		writel(rctx->t_src[i].addr, ss->base + SS_SRC_ADR_REG);
		writel(rctx->t_dst[i].addr, ss->base + SS_DST_ADR_REG);
		writel(rctx->t_src[i].len, ss->base + SS_LEN_ADR_REG);
		writel(BIT(0) | BIT(1), ss->base + SS_INT_CTL_REG);

		reinit_completion(&ss->flows[flow].complete);
		ss->flows[flow].status = 0;

		writel(v, ss->base + SS_CTL_REG);
		mutex_unlock(&ss->mlock);
		wait_for_completion_interruptible_timeout(&ss->flows[flow].complete,
							  msecs_to_jiffies(2000));
		if (ss->flows[flow].status == 0) {
			dev_err(ss->dev, "DMA timeout for %s\n", name);
			return -EFAULT;
		}
	}

	return 0;
}
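
/*
 * Note on the synchronization above: ss->mlock serializes register
 * programming across flows, the per-flow completion is expected to be
 * signalled by the driver's interrupt handler, and a still-zero status
 * after the 2000ms wait is reported as a DMA timeout.
 */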

static bool sun8i_ss_hash_need_fallback(struct ahash_request *areq)
{
	struct scatterlist *sg;

	if (areq->nbytes == 0)
		return true;
	/* we need to reserve one SG for the padding one */
	if (sg_nents(areq->src) > MAX_SG - 1)
		return true;
	sg = areq->src;
	while (sg) {
		/* The SS can hash only full block sizes; since it supports
		 * only MD5/SHA1/SHA224/SHA256, the blocksize is always 64.
		 * TODO: handle a request whose last SG is not a multiple of
		 * 64 bytes by copying the tail into a new SG of size 64.
		 */
		if (sg->length % 64 || !IS_ALIGNED(sg->offset, sizeof(u32)))
			return true;
		sg = sg_next(sg);
	}
	return false;
}
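
/*
 * Worked example: a 100-byte request carried in a single SG fails the
 * sg->length % 64 test (100 % 64 == 36) and is hashed by the fallback,
 * while a 128-byte request in one 4-byte-aligned SG (128 % 64 == 0)
 * can be offloaded.
 */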

int sun8i_ss_hash_digest(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_dev *ss;
	struct crypto_engine *engine;
	struct scatterlist *sg;
	int nr_sgs, e, i;

	if (sun8i_ss_hash_need_fallback(areq))
		return sun8i_ss_hash_digest_fb(areq);

	nr_sgs = sg_nents(areq->src);
	if (nr_sgs > MAX_SG - 1)
		return sun8i_ss_hash_digest_fb(areq);

	for_each_sg(areq->src, sg, nr_sgs, i) {
		if (sg->length % 4 || !IS_ALIGNED(sg->offset, sizeof(u32)))
			return sun8i_ss_hash_digest_fb(areq);
	}

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
	ss = algt->ss;

	e = sun8i_ss_get_engine_number(ss);
	rctx->flow = e;
	engine = ss->flows[e].engine;

	return crypto_transfer_hash_request_to_engine(engine, areq);
}

/* sun8i_ss_hash_run - run an ahash request
 * Send the data of the request to the SS along with an extra SG with padding
 */
int sun8i_ss_hash_run(struct crypto_engine *engine, void *breq)
{
	struct ahash_request *areq = container_of(breq, struct ahash_request, base);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_dev *ss;
	struct scatterlist *sg;
	int nr_sgs, err, digestsize;
	unsigned int len;
	u64 fill, min_fill, byte_count;
	void *pad, *result;
	int j, i, todo;
	__be64 *bebits;
	__le64 *lebits;
	dma_addr_t addr_res, addr_pad;
	__le32 *bf;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
	ss = algt->ss;

	digestsize = algt->alg.hash.halg.digestsize;
	if (digestsize == SHA224_DIGEST_SIZE)
		digestsize = SHA256_DIGEST_SIZE;

	/* the padding could be up to two blocks. */
	pad = kzalloc(algt->alg.hash.halg.base.cra_blocksize * 2, GFP_KERNEL | GFP_DMA);
	if (!pad)
		return -ENOMEM;
	bf = (__le32 *)pad;

	result = kzalloc(digestsize, GFP_KERNEL | GFP_DMA);
	if (!result) {
		kfree(pad);
		return -ENOMEM;
	}

	for (i = 0; i < MAX_SG; i++) {
		rctx->t_dst[i].addr = 0;
		rctx->t_dst[i].len = 0;
	}

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->method = ss->variant->alg_hash[algt->ss_algo_id];

	nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
	if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
		dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
		err = -EINVAL;
		goto theend;
	}

	addr_res = dma_map_single(ss->dev, result, digestsize, DMA_FROM_DEVICE);
	if (dma_mapping_error(ss->dev, addr_res)) {
		dev_err(ss->dev, "DMA map dest\n");
		err = -EINVAL;
		goto theend;
	}

	len = areq->nbytes;
	for_each_sg(areq->src, sg, nr_sgs, i) {
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		len -= todo;
		rctx->t_dst[i].addr = addr_res;
		rctx->t_dst[i].len = digestsize / 4;
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend;
	}
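
	/*
	 * Note: the code below builds the standard MD5/SHA tail (a 0x80
	 * byte, zero fill, then the 64-bit message length in bits) in the
	 * separate "pad" buffer, which is queued as one extra SG after the
	 * data; the hardware itself does no padding.
	 */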
	byte_count = areq->nbytes;
	j = 0;
	bf[j++] = cpu_to_le32(0x80);

	fill = 64 - (byte_count % 64);
	min_fill = 3 * sizeof(u32);

	if (fill < min_fill)
		fill += 64;

	j += (fill - min_fill) / sizeof(u32);
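
	/*
	 * Worked example: for a 64-byte message, fill = 64 - (64 % 64) = 64
	 * and min_fill = 12, so j advances by (64 - 12) / 4 = 13 words past
	 * the 0x80 marker; with the two length words added below, the
	 * padding SG is 16 words, exactly one extra block.
	 */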

	switch (algt->ss_algo_id) {
	case SS_ID_HASH_MD5:
		lebits = (__le64 *)&bf[j];
		*lebits = cpu_to_le64(byte_count << 3);
		j += 2;
		break;
	case SS_ID_HASH_SHA1:
	case SS_ID_HASH_SHA224:
	case SS_ID_HASH_SHA256:
		bebits = (__be64 *)&bf[j];
		*bebits = cpu_to_be64(byte_count << 3);
		j += 2;
		break;
	}
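
	/*
	 * Note: both branches store the message length in bits
	 * (byte_count << 3); MD5 specifies a little-endian length field
	 * while the SHA family specifies big-endian, hence the separate
	 * __le64/__be64 stores.
	 */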

	addr_pad = dma_map_single(ss->dev, pad, j * 4, DMA_TO_DEVICE);
	rctx->t_src[i].addr = addr_pad;
	rctx->t_src[i].len = j;
	rctx->t_dst[i].addr = addr_res;
	rctx->t_dst[i].len = digestsize / 4;
	if (dma_mapping_error(ss->dev, addr_pad)) {
		dev_err(ss->dev, "DMA error on padding SG\n");
		err = -EINVAL;
		goto theend;
	}

	err = sun8i_ss_run_hash_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

	dma_unmap_single(ss->dev, addr_pad, j * 4, DMA_TO_DEVICE);
	dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
	dma_unmap_single(ss->dev, addr_res, digestsize, DMA_FROM_DEVICE);

	memcpy(areq->result, result, algt->alg.hash.halg.digestsize);
theend:
	kfree(pad);
	kfree(result);
	crypto_finalize_hash_request(engine, breq, err);
	return 0;
}