// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit keysizes
 * in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arch/arm/sunxi.rst
 */

#include <linux/bottom_half.h>
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"
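
/*
 * sun8i_ss_need_fallback() - decide whether a request must use the software
 * fallback. The hardware path is only taken when the request length is a
 * non-zero multiple of 16, when source and destination each fit in at most 8
 * scatterlist entries whose lengths are multiples of 16 and whose offsets are
 * 16-byte aligned, and when both scatterlists have matching entry lengths.
 * Each rejection bumps the corresponding fallback statistics counter.
 */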
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;
	unsigned int todo, len;

	if (areq->cryptlen == 0 || areq->cryptlen % 16) {
		algt->stat_fb_len++;
		return true;
	}

	if (sg_nents_for_len(areq->src, areq->cryptlen) > 8 ||
	    sg_nents_for_len(areq->dst, areq->cryptlen) > 8) {
		algt->stat_fb_sgnum++;
		return true;
	}

	len = areq->cryptlen;
	sg = areq->src;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	len = areq->cryptlen;
	sg = areq->dst;
	while (sg) {
		todo = min(len, sg->length);
		if ((todo % 16) != 0) {
			algt->stat_fb_sglen++;
			return true;
		}
		if (!IS_ALIGNED(sg->offset, 16)) {
			algt->stat_fb_align++;
			return true;
		}
		len -= todo;
		sg = sg_next(sg);
	}

	/* SS needs the same number of SGs (with the same lengths) for source and destination */
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}

	return false;
}
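
/*
 * sun8i_ss_cipher_fallback() - run the request on the software fallback tfm,
 * forwarding source, destination, length, IV and the completion callback of
 * the original request, and account the fallback when debug stats are enabled.
 */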
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG)) {
		struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
		struct sun8i_ss_alg_template *algt __maybe_unused;

		algt = container_of(alg, struct sun8i_ss_alg_template,
				    alg.skcipher.base);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
		algt->stat_fb++;
#endif
	}

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);

	return err;
}
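
/*
 * sun8i_ss_setup_ivs() - prepare and DMA-map the per-SG IVs for a CBC request.
 * For decryption the last ciphertext block is first saved in sf->biv so the
 * next IV can be given back to the caller even when the request is in-place.
 */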
static int sun8i_ss_setup_ivs(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct scatterlist *sg = areq->src;
	unsigned int todo, offset;
	unsigned int len = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	int i = 0;
	dma_addr_t a;
	int err;

	rctx->ivlen = ivsize;
	if (rctx->op_dir & SS_DECRYPTION) {
		offset = areq->cryptlen - ivsize;
		scatterwalk_map_and_copy(sf->biv, areq->src, offset,
					 ivsize, 0);
	}

	/* we need to copy all IVs from the source in case DMA is bidirectional */
	while (sg && len) {
		if (sg_dma_len(sg) == 0) {
			sg = sg_next(sg);
			continue;
		}
		if (i == 0)
			memcpy(sf->iv[0], areq->iv, ivsize);
		a = dma_map_single(ss->dev, sf->iv[i], ivsize, DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, a)) {
			memzero_explicit(sf->iv[i], ivsize);
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -EFAULT;
			goto dma_iv_error;
		}
		rctx->p_iv[i] = a;
		/* the remaining IVs only need to be set up in the decrypt direction */
		if (rctx->op_dir == SS_ENCRYPTION)
			return 0;
		todo = min(len, sg_dma_len(sg));
		len -= todo;
		i++;
		if (i < MAX_SG) {
			offset = sg->length - ivsize;
			scatterwalk_map_and_copy(sf->iv[i], sg, offset, ivsize, 0);
		}
		rctx->niv = i;
		sg = sg_next(sg);
	}

	return 0;
dma_iv_error:
	i--;
	while (i >= 0) {
		dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
		memzero_explicit(sf->iv[i], ivsize);
		i--;
	}
	return err;
}
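
/*
 * sun8i_ss_cipher() - main hardware path. DMA-map the key, the IVs and the
 * source/destination scatterlists, fill rctx->t_src/t_dst with the per-SG
 * addresses and word counts, run the task on the selected flow and undo all
 * mappings in reverse order on completion or error.
 */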
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct sun8i_ss_flow *sf = &ss->flows[rctx->flow];
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int nsgs = sg_nents_for_len(areq->src, areq->cryptlen);
	int nsgd = sg_nents_for_len(areq->dst, areq->cryptlen);
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		err = sun8i_ss_setup_ivs(areq);
		if (err)
			goto theend_key;
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nsgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nsgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		for (i = 0; i < rctx->niv; i++) {
			dma_unmap_single(ss->dev, rctx->p_iv[i], ivsize, DMA_TO_DEVICE);
			memzero_explicit(sf->iv[i], ivsize);
		}

		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & SS_DECRYPTION) {
			memcpy(areq->iv, sf->biv, ivsize);
			memzero_explicit(sf->biv, ivsize);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}
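
/* Called by the crypto engine worker to process one queued skcipher request. */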
int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	local_bh_disable();
	crypto_finalize_skcipher_request(engine, breq, err);
	local_bh_enable();

	return 0;
}
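
/*
 * skcipher entry points: initialise the request context, use the software
 * fallback for unsupported request layouts, otherwise queue the request on
 * one of the SS flows through the crypto engine.
 */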
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
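
/*
 * sun8i_ss_cipher_init() - tfm init: allocate the software fallback, size the
 * request context to also hold the fallback request, and take a runtime PM
 * reference on the device.
 */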
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher.base);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(sktfm, sizeof(struct sun8i_cipher_req_ctx) +
				    crypto_skcipher_reqsize(op->fallback_tfm));

	memcpy(algt->fbname,
	       crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)),
	       CRYPTO_MAX_ALG_NAME);

	err = pm_runtime_resume_and_get(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}
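
/* tfm exit: free the key copy and the fallback tfm, drop the runtime PM reference. */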
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}
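
/*
 * sun8i_ss_aes_setkey() - accept 128/192/256-bit AES keys, keep a kernel copy
 * of the key for the hardware and mirror it into the fallback tfm.
 */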
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
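
/*
 * sun8i_ss_des3_setkey() - same as the AES variant, but for 3DES keys of the
 * fixed 3 * DES_KEY_SIZE length.
 */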
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}