// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * keysizes in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */
#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>
#include "sun8i-ss.h"
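
/*
 * Check whether the request can be handled by the SS hardware or must go
 * through the software fallback: the engine only accepts non-empty lengths
 * that are a multiple of 16 bytes, at most eight scatterlist entries per
 * side, 16-byte aligned segments, and matching source/destination layouts.
 */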
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;

	if (areq->cryptlen == 0 || areq->cryptlen % 16)
		return true;

	if (sg_nents(areq->src) > 8 || sg_nents(areq->dst) > 8)
		return true;

	sg = areq->src;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}

	/* SS needs the same number of SGs (with the same lengths) for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}
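
/*
 * Handle a request with the software fallback skcipher allocated at init
 * time, preserving the direction, callback and IV of the original request.
 */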
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}
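
/*
 * Run one cipher request on the Security System: DMA-map the key, the IV
 * and the source/destination scatterlists, fill the task descriptors and
 * start the transfer via sun8i_ss_run_task(). Everything is unmapped on
 * the way out and, in CBC mode, the IV for the next request is written
 * back into areq->iv.
 */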
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	void *backup_iv = NULL;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);

	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif

	/* Select the hardware opcodes for this variant's block mode and algorithm */
	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		rctx->ivlen = ivsize;
		rctx->biv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
		if (!rctx->biv) {
			err = -ENOMEM;
			goto theend_key;
		}
		if (rctx->op_dir & SS_DECRYPTION) {
			/*
			 * For decryption the last ciphertext block is the
			 * next IV; save it now since it may be overwritten
			 * when src == dst.
			 */
			backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!backup_iv) {
				err = -ENOMEM;
				goto theend_key;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(backup_iv, areq->src, offset,
						 ivsize, 0);
		}
		memcpy(rctx->biv, areq->iv, ivsize);
		rctx->p_iv = dma_map_single(ss->dev, rctx->biv, rctx->ivlen,
					    DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, rctx->p_iv)) {
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -ENOMEM;
			goto theend_iv;
		}
	}

	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		/* Task descriptor lengths are expressed in 32-bit words */
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %u\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (rctx->p_iv)
		dma_unmap_single(ss->dev, rctx->p_iv, rctx->ivlen,
				 DMA_TO_DEVICE);

	if (areq->iv && ivsize > 0) {
		if (rctx->biv) {
			offset = areq->cryptlen - ivsize;
			if (rctx->op_dir & SS_DECRYPTION) {
				/* Restore the IV saved before decryption */
				memcpy(areq->iv, backup_iv, ivsize);
				kfree_sensitive(backup_iv);
			} else {
				scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
							 ivsize, 0);
			}
			kfree(rctx->biv);
		}
	}

theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}
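
/*
 * crypto_engine callback: run one queued skcipher request on the hardware
 * and report completion to the engine.
 */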
static int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	crypto_finalize_skcipher_request(engine, breq, err);

	return 0;
}
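
/*
 * skcipher decrypt entry point: fall back to software when the request
 * does not fit the hardware constraints, otherwise queue it on one of
 * the SS flows.
 */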
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
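
/* skcipher encrypt entry point; see sun8i_ss_skdecrypt() above. */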
int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
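
/*
 * Per-tfm initialization: allocate the software fallback, size the request
 * context to also hold the fallback request, set up the crypto_engine
 * callbacks and take a runtime PM reference on the device.
 */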
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	op->ss = algt->ss;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	dev_info(op->ss->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));

	op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_get_sync(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}
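
/* Per-tfm cleanup: free the key and fallback, drop the runtime PM reference. */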
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}
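
/*
 * Set the AES key: accept only 128/192/256 bit keys, keep a DMA-able copy
 * for the hardware and propagate the key to the fallback tfm.
 */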
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
	case 192 / 8:
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
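
/* Set the 3DES key: the length must be exactly 3 * DES_KEY_SIZE bytes. */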
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		return -EINVAL;
	}

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}
);