// SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ss-cipher.c - hardware cryptographic offloader for
 * Allwinner A80/A83T SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for AES ciphers with 128/192/256-bit keysize in
 * CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi/README
 */

#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/skcipher.h>

#include "sun8i-ss.h"
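/*
 * sun8i_ss_need_fallback() - check whether a request fits hardware limits
 *
 * The SS can only handle requests whose length is a non-zero multiple of
 * 16 bytes, spread over at most 8 scatterlist entries per side, each entry
 * 16-byte aligned and a multiple of 16 bytes long, with source and
 * destination scatterlists of identical geometry. Anything else must be
 * handled by the software fallback.
 */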
static bool sun8i_ss_need_fallback(struct skcipher_request *areq)
{
	struct scatterlist *in_sg = areq->src;
	struct scatterlist *out_sg = areq->dst;
	struct scatterlist *sg;

	if (areq->cryptlen == 0 || areq->cryptlen % 16)
		return true;

	if (sg_nents(areq->src) > 8 || sg_nents(areq->dst) > 8)
		return true;

	sg = areq->src;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if ((sg->length % 16) != 0)
			return true;
		if ((sg_dma_len(sg) % 16) != 0)
			return true;
		if (!IS_ALIGNED(sg->offset, 16))
			return true;
		sg = sg_next(sg);
	}

	/* SS needs the same number of SGs (with the same lengths) for source and destination */
	in_sg = areq->src;
	out_sg = areq->dst;
	while (in_sg && out_sg) {
		if (in_sg->length != out_sg->length)
			return true;
		in_sg = sg_next(in_sg);
		out_sg = sg_next(out_sg);
	}
	if (in_sg || out_sg)
		return true;
	return false;
}
static int sun8i_ss_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;

	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, op->fallback_tfm);
#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif
	skcipher_request_set_sync_tfm(subreq, op->fallback_tfm);
	skcipher_request_set_callback(subreq, areq->base.flags, NULL, NULL);
	skcipher_request_set_crypt(subreq, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & SS_DECRYPTION)
		err = crypto_skcipher_decrypt(subreq);
	else
		err = crypto_skcipher_encrypt(subreq);
	skcipher_request_zero(subreq);

	return err;
}
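/*
 * sun8i_ss_cipher() - run one cipher job on the hardware
 *
 * Map the key, the IV (if any) and the source/destination scatterlists
 * for DMA, fill the task descriptor and run it via sun8i_ss_run_task().
 * All mappings and temporary buffers are released on the way out,
 * whatever the outcome.
 */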
static int sun8i_ss_cipher(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ss_alg_template *algt;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	void *backup_iv = NULL;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;
	int i;

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	dev_dbg(ss->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	algt->stat_req++;
#endif
	rctx->op_mode = ss->variant->op_mode[algt->ss_blockmode];
	rctx->method = ss->variant->alg_cipher[algt->ss_algo_id];
	rctx->keylen = op->keylen;

	rctx->p_key = dma_map_single(ss->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ss->dev, rctx->p_key)) {
		dev_err(ss->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}
	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		rctx->ivlen = ivsize;
		rctx->biv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
		if (!rctx->biv) {
			err = -ENOMEM;
			goto theend_key;
		}
		if (rctx->op_dir & SS_DECRYPTION) {
			backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!backup_iv) {
				err = -ENOMEM;
				goto theend_key;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(backup_iv, areq->src, offset,
						 ivsize, 0);
		}
		memcpy(rctx->biv, areq->iv, ivsize);
		rctx->p_iv = dma_map_single(ss->dev, rctx->biv, rctx->ivlen,
					    DMA_TO_DEVICE);
		if (dma_mapping_error(ss->dev, rctx->p_iv)) {
			dev_err(ss->dev, "Cannot DMA MAP IV\n");
			err = -ENOMEM;
			goto theend_iv;
		}
	}
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ss->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > 8) {
			dev_err(ss->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}
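	/*
	 * Fill the task descriptor from the DMA-mapped chunks. The t_src[]
	 * and t_dst[] length fields are expressed in 32-bit words, hence
	 * the division of todo by four below.
	 */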
	len = areq->cryptlen;
	i = 0;
	sg = areq->src;
	while (i < nr_sgs && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgs_next;
		rctx->t_src[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_src[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGS(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_src[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgs_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}
	len = areq->cryptlen;
	i = 0;
	sg = areq->dst;
	while (i < nr_sgd && sg && len) {
		if (sg_dma_len(sg) == 0)
			goto sgd_next;
		rctx->t_dst[i].addr = sg_dma_address(sg);
		todo = min(len, sg_dma_len(sg));
		rctx->t_dst[i].len = todo / 4;
		dev_dbg(ss->dev, "%s total=%u SGD(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, rctx->t_dst[i].len, sg->offset, todo);
		len -= todo;
		i++;
sgd_next:
		sg = sg_next(sg);
	}
	if (len > 0) {
		dev_err(ss->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}
	err = sun8i_ss_run_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));

theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ss->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}
theend_iv:
	if (rctx->p_iv)
		dma_unmap_single(ss->dev, rctx->p_iv, rctx->ivlen,
				 DMA_TO_DEVICE);
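	/*
	 * Hand the updated IV back to the caller: the saved last ciphertext
	 * block for decryption, the last block of the result for encryption.
	 */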
	if (areq->iv && ivsize > 0) {
		if (rctx->biv) {
			offset = areq->cryptlen - ivsize;
			if (rctx->op_dir & SS_DECRYPTION) {
				memcpy(areq->iv, backup_iv, ivsize);
				memzero_explicit(backup_iv, ivsize);
				kfree(backup_iv);
			} else {
				scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
							 ivsize, 0);
			}
			kfree(rctx->biv);
		}
	}
theend_key:
	dma_unmap_single(ss->dev, rctx->p_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}
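/* Crypto engine callback: run the queued request and report its result. */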
static int sun8i_ss_handle_cipher_request(struct crypto_engine *engine, void *areq)
{
	int err;
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);

	err = sun8i_ss_cipher(breq);
	crypto_finalize_skcipher_request(engine, breq, err);

	return 0;
}
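/*
 * skcipher entry points: record the operation direction, use the software
 * fallback when the request does not fit hardware limits, otherwise queue
 * the request on one of the SS flows.
 */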
int sun8i_ss_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_DECRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
int sun8i_ss_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	memset(rctx, 0, sizeof(struct sun8i_cipher_req_ctx));
	rctx->op_dir = SS_ENCRYPTION;

	if (sun8i_ss_need_fallback(areq))
		return sun8i_ss_cipher_fallback(areq);

	e = sun8i_ss_get_engine_number(op->ss);
	engine = op->ss->flows[e].engine;
	rctx->flow = e;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}
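/*
 * Per-tfm initialization: allocate the software fallback, wire up the
 * crypto engine callbacks and take a runtime PM reference on the device.
 */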
int sun8i_ss_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ss_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ss_alg_template, alg.skcipher);
	op->ss = algt->ss;

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx);

	op->fallback_tfm = crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	dev_info(op->ss->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(&op->fallback_tfm->base)));

	op->enginectx.op.do_one_request = sun8i_ss_handle_cipher_request;
	op->enginectx.op.prepare_request = NULL;
	op->enginectx.op.unprepare_request = NULL;

	err = pm_runtime_get_sync(op->ss->dev);
	if (err < 0) {
		dev_err(op->ss->dev, "pm error %d\n", err);
		goto error_pm;
	}

	return 0;
error_pm:
	crypto_free_sync_skcipher(op->fallback_tfm);
	return err;
}
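/*
 * Per-tfm cleanup: wipe and free the key, release the fallback transform
 * and drop the runtime PM reference taken at init time.
 */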
void sun8i_ss_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	crypto_free_sync_skcipher(op->fallback_tfm);
	pm_runtime_put_sync(op->ss->dev);
}
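/*
 * Validate the AES key length, keep a DMA-able copy of the key and mirror
 * it into the fallback transform so both paths use the same key.
 */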
int sun8i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	switch (keylen) {
	case 128 / 8:
		break;
	case 192 / 8:
		break;
	case 256 / 8:
		break;
	default:
		dev_dbg(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
}
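/* Same as the AES path, but DES3 accepts exactly 3 * DES_KEY_SIZE bytes. */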
int sun8i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ss_dev *ss = op->ss;

	if (unlikely(keylen != 3 * DES_KEY_SIZE)) {
		dev_dbg(ss->dev, "Invalid keylen %u\n", keylen);
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	if (op->key) {
		memzero_explicit(op->key, op->keylen);
		kfree(op->key);
	}
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
}