1 // SPDX-License-Identifier: GPL-2.0
/*
 * sun8i-ce-cipher.c - hardware cryptographic offloader for
 * Allwinner H3/A64/H5/H2+/H6/R40 SoC
 *
 * Copyright (C) 2016-2019 Corentin LABBE <clabbe.montjoie@gmail.com>
 *
 * This file adds support for the AES cipher with 128, 192 and 256 bit
 * key sizes in CBC and ECB mode.
 *
 * You can find a link to the datasheet in Documentation/arm/sunxi.rst
 */

#include <linux/crypto.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/pm_runtime.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>

#include "sun8i-ce.h"
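
/*
 * Decide whether a request must go through the software fallback: too many
 * scatterlist entries for the CE task descriptor, a cryptlen shorter than
 * the IV, a zero or non-16-byte-multiple length, or scatterlist entries
 * that are not word-sized and word-aligned all rule out the hardware path.
 */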
static int sun8i_ce_cipher_need_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct scatterlist *sg;

	if (sg_nents(areq->src) > MAX_SG || sg_nents(areq->dst) > MAX_SG)
		return true;

	if (areq->cryptlen < crypto_skcipher_ivsize(tfm))
		return true;

	if (areq->cryptlen == 0 || areq->cryptlen % 16)
		return true;

	sg = areq->src;
	while (sg) {
		if (sg->length % 4 || !IS_ALIGNED(sg->offset, sizeof(u32)))
			return true;
		sg = sg_next(sg);
	}
	sg = areq->dst;
	while (sg) {
		if (sg->length % 4 || !IS_ALIGNED(sg->offset, sizeof(u32)))
			return true;
		sg = sg_next(sg);
	}
	return false;
}

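/*
 * Run the request through the pre-allocated fallback skcipher tfm,
 * preserving the caller's completion callback and request flags. Used
 * whenever sun8i_ce_cipher_need_fallback() rejects a request.
 */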
static int sun8i_ce_cipher_fallback(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	int err;
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ce_alg_template *algt;

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher);
	algt->stat_fb++;
#endif

	skcipher_request_set_tfm(&rctx->fallback_req, op->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);
	if (rctx->op_dir & CE_DECRYPTION)
		err = crypto_skcipher_decrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	return err;
}

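/*
 * Prepare a request for the crypto engine: build the CE task descriptor
 * for the selected flow and DMA-map the key, IV and source/destination
 * scatterlists. Everything mapped here is released either in the error
 * path below or in sun8i_ce_cipher_unprepare().
 */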
static int sun8i_ce_cipher_prepare(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct sun8i_ce_alg_template *algt;
	struct sun8i_ce_flow *chan;
	struct ce_task *cet;
	struct scatterlist *sg;
	unsigned int todo, len, offset, ivsize;
	u32 common, sym;
	int flow, i;
	int nr_sgs = 0;
	int nr_sgd = 0;
	int err = 0;

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher);

	dev_dbg(ce->dev, "%s %s %u %x IV(%p %u) key=%u\n", __func__,
		crypto_tfm_alg_name(areq->base.tfm),
		areq->cryptlen,
		rctx->op_dir, areq->iv, crypto_skcipher_ivsize(tfm),
		op->keylen);

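	/*
	 * Fill the CE task descriptor for this flow: algorithm and
	 * direction in t_common_ctl, data length in t_dlen (bytes or
	 * 32-bit words depending on the CE variant), and the block mode
	 * plus AES key size in t_sym_ctl.
	 */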
#ifdef CONFIG_CRYPTO_DEV_SUN8I_CE_DEBUG
	algt->stat_req++;
#endif

	flow = rctx->flow;

	chan = &ce->chanlist[flow];

	cet = chan->tl;
	memset(cet, 0, sizeof(struct ce_task));

	cet->t_id = cpu_to_le32(flow);
	common = ce->variant->alg_cipher[algt->ce_algo_id];
	common |= rctx->op_dir | CE_COMM_INT;
	cet->t_common_ctl = cpu_to_le32(common);
	/* CTS and recent CE (H6) need length in bytes, in word otherwise */
	if (ce->variant->cipher_t_dlen_in_bytes)
		cet->t_dlen = cpu_to_le32(areq->cryptlen);
	else
		cet->t_dlen = cpu_to_le32(areq->cryptlen / 4);

	sym = ce->variant->op_mode[algt->ce_blockmode];
	len = op->keylen;
	switch (len) {
	case 128 / 8:
		sym |= CE_AES_128BITS;
		break;
	case 192 / 8:
		sym |= CE_AES_192BITS;
		break;
	case 256 / 8:
		sym |= CE_AES_256BITS;
		break;
	}

	cet->t_sym_ctl = cpu_to_le32(sym);
	cet->t_asym_ctl = 0;

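	/*
	 * DMA-map the key material and, for IV-bearing modes, a bounce
	 * buffer holding the IV. For decryption the last ciphertext block
	 * is saved first, so the chaining IV can still be returned after
	 * the source data has been overwritten by an in-place operation.
	 */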
	rctx->addr_key = dma_map_single(ce->dev, op->key, op->keylen, DMA_TO_DEVICE);
	if (dma_mapping_error(ce->dev, rctx->addr_key)) {
		dev_err(ce->dev, "Cannot DMA MAP KEY\n");
		err = -EFAULT;
		goto theend;
	}
	cet->t_key = cpu_to_le32(rctx->addr_key);

	ivsize = crypto_skcipher_ivsize(tfm);
	if (areq->iv && crypto_skcipher_ivsize(tfm) > 0) {
		rctx->ivlen = ivsize;
		rctx->bounce_iv = kzalloc(ivsize, GFP_KERNEL | GFP_DMA);
		if (!rctx->bounce_iv) {
			err = -ENOMEM;
			goto theend_key;
		}
		if (rctx->op_dir & CE_DECRYPTION) {
			rctx->backup_iv = kzalloc(ivsize, GFP_KERNEL);
			if (!rctx->backup_iv) {
				err = -ENOMEM;
				goto theend_key;
			}
			offset = areq->cryptlen - ivsize;
			scatterwalk_map_and_copy(rctx->backup_iv, areq->src,
						 offset, ivsize, 0);
		}
		memcpy(rctx->bounce_iv, areq->iv, ivsize);
		rctx->addr_iv = dma_map_single(ce->dev, rctx->bounce_iv, rctx->ivlen,
					       DMA_TO_DEVICE);
		if (dma_mapping_error(ce->dev, rctx->addr_iv)) {
			dev_err(ce->dev, "Cannot DMA MAP IV\n");
			err = -ENOMEM;
			goto theend_iv;
		}
		cet->t_iv = cpu_to_le32(rctx->addr_iv);
	}

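	/*
	 * Map the source and destination scatterlists. When the request is
	 * in-place (src == dst) a single bidirectional mapping covers both
	 * sides.
	 */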
	if (areq->src == areq->dst) {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_BIDIRECTIONAL);
		if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = nr_sgs;
	} else {
		nr_sgs = dma_map_sg(ce->dev, areq->src, sg_nents(areq->src),
				    DMA_TO_DEVICE);
		if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgs);
			err = -EINVAL;
			goto theend_iv;
		}
		nr_sgd = dma_map_sg(ce->dev, areq->dst, sg_nents(areq->dst),
				    DMA_FROM_DEVICE);
		if (nr_sgd <= 0 || nr_sgd > MAX_SG) {
			dev_err(ce->dev, "Invalid sg number %d\n", nr_sgd);
			err = -EINVAL;
			goto theend_sgs;
		}
	}

	len = areq->cryptlen;
	for_each_sg(areq->src, sg, nr_sgs, i) {
		cet->t_src[i].addr = cpu_to_le32(sg_dma_address(sg));
		todo = min(len, sg_dma_len(sg));
		cet->t_src[i].len = cpu_to_le32(todo / 4);
		dev_dbg(ce->dev, "%s total=%u SG(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, cet->t_src[i].len, sg->offset, todo);
		len -= todo;
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	len = areq->cryptlen;
	for_each_sg(areq->dst, sg, nr_sgd, i) {
		cet->t_dst[i].addr = cpu_to_le32(sg_dma_address(sg));
		todo = min(len, sg_dma_len(sg));
		cet->t_dst[i].len = cpu_to_le32(todo / 4);
		dev_dbg(ce->dev, "%s total=%u SG(%d %u off=%d) todo=%u\n", __func__,
			areq->cryptlen, i, cet->t_dst[i].len, sg->offset, todo);
		len -= todo;
	}
	if (len > 0) {
		dev_err(ce->dev, "remaining len %d\n", len);
		err = -EINVAL;
		goto theend_sgs;
	}

	chan->timeout = areq->cryptlen;
	rctx->nr_sgs = nr_sgs;
	rctx->nr_sgd = nr_sgd;
	return 0;

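/*
 * Error path: release whatever was DMA-mapped above, in reverse order,
 * before handing the error back to the crypto engine.
 */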
theend_sgs:
	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		if (nr_sgs > 0)
			dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}

theend_iv:
	if (areq->iv && ivsize > 0) {
		if (rctx->addr_iv)
			dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);
		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & CE_DECRYPTION) {
			memcpy(areq->iv, rctx->backup_iv, ivsize);
			kfree_sensitive(rctx->backup_iv);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
		kfree(rctx->bounce_iv);
	}

theend_key:
	dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);

theend:
	return err;
}

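/*
 * Called by the crypto engine to actually run a prepared request on the
 * selected CE flow; the result is reported back through
 * crypto_finalize_skcipher_request().
 */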
static int sun8i_ce_cipher_run(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *breq = container_of(areq, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(breq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(breq);
	int flow, err;

	flow = rctx->flow;
	err = sun8i_ce_run_task(ce, flow, crypto_tfm_alg_name(breq->base.tfm));
	crypto_finalize_skcipher_request(engine, breq, err);
	return 0;
}

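/*
 * Undo the DMA mappings set up in sun8i_ce_cipher_prepare() and copy the
 * output IV back into the request once the hardware has finished.
 */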
static int sun8i_ce_cipher_unprepare(struct crypto_engine *engine, void *async_req)
{
	struct skcipher_request *areq = container_of(async_req, struct skcipher_request, base);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct sun8i_ce_flow *chan;
	struct ce_task *cet;
	unsigned int ivsize, offset;
	int nr_sgs = rctx->nr_sgs;
	int nr_sgd = rctx->nr_sgd;
	int flow;

	flow = rctx->flow;
	chan = &ce->chanlist[flow];
	cet = chan->tl;
	ivsize = crypto_skcipher_ivsize(tfm);

	if (areq->src == areq->dst) {
		dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_BIDIRECTIONAL);
	} else {
		if (nr_sgs > 0)
			dma_unmap_sg(ce->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
		dma_unmap_sg(ce->dev, areq->dst, nr_sgd, DMA_FROM_DEVICE);
	}

	if (areq->iv && ivsize > 0) {
		if (cet->t_iv)
			dma_unmap_single(ce->dev, rctx->addr_iv, rctx->ivlen, DMA_TO_DEVICE);
		offset = areq->cryptlen - ivsize;
		if (rctx->op_dir & CE_DECRYPTION) {
			memcpy(areq->iv, rctx->backup_iv, ivsize);
			kfree_sensitive(rctx->backup_iv);
		} else {
			scatterwalk_map_and_copy(areq->iv, areq->dst, offset,
						 ivsize, 0);
		}
		kfree(rctx->bounce_iv);
	}

	dma_unmap_single(ce->dev, rctx->addr_key, op->keylen, DMA_TO_DEVICE);

	return 0;
}

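/*
 * skcipher entry points: record the direction, divert unsupported requests
 * to the fallback, otherwise pick a CE flow and queue the request on that
 * flow's crypto engine.
 */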
int sun8i_ce_skdecrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = CE_DECRYPTION;
	if (sun8i_ce_cipher_need_fallback(areq))
		return sun8i_ce_cipher_fallback(areq);

	e = sun8i_ce_get_engine_number(op->ce);
	rctx->flow = e;
	engine = op->ce->chanlist[e].engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

int sun8i_ce_skencrypt(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
	struct crypto_engine *engine;
	int e;

	rctx->op_dir = CE_ENCRYPTION;
	if (sun8i_ce_cipher_need_fallback(areq))
		return sun8i_ce_cipher_fallback(areq);

	e = sun8i_ce_get_engine_number(op->ce);
	rctx->flow = e;
	engine = op->ce->chanlist[e].engine;

	return crypto_transfer_skcipher_request_to_engine(engine, areq);
}

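/*
 * Per-tfm setup: allocate the software fallback tfm (its request size is
 * folded into our own), register the engine prepare/run/unprepare
 * callbacks and take a runtime-PM reference on the CE device.
 */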
int sun8i_ce_cipher_init(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);
	struct sun8i_ce_alg_template *algt;
	const char *name = crypto_tfm_alg_name(tfm);
	struct crypto_skcipher *sktfm = __crypto_skcipher_cast(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(sktfm);
	int err;

	memset(op, 0, sizeof(struct sun8i_cipher_tfm_ctx));

	algt = container_of(alg, struct sun8i_ce_alg_template, alg.skcipher);
	op->ce = algt->ce;

	op->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(op->fallback_tfm)) {
		dev_err(op->ce->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(op->fallback_tfm));
		return PTR_ERR(op->fallback_tfm);
	}

	sktfm->reqsize = sizeof(struct sun8i_cipher_req_ctx) +
			 crypto_skcipher_reqsize(op->fallback_tfm);

	dev_info(op->ce->dev, "Fallback for %s is %s\n",
		 crypto_tfm_alg_driver_name(&sktfm->base),
		 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(op->fallback_tfm)));

	op->enginectx.op.do_one_request = sun8i_ce_cipher_run;
	op->enginectx.op.prepare_request = sun8i_ce_cipher_prepare;
	op->enginectx.op.unprepare_request = sun8i_ce_cipher_unprepare;

	err = pm_runtime_get_sync(op->ce->dev);
	if (err < 0)
		goto error_pm;

	return 0;
error_pm:
	pm_runtime_put_noidle(op->ce->dev);
	crypto_free_skcipher(op->fallback_tfm);
	return err;
}

void sun8i_ce_cipher_exit(struct crypto_tfm *tfm)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_tfm_ctx(tfm);

	kfree_sensitive(op->key);
	crypto_free_skcipher(op->fallback_tfm);
	pm_runtime_put_sync_suspend(op->ce->dev);
}

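/*
 * Keep a DMA-able copy of the key for the hardware and propagate it to the
 * fallback tfm so both paths stay in sync.
 */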
int sun8i_ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun8i_ce_dev *ce = op->ce;

	switch (keylen) {
	case 128 / 8:
	case 192 / 8:
	case 256 / 8:
		break;
	default:
		dev_dbg(ce->dev, "ERROR: Invalid keylen %u\n", keylen);
		return -EINVAL;
	}
	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}

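/*
 * Same as the AES path, but the key is additionally run through the
 * standard 3DES key check before being stored.
 */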
int sun8i_ce_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			 unsigned int keylen)
{
	struct sun8i_cipher_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	int err;

	err = verify_skcipher_des3_key(tfm, key);
	if (err)
		return err;

	kfree_sensitive(op->key);
	op->keylen = keylen;
	op->key = kmemdup(key, keylen, GFP_KERNEL | GFP_DMA);
	if (!op->key)
		return -ENOMEM;

	crypto_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(op->fallback_tfm, key, keylen);
}