// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (c) 2021 Aspeed Technology Inc.
 */

6 #include "aspeed-hace.h"
7 #include <crypto/des.h>
8 #include <crypto/engine.h>
9 #include <crypto/internal/des.h>
10 #include <crypto/internal/skcipher.h>
11 #include <linux/dma-mapping.h>
12 #include <linux/err.h>
14 #include <linux/kernel.h>
15 #include <linux/module.h>
16 #include <linux/scatterlist.h>
17 #include <linux/string.h>
#ifdef CONFIG_CRYPTO_DEV_ASPEED_HACE_CRYPTO_DEBUG
#define CIPHER_DBG(h, fmt, ...)	\
	dev_info((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#else
#define CIPHER_DBG(h, fmt, ...)	\
	dev_dbg((h)->dev, "%s() " fmt, __func__, ##__VA_ARGS__)
#endif
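
/*
 * Hand the request over to the software fallback skcipher.  Only used on
 * AST2500, which cannot process every request in hardware (see
 * aspeed_crypto_need_fallback()).
 */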
static int aspeed_crypto_do_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&rctx->fallback_req, areq->base.flags,
				      areq->base.complete, areq->base.data);
	skcipher_request_set_crypt(&rctx->fallback_req, areq->src, areq->dst,
				   areq->cryptlen, areq->iv);

	if (rctx->enc_cmd & HACE_CMD_ENCRYPT)
		err = crypto_skcipher_encrypt(&rctx->fallback_req);
	else
		err = crypto_skcipher_decrypt(&rctx->fallback_req);

	return err;
}
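
/*
 * Requests the hardware cannot handle directly: zero-length requests and
 * lengths that are not a multiple of the cipher block size.
 */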
static bool aspeed_crypto_need_fallback(struct skcipher_request *areq)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(areq);

	if (areq->cryptlen == 0)
		return true;

	if ((rctx->enc_cmd & HACE_CMD_DES_SELECT) &&
	    !IS_ALIGNED(areq->cryptlen, DES_BLOCK_SIZE))
		return true;

	if ((!(rctx->enc_cmd & HACE_CMD_DES_SELECT)) &&
	    !IS_ALIGNED(areq->cryptlen, AES_BLOCK_SIZE))
		return true;

	return false;
}
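
/*
 * Either run the request through the software fallback (AST2500 corner
 * cases) or queue it on the crypto engine for hardware processing.
 */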
static int aspeed_hace_crypto_handle_queue(struct aspeed_hace_dev *hace_dev,
					   struct skcipher_request *req)
{
	if (hace_dev->version == AST2500_VERSION &&
	    aspeed_crypto_need_fallback(req)) {
		CIPHER_DBG(hace_dev, "SW fallback\n");
		return aspeed_crypto_do_fallback(req);
	}

	return crypto_transfer_skcipher_request_to_engine(
			hace_dev->crypt_engine_crypto, req);
}
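
/* crypto_engine callback: mark the engine busy and kick off the transfer. */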
static int aspeed_crypto_do_request(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = skcipher_request_cast(areq);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct aspeed_engine_crypto *crypto_engine;
	int rc;

	crypto_engine = &hace_dev->crypto_engine;
	crypto_engine->req = req;
	crypto_engine->flags |= CRYPTO_FLAGS_BUSY;

	rc = ctx->start(hace_dev);

	if (rc != -EINPROGRESS)
		return -EIO;

	return 0;
}
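
/*
 * Common completion path: write the updated IV back to the request when the
 * mode requires one, then finalize the request on the crypto engine.
 */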
static int aspeed_sk_complete(struct aspeed_hace_dev *hace_dev, int err)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(req->iv, crypto_engine->cipher_ctx +
			       DES_KEY_SIZE, DES_KEY_SIZE);
		else
			memcpy(req->iv, crypto_engine->cipher_ctx,
			       AES_BLOCK_SIZE);
	}

	crypto_engine->flags &= ~CRYPTO_FLAGS_BUSY;

	crypto_finalize_skcipher_request(hace_dev->crypt_engine_crypto, req,
					 err);

	return err;
}
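
/* Completion path for scatter-gather mode: unmap the DMA buffers first. */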
static int aspeed_sk_transfer_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct device *dev = hace_dev->dev;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
		dma_unmap_sg(dev, req->dst, rctx->dst_nents, DMA_FROM_DEVICE);
	}

	return aspeed_sk_complete(hace_dev, 0);
}
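
/*
 * Completion path for the bounce-buffer mode: copy the engine output buffer
 * back into the destination scatterlist before completing the request.
 */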
static int aspeed_sk_transfer(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *out_sg;
	int nbytes = 0;
	int rc = 0;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	out_sg = req->dst;

	/* Copy output buffer to dst scatter-gather lists */
	nbytes = sg_copy_from_buffer(out_sg, rctx->dst_nents,
				     crypto_engine->cipher_addr, req->cryptlen);
	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		rc = -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_out_sg", rctx->dst_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	return aspeed_sk_complete(hace_dev, rc);
}
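
/*
 * Non-scatter-gather path: copy the source scatterlist into the engine's
 * bounce buffer and trigger the engine on that single contiguous buffer.
 */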
static int aspeed_sk_start(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *in_sg;
	int nbytes;

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	in_sg = req->src;

	nbytes = sg_copy_to_buffer(in_sg, rctx->src_nents,
				   crypto_engine->cipher_addr, req->cryptlen);

	CIPHER_DBG(hace_dev, "%s:%d, %s:%d, %s:%d, %s:%p\n",
		   "nbytes", nbytes, "req->cryptlen", req->cryptlen,
		   "nb_in_sg", rctx->src_nents,
		   "cipher addr", crypto_engine->cipher_addr);

	if (!nbytes) {
		dev_warn(hace_dev->dev, "invalid sg copy, %s:0x%x, %s:0x%x\n",
			 "nbytes", nbytes, "cryptlen", req->cryptlen);
		return -EINVAL;
	}

	crypto_engine->resume = aspeed_sk_transfer;

	/* Trigger engines */
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, crypto_engine->cipher_dma_addr,
		       ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;
}
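
/*
 * Scatter-gather path: map the source and destination scatterlists for DMA,
 * build the hardware SG descriptor lists and trigger the engine.
 */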
static int aspeed_sk_start_sg(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_sg_list *src_list, *dst_list;
	dma_addr_t src_dma_addr, dst_dma_addr;
	struct aspeed_cipher_reqctx *rctx;
	struct skcipher_request *req;
	struct scatterlist *s;
	int src_sg_len;
	int dst_sg_len;
	int total, i;
	int rc;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);

	rctx->enc_cmd |= HACE_CMD_DES_SG_CTRL | HACE_CMD_SRC_SG_CTRL |
			 HACE_CMD_AES_KEY_HW_EXP | HACE_CMD_MBUS_REQ_SYNC_EN;

	/* BIDIRECTIONAL */
	if (req->dst == req->src) {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_BIDIRECTIONAL);
		dst_sg_len = src_sg_len;
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

	} else {
		src_sg_len = dma_map_sg(hace_dev->dev, req->src,
					rctx->src_nents, DMA_TO_DEVICE);
		if (!src_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() src error\n");
			return -EINVAL;
		}

		dst_sg_len = dma_map_sg(hace_dev->dev, req->dst,
					rctx->dst_nents, DMA_FROM_DEVICE);
		if (!dst_sg_len) {
			dev_warn(hace_dev->dev, "dma_map_sg() dst error\n");
			rc = -EINVAL;
			goto free_req_src;
		}
	}

	src_list = (struct aspeed_sg_list *)crypto_engine->cipher_addr;
	src_dma_addr = crypto_engine->cipher_dma_addr;
	total = req->cryptlen;

	for_each_sg(req->src, s, src_sg_len, i) {
		u32 phy_addr = sg_dma_address(s);
		u32 len = sg_dma_len(s);

		if (total > len)
			total -= len;
		else {
			/* last sg list */
			len = total;
			len |= BIT(31);
			total = 0;
		}

		src_list[i].phy_addr = cpu_to_le32(phy_addr);
		src_list[i].len = cpu_to_le32(len);
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	if (req->dst == req->src) {
		dst_list = src_list;
		dst_dma_addr = src_dma_addr;

	} else {
		dst_list = (struct aspeed_sg_list *)crypto_engine->dst_sg_addr;
		dst_dma_addr = crypto_engine->dst_sg_dma_addr;
		total = req->cryptlen;

		for_each_sg(req->dst, s, dst_sg_len, i) {
			u32 phy_addr = sg_dma_address(s);
			u32 len = sg_dma_len(s);

			if (total > len)
				total -= len;
			else {
				/* last sg list */
				len = total;
				len |= BIT(31);
				total = 0;
			}

			dst_list[i].phy_addr = cpu_to_le32(phy_addr);
			dst_list[i].len = cpu_to_le32(len);
		}

		dst_list[dst_sg_len].phy_addr = 0;
		dst_list[dst_sg_len].len = 0;
	}

	if (total != 0) {
		rc = -EINVAL;
		goto free_req;
	}

	crypto_engine->resume = aspeed_sk_transfer_sg;

	/* Memory barrier to ensure all data setup before engine starts */
	mb();

	/* Trigger engines */
	ast_hace_write(hace_dev, src_dma_addr, ASPEED_HACE_SRC);
	ast_hace_write(hace_dev, dst_dma_addr, ASPEED_HACE_DEST);
	ast_hace_write(hace_dev, req->cryptlen, ASPEED_HACE_DATA_LEN);
	ast_hace_write(hace_dev, rctx->enc_cmd, ASPEED_HACE_CMD);

	return -EINPROGRESS;

free_req:
	if (req->dst == req->src) {
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_BIDIRECTIONAL);

	} else {
		dma_unmap_sg(hace_dev->dev, req->dst, rctx->dst_nents,
			     DMA_FROM_DEVICE);
		dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents,
			     DMA_TO_DEVICE);
	}

	return rc;

free_req_src:
	dma_unmap_sg(hace_dev->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);

	return rc;
}
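
/*
 * Per-request trigger: program the cipher context (IV and key), then start
 * the transfer in scatter-gather or bounce-buffer mode depending on the
 * HACE version.
 */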
static int aspeed_hace_skcipher_trigger(struct aspeed_hace_dev *hace_dev)
{
	struct aspeed_engine_crypto *crypto_engine = &hace_dev->crypto_engine;
	struct aspeed_cipher_reqctx *rctx;
	struct crypto_skcipher *cipher;
	struct aspeed_cipher_ctx *ctx;
	struct skcipher_request *req;

	CIPHER_DBG(hace_dev, "\n");

	req = crypto_engine->req;
	rctx = skcipher_request_ctx(req);
	cipher = crypto_skcipher_reqtfm(req);
	ctx = crypto_skcipher_ctx(cipher);

	/* enable interrupt */
	rctx->enc_cmd |= HACE_CMD_ISR_EN;

	rctx->dst_nents = sg_nents(req->dst);
	rctx->src_nents = sg_nents(req->src);

	ast_hace_write(hace_dev, crypto_engine->cipher_ctx_dma,
		       ASPEED_HACE_CONTEXT);

	if (rctx->enc_cmd & HACE_CMD_IV_REQUIRE) {
		if (rctx->enc_cmd & HACE_CMD_DES_SELECT)
			memcpy(crypto_engine->cipher_ctx + DES_BLOCK_SIZE,
			       req->iv, DES_BLOCK_SIZE);
		else
			memcpy(crypto_engine->cipher_ctx, req->iv,
			       AES_BLOCK_SIZE);
	}

	if (hace_dev->version == AST2600_VERSION) {
		memcpy(crypto_engine->cipher_ctx + 16, ctx->key, ctx->key_len);

		return aspeed_sk_start_sg(hace_dev);
	}

	memcpy(crypto_engine->cipher_ctx + 16, ctx->key, AES_MAX_KEYLENGTH);

	return aspeed_sk_start(hace_dev);
}
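
/* Build the DES/3DES command word and queue the request. */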
static int aspeed_des_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	CIPHER_DBG(hace_dev, "\n");

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, DES_BLOCK_SIZE))
			return -EINVAL;
	}

	rctx->enc_cmd = cmd | HACE_CMD_DES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
			HACE_CMD_DES | HACE_CMD_CONTEXT_LOAD_ENABLE |
			HACE_CMD_CONTEXT_SAVE_ENABLE;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}
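
/*
 * Validate and store a DES or 3DES key, and mirror it into the fallback
 * transform so the software path keeps working.
 */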
static int aspeed_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	int rc;

	CIPHER_DBG(hace_dev, "keylen: %d bits\n", keylen);

	if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
		dev_warn(hace_dev->dev, "invalid keylen: %d bits\n", keylen);
		return -EINVAL;
	}

	if (keylen == DES_KEY_SIZE) {
		rc = crypto_des_verify_key(tfm, key);
		if (rc)
			return rc;

	} else if (keylen == DES3_EDE_KEY_SIZE) {
		rc = crypto_des3_ede_verify_key(tfm, key);
		if (rc)
			return rc;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}
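
/* Per-mode entry points; all thin wrappers around aspeed_des_crypt(). */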
static int aspeed_tdes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_tdes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_TRIPLE_DES);
}

static int aspeed_des_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}

static int aspeed_des_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_des_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB |
				HACE_CMD_SINGLE_DES);
}
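
/* Build the AES command word (mode and key size) and queue the request. */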
static int aspeed_aes_crypt(struct skcipher_request *req, u32 cmd)
{
	struct aspeed_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	u32 crypto_alg = cmd & HACE_CMD_OP_MODE_MASK;

	if (crypto_alg == HACE_CMD_CBC || crypto_alg == HACE_CMD_ECB) {
		if (!IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
			return -EINVAL;
	}

	CIPHER_DBG(hace_dev, "%s\n",
		   (cmd & HACE_CMD_ENCRYPT) ? "encrypt" : "decrypt");

	cmd |= HACE_CMD_AES_SELECT | HACE_CMD_RI_WO_DATA_ENABLE |
	       HACE_CMD_CONTEXT_LOAD_ENABLE | HACE_CMD_CONTEXT_SAVE_ENABLE;

	switch (ctx->key_len) {
	case AES_KEYSIZE_128:
		cmd |= HACE_CMD_AES128;
		break;
	case AES_KEYSIZE_192:
		cmd |= HACE_CMD_AES192;
		break;
	case AES_KEYSIZE_256:
		cmd |= HACE_CMD_AES256;
		break;
	default:
		return -EINVAL;
	}

	rctx->enc_cmd = cmd;

	return aspeed_hace_crypto_handle_queue(hace_dev, req);
}
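
/*
 * Store the AES key.  On AST2500 the key schedule is expanded in software
 * (that path does not use the engine's hardware key expansion); the key is
 * also passed on to the fallback transform.
 */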
static int aspeed_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			     unsigned int keylen)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;
	struct crypto_aes_ctx gen_aes_key;

	CIPHER_DBG(hace_dev, "keylen: %d bits\n", (keylen * 8));

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	if (ctx->hace_dev->version == AST2500_VERSION) {
		aes_expandkey(&gen_aes_key, key, keylen);
		memcpy(ctx->key, gen_aes_key.key_enc, AES_MAX_KEYLENGTH);

	} else {
		memcpy(ctx->key, key, keylen);
	}

	ctx->key_len = keylen;

	crypto_skcipher_clear_flags(ctx->fallback_tfm, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctx->fallback_tfm, cipher->base.crt_flags &
				  CRYPTO_TFM_REQ_MASK);

	return crypto_skcipher_setkey(ctx->fallback_tfm, key, keylen);
}
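
/* Per-mode entry points; all thin wrappers around aspeed_aes_crypt(). */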
static int aspeed_aes_ctr_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_ctr_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CTR);
}

static int aspeed_aes_cbc_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_cbc_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_CBC);
}

static int aspeed_aes_ecb_decrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_DECRYPT | HACE_CMD_ECB);
}

static int aspeed_aes_ecb_encrypt(struct skcipher_request *req)
{
	return aspeed_aes_crypt(req, HACE_CMD_ENCRYPT | HACE_CMD_ECB);
}
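
/*
 * Transform init: bind the tfm to its HACE device and allocate a software
 * fallback skcipher of the same name.
 */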
static int aspeed_crypto_cra_init(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct aspeed_hace_alg *crypto_alg;

	crypto_alg = container_of(alg, struct aspeed_hace_alg, alg.skcipher.base);
	ctx->hace_dev = crypto_alg->hace_dev;
	ctx->start = aspeed_hace_skcipher_trigger;

	CIPHER_DBG(ctx->hace_dev, "%s\n", name);

	ctx->fallback_tfm = crypto_alloc_skcipher(name, 0, CRYPTO_ALG_ASYNC |
						  CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(ctx->hace_dev->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
			name, PTR_ERR(ctx->fallback_tfm));
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct aspeed_cipher_reqctx) +
				    crypto_skcipher_reqsize(ctx->fallback_tfm));

	return 0;
}
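
/* Transform exit: release the software fallback skcipher. */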
static void aspeed_crypto_cra_exit(struct crypto_skcipher *tfm)
{
	struct aspeed_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct aspeed_hace_dev *hace_dev = ctx->hace_dev;

	CIPHER_DBG(hace_dev, "%s\n", crypto_tfm_alg_name(&tfm->base));
	crypto_free_skcipher(ctx->fallback_tfm);
}
static struct aspeed_hace_alg aspeed_crypto_algs[] = {
	{
		.alg.skcipher.base = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_ecb_encrypt,
			.decrypt	= aspeed_aes_ecb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ecb(aes)",
				.cra_driver_name	= "aspeed-ecb-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= AES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= AES_BLOCK_SIZE,
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_cbc_encrypt,
			.decrypt	= aspeed_aes_cbc_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cbc(aes)",
				.cra_driver_name	= "aspeed-cbc-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= AES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_ecb_encrypt,
			.decrypt	= aspeed_des_ecb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ecb(des)",
				.cra_driver_name	= "aspeed-ecb-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_cbc_encrypt,
			.decrypt	= aspeed_des_cbc_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cbc(des)",
				.cra_driver_name	= "aspeed-cbc-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_ecb_encrypt,
			.decrypt	= aspeed_tdes_ecb_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ecb(des3_ede)",
				.cra_driver_name	= "aspeed-ecb-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_cbc_encrypt,
			.decrypt	= aspeed_tdes_cbc_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "cbc(des3_ede)",
				.cra_driver_name	= "aspeed-cbc-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC |
							  CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize		= DES_BLOCK_SIZE,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
};
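
/* CTR-mode algorithms, only exposed on AST2600. */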
static struct aspeed_hace_alg aspeed_crypto_algs_g6[] = {
	{
		.alg.skcipher.base = {
			.ivsize		= AES_BLOCK_SIZE,
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aspeed_aes_setkey,
			.encrypt	= aspeed_aes_ctr_encrypt,
			.decrypt	= aspeed_aes_ctr_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ctr(aes)",
				.cra_driver_name	= "aspeed-ctr-aes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES_KEY_SIZE,
			.max_keysize	= DES_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_des_ctr_encrypt,
			.decrypt	= aspeed_des_ctr_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ctr(des)",
				.cra_driver_name	= "aspeed-ctr-des",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
	{
		.alg.skcipher.base = {
			.ivsize		= DES_BLOCK_SIZE,
			.min_keysize	= DES3_EDE_KEY_SIZE,
			.max_keysize	= DES3_EDE_KEY_SIZE,
			.setkey		= aspeed_des_setkey,
			.encrypt	= aspeed_tdes_ctr_encrypt,
			.decrypt	= aspeed_tdes_ctr_decrypt,
			.init		= aspeed_crypto_cra_init,
			.exit		= aspeed_crypto_cra_exit,
			.base = {
				.cra_name		= "ctr(des3_ede)",
				.cra_driver_name	= "aspeed-ctr-tdes",
				.cra_priority		= 300,
				.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |
							  CRYPTO_ALG_ASYNC,
				.cra_blocksize		= 1,
				.cra_ctxsize		= sizeof(struct aspeed_cipher_ctx),
				.cra_alignmask		= 0x0f,
				.cra_module		= THIS_MODULE,
			}
		},
		.alg.skcipher.op = {
			.do_one_request = aspeed_crypto_do_request,
		},
	},
};
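
/* Unregister every skcipher previously registered for this device. */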
void aspeed_unregister_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++)
		crypto_engine_unregister_skcipher(&aspeed_crypto_algs[i].alg.skcipher);

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++)
		crypto_engine_unregister_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
}
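
/* Register the common algorithms, plus the AST2600-only set when available. */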
void aspeed_register_hace_crypto_algs(struct aspeed_hace_dev *hace_dev)
{
	int rc, i;

	CIPHER_DBG(hace_dev, "\n");

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs); i++) {
		aspeed_crypto_algs[i].hace_dev = hace_dev;
		rc = crypto_engine_register_skcipher(&aspeed_crypto_algs[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs[i].alg.skcipher.base.base.cra_name);
		}
	}

	if (hace_dev->version != AST2600_VERSION)
		return;

	for (i = 0; i < ARRAY_SIZE(aspeed_crypto_algs_g6); i++) {
		aspeed_crypto_algs_g6[i].hace_dev = hace_dev;
		rc = crypto_engine_register_skcipher(&aspeed_crypto_algs_g6[i].alg.skcipher);
		if (rc) {
			CIPHER_DBG(hace_dev, "Failed to register %s\n",
				   aspeed_crypto_algs_g6[i].alg.skcipher.base.base.cra_name);
		}
	}
}