/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/sha.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"
enum safexcel_cipher_direction {
        SAFEXCEL_ENCRYPT,
        SAFEXCEL_DECRYPT,
};

struct safexcel_cipher_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 mode;
        bool aead;

        __le32 key[8];
        unsigned int key_len;

        /* All the below is AEAD specific */
        u32 alg;
        u32 state_sz;
        u32 ipad[SHA256_DIGEST_SIZE / sizeof(u32)];
        u32 opad[SHA256_DIGEST_SIZE / sizeof(u32)];
};

struct safexcel_cipher_req {
        enum safexcel_cipher_direction direction;
        bool needs_inv;
};
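/*
 * Build the EIP197 instruction token for a plain AES skcipher request: a
 * single "direction" instruction covering the whole payload. In CBC mode the
 * IV is copied in front of the token and the engine is told to fetch it from
 * the command descriptor.
 */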
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                    struct safexcel_command_desc *cdesc,
                                    u32 length)
{
        struct safexcel_token *token;
        unsigned offset = 0;

        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
                offset = AES_BLOCK_SIZE / sizeof(u32);
                memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
        }

        token = (struct safexcel_token *)(cdesc->control_data.token + offset);

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = length;
        token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
                        EIP197_TOKEN_STAT_LAST_HASH;
        token[0].instructions = EIP197_TOKEN_INS_LAST |
                                EIP197_TOKEN_INS_TYPE_CRYTO |
                                EIP197_TOKEN_INS_TYPE_OUTPUT;
}
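/*
 * Build the instruction token for an AEAD request: hash-only the associated
 * data, then crypt+hash the payload, and finally either insert the computed
 * digest (encryption) or retrieve and verify it (decryption). For decryption
 * the digest is not part of the plaintext payload, hence cryptlen is shrunk
 * by digestsize.
 */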
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                struct safexcel_command_desc *cdesc,
                                enum safexcel_cipher_direction direction,
                                u32 cryptlen, u32 assoclen, u32 digestsize)
{
        struct safexcel_token *token;
        unsigned offset = 0;

        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
                offset = AES_BLOCK_SIZE / sizeof(u32);
                memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);

                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
        }

        token = (struct safexcel_token *)(cdesc->control_data.token + offset);

        if (direction == SAFEXCEL_DECRYPT)
                cryptlen -= digestsize;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = assoclen;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
                                EIP197_TOKEN_INS_TYPE_OUTPUT;

        token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[1].packet_length = cryptlen;
        token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
        token[1].instructions = EIP197_TOKEN_INS_LAST |
                                EIP197_TOKEN_INS_TYPE_CRYTO |
                                EIP197_TOKEN_INS_TYPE_HASH |
                                EIP197_TOKEN_INS_TYPE_OUTPUT;

        if (direction == SAFEXCEL_ENCRYPT) {
                token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
                token[2].packet_length = digestsize;
                token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                                EIP197_TOKEN_STAT_LAST_PACKET;
                token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                        EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
        } else {
                token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
                token[2].packet_length = digestsize;
                token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
                                EIP197_TOKEN_STAT_LAST_PACKET;
                token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

                token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
                token[3].packet_length = digestsize |
                                         EIP197_TOKEN_HASH_RESULT_VERIFY;
                token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
                                EIP197_TOKEN_STAT_LAST_PACKET;
                token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
        }
}
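/*
 * Set the AES key for skcipher transforms. If the key changes while a context
 * record already exists on an EIP197, flag the context for invalidation so
 * stale key material is not reused from the engine's record cache.
 */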
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
                                        const u8 *key, unsigned int len)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct crypto_aes_ctx aes;
        int ret, i;

        ret = crypto_aes_expand_key(&aes, key, len);
        if (ret) {
                crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return ret;
        }

        if (priv->version == EIP197 && ctx->base.ctxr_dma) {
                for (i = 0; i < len / sizeof(u32); i++) {
                        if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        for (i = 0; i < len / sizeof(u32); i++)
                ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

        ctx->key_len = len;

        memzero_explicit(&aes, sizeof(aes));

        return 0;
}
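/*
 * Set the authenc() key pair: the AES key is stored in the context as-is,
 * while the authentication key is turned into precomputed HMAC ipad/opad
 * states through the safexcel ahash algorithms. As for skciphers, a key
 * change on EIP197 schedules a context invalidation.
 */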
static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
                                    unsigned int len)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_ahash_export_state istate, ostate;
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct crypto_authenc_keys keys;

        if (crypto_authenc_extractkeys(&keys, key, len) != 0)
                goto badkey;

        if (keys.enckeylen > sizeof(ctx->key))
                goto badkey;

        /* Encryption key */
        if (priv->version == EIP197 && ctx->base.ctxr_dma &&
            memcmp(ctx->key, keys.enckey, keys.enckeylen))
                ctx->base.needs_inv = true;

        /* Auth key */
        switch (ctx->alg) {
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
                if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
                                         keys.authkeylen, &istate, &ostate))
                        goto badkey;
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
                if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
                                         keys.authkeylen, &istate, &ostate))
                        goto badkey;
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
                if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
                                         keys.authkeylen, &istate, &ostate))
                        goto badkey;
                break;
        default:
                dev_err(priv->dev, "aead: unsupported hash algorithm\n");
                goto badkey;
        }

        crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
                                    CRYPTO_TFM_RES_MASK);

        if (priv->version == EIP197 && ctx->base.ctxr_dma &&
            (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
             memcmp(ctx->opad, ostate.state, ctx->state_sz)))
                ctx->base.needs_inv = true;

        /* Now copy the keys into the context */
        memcpy(ctx->key, keys.enckey, keys.enckeylen);
        ctx->key_len = keys.enckeylen;

        memcpy(ctx->ipad, &istate.state, ctx->state_sz);
        memcpy(ctx->opad, &ostate.state, ctx->state_sz);

        memzero_explicit(&keys, sizeof(keys));

        return 0;

badkey:
        crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
        memzero_explicit(&keys, sizeof(keys));
        return -EINVAL;
}
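/*
 * Fill the control words of the first command descriptor: operation type
 * (encrypt/decrypt, with or without hashing), cipher mode, key length and the
 * total context size (key plus the ipad/opad digests for AEAD).
 */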
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
                                    struct crypto_async_request *async,
                                    struct safexcel_cipher_req *sreq,
                                    struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ctrl_size;

        if (ctx->aead) {
                if (sreq->direction == SAFEXCEL_ENCRYPT)
                        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
                else
                        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
        } else {
                cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;

                /* The decryption control type is a combination of the
                 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
                 * the AES modes.
                 */
                if (sreq->direction == SAFEXCEL_DECRYPT)
                        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
        }

        cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
        cdesc->control_data.control1 |= ctx->mode;

        if (ctx->aead)
                cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
                                                ctx->alg;

        switch (ctx->key_len) {
        case AES_KEYSIZE_128:
                cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
                break;
        case AES_KEYSIZE_192:
                cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
                break;
        case AES_KEYSIZE_256:
                cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
                break;
        default:
                dev_err(priv->dev, "aes keysize not supported: %u\n",
                        ctx->key_len);
                return -EINVAL;
        }

        ctrl_size = ctx->key_len / sizeof(u32);
        if (ctx->aead)
                /* Take into account the ipad+opad digests */
                ctrl_size += ctx->state_sz / sizeof(u32) * 2;

        cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);

        return 0;
}
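/*
 * Dequeue and check all result descriptors belonging to a completed request,
 * then unmap the source/destination scatterlists.
 */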
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
                                      struct crypto_async_request *async,
                                      struct scatterlist *src,
                                      struct scatterlist *dst,
                                      unsigned int cryptlen,
                                      struct safexcel_cipher_req *sreq,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        int ndesc = 0;

        *ret = 0;

        spin_lock_bh(&priv->ring[ring].egress_lock);
        do {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: result: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                if (likely(!*ret))
                        *ret = safexcel_rdesc_check_errors(priv, rdesc);

                ndesc++;
        } while (!rdesc->last_seg);

        safexcel_complete(priv, ring);
        spin_unlock_bh(&priv->ring[ring].egress_lock);

        if (src == dst) {
                dma_unmap_sg(priv->dev, src,
                             sg_nents_for_len(src, cryptlen),
                             DMA_BIDIRECTIONAL);
        } else {
                dma_unmap_sg(priv->dev, src,
                             sg_nents_for_len(src, cryptlen),
                             DMA_TO_DEVICE);
                dma_unmap_sg(priv->dev, dst,
                             sg_nents_for_len(dst, cryptlen),
                             DMA_FROM_DEVICE);
        }

        *should_complete = true;

        return ndesc;
}
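/*
 * Map the scatterlists, copy the key material into the DMA context record and
 * push one command descriptor per source segment plus one result descriptor
 * per destination segment to the selected ring. On any descriptor allocation
 * failure the ring write pointers are rolled back and the buffers unmapped.
 */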
static int safexcel_aes_send(struct crypto_async_request *base, int ring,
                             struct safexcel_request *request,
                             struct safexcel_cipher_req *sreq,
                             struct scatterlist *src, struct scatterlist *dst,
                             unsigned int cryptlen, unsigned int assoclen,
                             unsigned int digestsize, u8 *iv, int *commands,
                             int *results)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_command_desc *cdesc;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        unsigned int totlen = cryptlen + assoclen;
        int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
        int i, ret = 0;

        if (src == dst) {
                nr_src = dma_map_sg(priv->dev, src,
                                    sg_nents_for_len(src, totlen),
                                    DMA_BIDIRECTIONAL);
                nr_dst = nr_src;
                if (!nr_src)
                        return -EINVAL;
        } else {
                nr_src = dma_map_sg(priv->dev, src,
                                    sg_nents_for_len(src, totlen),
                                    DMA_TO_DEVICE);
                if (!nr_src)
                        return -EINVAL;

                nr_dst = dma_map_sg(priv->dev, dst,
                                    sg_nents_for_len(dst, totlen),
                                    DMA_FROM_DEVICE);
                if (!nr_dst) {
                        dma_unmap_sg(priv->dev, src,
                                     sg_nents_for_len(src, totlen),
                                     DMA_TO_DEVICE);
                        return -EINVAL;
                }
        }

        memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

        if (ctx->aead) {
                memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
                       ctx->ipad, ctx->state_sz);
                memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
                       ctx->opad, ctx->state_sz);
        }

        spin_lock_bh(&priv->ring[ring].egress_lock);

        /* command descriptors */
        for_each_sg(src, sg, nr_src, i) {
                int len = sg_dma_len(sg);

                /* Do not overflow the request */
                if (queued - len < 0)
                        len = queued;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
                                           sg_dma_address(sg), len, totlen,
                                           ctx->base.ctxr_dma);
                if (IS_ERR(cdesc)) {
                        /* No space left in the command descriptor ring */
                        ret = PTR_ERR(cdesc);
                        goto cdesc_rollback;
                }
                n_cdesc++;

                if (n_cdesc == 1) {
                        safexcel_context_control(ctx, base, sreq, cdesc);
                        if (ctx->aead)
                                safexcel_aead_token(ctx, iv, cdesc,
                                                    sreq->direction, cryptlen,
                                                    assoclen, digestsize);
                        else
                                safexcel_skcipher_token(ctx, iv, cdesc,
                                                        cryptlen);
                }

                queued -= len;
                if (!queued)
                        break;
        }

        /* result descriptors */
        for_each_sg(dst, sg, nr_dst, i) {
                bool first = !i, last = (i == nr_dst - 1);
                u32 len = sg_dma_len(sg);

                rdesc = safexcel_add_rdesc(priv, ring, first, last,
                                           sg_dma_address(sg), len);
                if (IS_ERR(rdesc)) {
                        /* No space left in the result descriptor ring */
                        ret = PTR_ERR(rdesc);
                        goto rdesc_rollback;
                }
                n_rdesc++;
        }

        spin_unlock_bh(&priv->ring[ring].egress_lock);

        request->req = base;

        *commands = n_cdesc;
        *results = n_rdesc;
        return 0;

rdesc_rollback:
        for (i = 0; i < n_rdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

        spin_unlock_bh(&priv->ring[ring].egress_lock);

        if (src == dst) {
                dma_unmap_sg(priv->dev, src,
                             sg_nents_for_len(src, totlen),
                             DMA_BIDIRECTIONAL);
        } else {
                dma_unmap_sg(priv->dev, src,
                             sg_nents_for_len(src, totlen),
                             DMA_TO_DEVICE);
                dma_unmap_sg(priv->dev, dst,
                             sg_nents_for_len(dst, totlen),
                             DMA_FROM_DEVICE);
        }

        return ret;
}
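/*
 * Handle the result of a context invalidation request. When the tfm is being
 * torn down (exit_inv) the context record is freed; otherwise the original
 * crypto request is requeued on a freshly selected ring so it is processed
 * with the new context.
 */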
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *base,
                                      bool *should_complete, int *ret)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_result_desc *rdesc;
        int ndesc = 0, enq_ret;

        *ret = 0;

        spin_lock_bh(&priv->ring[ring].egress_lock);
        do {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: invalidate: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                if (rdesc->result_data.error_code) {
                        dev_err(priv->dev, "cipher: invalidate: result descriptor error (%d)\n",
                                rdesc->result_data.error_code);
                        *ret = -EIO;
                }

                ndesc++;
        } while (!rdesc->last_seg);

        safexcel_complete(priv, ring);
        spin_unlock_bh(&priv->ring[ring].egress_lock);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;

                return ndesc;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return ndesc;
}
static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
                                           int ring,
                                           struct crypto_async_request *async,
                                           bool *should_complete, int *ret)
{
        struct skcipher_request *req = skcipher_request_cast(async);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        int err;

        if (sreq->needs_inv) {
                sreq->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async, req->src,
                                                 req->dst, req->cryptlen, sreq,
                                                 should_complete, ret);
        }

        return err;
}
static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
                                       int ring,
                                       struct crypto_async_request *async,
                                       bool *should_complete, int *ret)
{
        struct aead_request *req = aead_request_cast(async);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        int err;

        if (sreq->needs_inv) {
                sreq->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async, req->src,
                                                 req->dst,
                                                 req->cryptlen + crypto_aead_authsize(tfm),
                                                 sreq, should_complete, ret);
        }

        return err;
}
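/*
 * Queue a cache invalidation command for this context; it consumes exactly
 * one command and one result descriptor.
 */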
static int safexcel_cipher_send_inv(struct crypto_async_request *base,
                                    int ring, struct safexcel_request *request,
                                    int *commands, int *results)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring,
                                        request);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
                                  struct safexcel_request *request,
                                  int *commands, int *results)
{
        struct skcipher_request *req = skcipher_request_cast(async);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        BUG_ON(priv->version == EIP97 && sreq->needs_inv);

        if (sreq->needs_inv)
                ret = safexcel_cipher_send_inv(async, ring, request, commands,
                                               results);
        else
                ret = safexcel_aes_send(async, ring, request, sreq, req->src,
                                        req->dst, req->cryptlen, 0, 0, req->iv,
                                        commands, results);

        return ret;
}
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
                              struct safexcel_request *request, int *commands,
                              int *results)
{
        struct aead_request *req = aead_request_cast(async);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        BUG_ON(priv->version == EIP97 && sreq->needs_inv);

        if (sreq->needs_inv)
                ret = safexcel_cipher_send_inv(async, ring, request, commands,
                                               results);
        else
                ret = safexcel_aes_send(async, ring, request, sreq, req->src,
                                        req->dst, req->cryptlen, req->assoclen,
                                        crypto_aead_authsize(tfm), req->iv,
                                        commands, results);

        return ret;
}
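/*
 * Synchronously invalidate the context record: mark the request as an
 * invalidation, queue it and wait for the completion callback. This is only
 * used on EIP197, which caches context records in hardware.
 */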
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
                                    struct crypto_async_request *base,
                                    struct safexcel_cipher_req *sreq,
                                    struct safexcel_inv_result *result)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ring = ctx->base.ring;

        init_completion(&result->completion);

        ctx = crypto_tfm_ctx(base->tfm);
        ctx->base.exit_inv = true;
        sreq->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result->completion);

        if (result->error) {
                dev_warn(priv->dev,
                         "cipher: sync: invalidate: completion error %d\n",
                         result->error);
                return result->error;
        }

        return 0;
}
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        struct safexcel_inv_result result = {};

        memset(req, 0, sizeof(struct skcipher_request));

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      safexcel_inv_complete, &result);
        skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        struct safexcel_inv_result result = {};

        memset(req, 0, sizeof(struct aead_request));

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  safexcel_inv_complete, &result);
        aead_request_set_tfm(req, __crypto_aead_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
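/*
 * Common entry point for all AES requests (skcipher and AEAD): record the
 * direction and mode, allocate the DMA context record on first use, schedule
 * an invalidation if the key changed, and queue the request on the context's
 * ring.
 */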
static int safexcel_aes(struct crypto_async_request *base,
                        struct safexcel_cipher_req *sreq,
                        enum safexcel_cipher_direction dir, u32 mode)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret, ring;

        sreq->needs_inv = false;
        sreq->direction = dir;
        ctx->mode = mode;

        if (ctx->base.ctxr) {
                if (priv->version == EIP197 && ctx->base.needs_inv) {
                        sreq->needs_inv = true;
                        ctx->base.needs_inv = false;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(*base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}
static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
{
        return safexcel_aes(&req->base, skcipher_request_ctx(req),
                            SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB);
}

static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
{
        return safexcel_aes(&req->base, skcipher_request_ctx(req),
                            SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB);
}
static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(tfm->__crt_alg, struct safexcel_alg_template,
                             alg.skcipher.base);

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct safexcel_cipher_req));

        ctx->priv = tmpl->priv;

        ctx->base.send = safexcel_skcipher_send;
        ctx->base.handle_result = safexcel_skcipher_handle_result;

        return 0;
}
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        memzero_explicit(ctx->key, sizeof(ctx->key));

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return -ENOMEM;

        memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));

        return 0;
}
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        if (safexcel_cipher_cra_exit(tfm))
                return;

        if (priv->version == EIP197) {
                ret = safexcel_skcipher_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "skcipher: invalidation error %d\n",
                                 ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        if (safexcel_cipher_cra_exit(tfm))
                return;

        if (priv->version == EIP197) {
                ret = safexcel_aead_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "aead: invalidation error %d\n",
                                 ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}
struct safexcel_alg_template safexcel_alg_ecb_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_ecb_aes_encrypt,
                .decrypt = safexcel_ecb_aes_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(aes)",
                        .cra_driver_name = "safexcel-ecb-aes",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
{
        return safexcel_aes(&req->base, skcipher_request_ctx(req),
                            SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}

static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
{
        return safexcel_aes(&req->base, skcipher_request_ctx(req),
                            SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}
struct safexcel_alg_template safexcel_alg_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_cbc_aes_encrypt,
                .decrypt = safexcel_cbc_aes_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .base = {
                        .cra_name = "cbc(aes)",
                        .cra_driver_name = "safexcel-cbc-aes",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
static int safexcel_aead_encrypt(struct aead_request *req)
{
        struct safexcel_cipher_req *creq = aead_request_ctx(req);

        return safexcel_aes(&req->base, creq, SAFEXCEL_ENCRYPT,
                            CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
        struct safexcel_cipher_req *creq = aead_request_ctx(req);

        return safexcel_aes(&req->base, creq, SAFEXCEL_DECRYPT,
                            CONTEXT_CONTROL_CRYPTO_MODE_CBC);
}
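/*
 * AEAD tfm initialization: hook up the AEAD send/result handlers and mark the
 * context as AEAD; the per-hash init helpers below then select the HMAC
 * algorithm and its state size.
 */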
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(tfm->__crt_alg, struct safexcel_alg_template,
                             alg.aead.base);

        crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
                                sizeof(struct safexcel_cipher_req));

        ctx->priv = tmpl->priv;

        ctx->aead = true;
        ctx->base.send = safexcel_aead_send;
        ctx->base.handle_result = safexcel_aead_handle_result;

        return 0;
}
static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        ctx->state_sz = SHA1_DIGEST_SIZE;

        return 0;
}
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .alg.aead = {
                .setkey = safexcel_aead_aes_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA1_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha1),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha1_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
        ctx->state_sz = SHA256_DIGEST_SIZE;

        return 0;
}
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .alg.aead = {
                .setkey = safexcel_aead_aes_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha256),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha256_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};
static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        /* SHA-224 uses the SHA-256 sized internal state */
        ctx->state_sz = SHA256_DIGEST_SIZE;

        return 0;
}
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .alg.aead = {
                .setkey = safexcel_aead_aes_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha224),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
                        .cra_priority = 300,
                        .cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha224_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,