// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"
struct safexcel_ahash_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 alg;
	u8 key_sz;
	bool cbcmac;
	bool fb_do_setkey;

	__le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
	__le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];

	struct crypto_cipher *kaes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;    /* expected state size, only set once */
	u8 block_sz;    /* block size, only set once */
	u8 digest_sz;   /* output digest size, only set once */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	if (unlikely(cbcmac && input_length)) {
		token[0].stat = 0;
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
		eip197_noop_token(&token[1]);
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

	eip197_noop_token(&token[3]);
}
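/*
 * For reference, an informal reading of the token sequence built above
 * (derived from this code, not from the engine datasheet): for a plain
 * hash, token[0] hashes input_length bytes and is the last hash token,
 * and token[2] INSERTs result_length digest bytes into the output. For
 * CBC-MAC with a partial final block, token[0] is left open and token[1]
 * INSERTs 16 - input_length zero bytes, so the engine always consumes a
 * whole AES block.
 */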
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and setup the context
	 * fields. Do this now as we need it to setup the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH  |
				/* ensure its not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_RESTART_HASH   |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure its not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, setup (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into an u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       ctx->opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);
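/*
 * Informal summary of the context layouts programmed above (derived from
 * the code rather than from documentation): XCM (CBC-MAC/XCBC/CMAC/CRC32)
 * loads the key material or running MAC as the "digest"; the first block
 * of a basic hash only needs RESTART_HASH plus a non-zero context size;
 * any continuation reloads the inner digest; and an HMAC finish either
 * uses true HMAC mode (inner + outer digest, no count) or is faked as a
 * precomputed-digest hash with an explicit block counter.
 */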
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, ctx->opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ...*/
			*(__le32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;
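
	return 1;
}

/*
 * Note on the "fake HMAC" path above: when the engine cannot do a real
 * HMAC continuation, the completed inner digest is re-queued as a fresh
 * hash of opad || inner-digest, i.e. sreq->len is rewound to one block
 * plus the digest size and safexcel_ahash_enqueue() is called again, so a
 * single crypto API request may take two trips through the ring.
 */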
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
						   sg_nents(areq->src),
						   req->cache + cache_len,
						   skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 iso K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
					((__be32 *)req->cache)[i] ^=
						cpu_to_be32(le32_to_cpu(
							ctx->ipad[i] ^ ctx->ipad[i + 4]));
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}
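/*
 * Per request, safexcel_ahash_send_req() thus emits at most one command
 * descriptor for the cache buffer, one per (partially skipped) scatterlist
 * entry, and exactly one result descriptor pointing at req->state; the
 * error path unwinds those mappings in strict reverse order.
 */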
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}
static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}
static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
	struct safexcel_inv_result result = {};
	int ring = ctx->base.ring;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_inv_complete, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result.completion);

	if (result.error) {
		dev_warn(priv->dev, "hash: completion error (%d)\n",
			 result.error);
		return result.error;
	}

	return 0;
}
/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, aka. when there is at least 1 block size in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * tot sz handled by update() - last req sz - tot sz handled by send()
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there isn't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}
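/*
 * Worked example of the caching arithmetic (illustrative values): with
 * HASH_CACHE_SIZE == 64, an update() of 40 bytes followed by one of 20
 * bytes keeps all 60 bytes in req->cache (cache_len + nbytes <= 64); a
 * third update() of 10 bytes would make the total 70, so this function
 * returns -E2BIG and the caller flushes the excess to the engine.
 */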
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		    /* invalidate for *any* non-XCBC continuation */
		    ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
			     ctx->opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}
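/*
 * In other words, update() only touches the hardware when the cache
 * overflows (ret != 0 without a pending finish) or when last_req marks an
 * HMAC ipad/opad precompute that must be pushed despite never finishing.
 */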
static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, ctx->ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
			((__be32 *)areq->result)[i] =
				cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
		areq->result[0] ^= 0x80;			// 10- padding
		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz - 8] = (req->block_sz << 3) &
							255;
			req->cache[req->block_sz - 7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz - 2] = (req->block_sz >> 5);
			req->cache[req->block_sz - 1] = (req->block_sz << 3) &
							255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}
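/*
 * Example of the zero-length HMAC pad block built above, assuming SHA-256
 * (block_sz = 64): cache = 0x80 followed by 61 zero bytes, then the
 * big-endian bit length of one block, 512 = 0x0200, i.e. cache[62] =
 * 64 >> 5 = 0x02 and cache[63] = (64 << 3) & 255 = 0x00.
 */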
static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}
static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;
	ctx->fb_do_setkey = false;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}
static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA1_BLOCK_SIZE;
	req->processed = SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
	struct completion completion;
	int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
	struct safexcel_ahash_result *result = req->data;

	if (error == -EINPROGRESS)
		return;

	result->error = error;
	complete(&result->completion);
}
static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	struct safexcel_ahash_result result;
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   safexcel_ahash_complete, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);
		init_completion(&result.completion);

		ret = crypto_ahash_digest(areq);
		if (ret == -EINPROGRESS || ret == -EBUSY) {
			wait_for_completion_interruptible(&result.completion);
			ret = result.error;
		}

		/* Avoid leaking */
		memzero_explicit(keydup, keylen);
		kfree(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}
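/*
 * This follows RFC 2104: the key is used as-is if it fits in one block
 * (hashed down to digestsize otherwise), zero-padded to the block size,
 * and XORed with the 0x36/0x5c constants to produce the ipad and opad
 * blocks that seed the inner and outer hash states.
 */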
static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_result result;
	struct safexcel_ahash_req *req;
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_ahash_complete, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);
	init_completion(&result.completion);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		return ret;

	wait_for_completion_interruptible(&result.completion);
	if (result.error)
		return result.error;

	return crypto_ahash_export(areq, state);
}
int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
			 void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
	    (memcmp(ctx->ipad, istate.state, state_sz) ||
	     memcmp(ctx->opad, ostate.state, state_sz)))
		ctx->base.needs_inv = true;

	memcpy(ctx->ipad, &istate.state, state_sz);
	memcpy(ctx->opad, &ostate.state, state_sz);

	return 0;
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}
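/*
 * The precompute flow shared by all the HMAC setkey helpers above is:
 *   1. safexcel_hmac_init_pad()  - derive the ipad/opad blocks from the key;
 *   2. safexcel_hmac_init_iv()   - run each pad block through the driver's
 *      own hash (with last_req set) and export the resulting midstate;
 *   3. cache the two midstates in ctx->ipad/ctx->opad for the engine.
 */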
struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA256_BLOCK_SIZE;
	req->processed = SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SHA512_BLOCK_SIZE;
	req->processed = SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = MD5_HMAC_BLOCK_SIZE;
	req->processed = MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(ctx->ipad, 0, sizeof(u32));
	return ret;
}

static int safexcel_crc32_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded key */
	req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = sizeof(u32);
	req->processed = sizeof(u32);

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = sizeof(u32);
	req->digest_sz = sizeof(u32);
	req->block_sz = sizeof(u32);

	return 0;
}

static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	if (keylen != sizeof(u32))
		return -EINVAL;

	memcpy(ctx->ipad, key, sizeof(u32));
	return 0;
}

static int safexcel_crc32_digest(struct ahash_request *areq)
{
	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
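/*
 * CRC32 rides the XCM infrastructure: the "key" is the initial CRC value
 * (the default all-zeroes key yields the standard ~0 starting value via
 * the inversion in safexcel_crc32_init()), and safexcel_handle_req_result()
 * undoes the engine's final XOR with 0xffffffff before returning the
 * result.
 */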
static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, ctx->ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len = AES_BLOCK_SIZE;
	req->processed = AES_BLOCK_SIZE;

	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac = true;

	return 0;
}

static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = true;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				   unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	/* precompute the XCBC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	if (ret)
		return ret;

	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] =
			cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));

	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes,
				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
				   AES_MIN_KEY_SIZE);
	if (ret)
		return ret;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->kaes);
}

static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->kaes);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};
static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	__be64 consts[4];
	u64 _const[2];
	u8 msb_mask, gfmask;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->ipad[i + 8] =
			cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));

	/* precompute the CMAC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	if (ret)
		return ret;

	/* code below borrowed from crypto/cmac.c */
	/* encrypt the zero block */
	memset(consts, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);

	gfmask = 0x87;
	_const[0] = be64_to_cpu(consts[1]);
	_const[1] = be64_to_cpu(consts[0]);

	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
	for (i = 0; i < 4; i += 2) {
		msb_mask = ((s64)_const[1] >> 63) & gfmask;
		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
		_const[0] = (_const[0] << 1) ^ msb_mask;

		consts[i + 0] = cpu_to_be64(_const[1]);
		consts[i + 1] = cpu_to_be64(_const[0]);
	}
	/* end of code borrowed from crypto/cmac.c */

	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

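/*
 * For reference, the subkey math above (NIST SP 800-38B): with
 * L = E_K(0^128), each loop iteration performs one doubling in GF(2^128),
 *
 *	double(X) = (X << 1) ^ (msb(X) ? 0x87 : 0)
 *
 * where 0x87 (gfmask) reduces modulo the field polynomial
 * x^128 + x^7 + x^2 + x + 1. consts[] thus ends up holding K1 = L*u and
 * K2 = L*u^2, which are byte-swapped into ctx->ipad ahead of the expanded
 * AES key itself.
 */
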
struct safexcel_alg_template safexcel_alg_cmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cmac(aes)",
				.cra_driver_name = "safexcel-cmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;

	return 0;
}

static int safexcel_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sm3_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sm3",
				.cra_driver_name = "safexcel-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
					SM3_DIGEST_SIZE);
}

static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len = SM3_BLOCK_SIZE;
	req->processed = SM3_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SM3_DIGEST_SIZE;
	req->digest_sz = SM3_DIGEST_SIZE;
	req->block_sz = SM3_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

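/*
 * The init above shows the driver's HMAC shortcut: setkey precomputed the
 * digest state of the key^ipad block, so each request starts from that
 * state with len == processed == one block, as if the ipad block had
 * already been hashed. CONTEXT_CONTROL_DIGEST_PRECOMPUTED then lets the
 * engine finish the outer (opad) hash itself.
 */
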
static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sm3_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SM3,
	.alg.ahash = {
		.init = safexcel_hmac_sm3_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sm3_digest,
		.setkey = safexcel_hmac_sm3_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SM3_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sm3)",
				.cra_driver_name = "safexcel-hmac-sm3",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SM3_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_224_DIGEST_SIZE;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_sha3_fbcheck(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);
	int ret = 0;

	if (ctx->do_fallback) {
		ahash_request_set_tfm(subreq, ctx->fback);
		ahash_request_set_callback(subreq, req->base.flags,
					   req->base.complete, req->base.data);
		ahash_request_set_crypt(subreq, req->src, req->result,
					req->nbytes);
		if (!ctx->fb_init_done) {
			if (ctx->fb_do_setkey) {
				/* Set fallback cipher HMAC key */
				u8 key[SHA3_224_BLOCK_SIZE];

				memcpy(key, ctx->ipad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				memcpy(key +
				       crypto_ahash_blocksize(ctx->fback) / 2,
				       ctx->opad,
				       crypto_ahash_blocksize(ctx->fback) / 2);
				ret = crypto_ahash_setkey(ctx->fback, key,
					crypto_ahash_blocksize(ctx->fback));
				memzero_explicit(key,
					crypto_ahash_blocksize(ctx->fback));
				ctx->fb_do_setkey = false;
			}
			ret = ret ?: crypto_ahash_init(subreq);
			ctx->fb_init_done = true;
		}
	}
	return ret;
}

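/*
 * Summary of the SHA3 fallback scheme used by all ops below: the engine
 * can only run a SHA3 request in one init->finup pass, so any update,
 * export, import or zero-length request sets ctx->do_fallback and the
 * request is rerouted to the software ahash in ctx->fback. fbcheck()
 * lazily mirrors the request parameters into the subrequest and, for the
 * HMAC variants, replays the deferred setkey (fb_do_setkey) by rebuilding
 * the key from the ipad and opad halves.
 */
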
static int safexcel_sha3_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
}

static int safexcel_sha3_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
}

static int safexcel_sha3_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback |= !req->nbytes;
	if (ctx->do_fallback)
		/* Update or ex/import happened or len 0, cannot use the HW */
		return safexcel_sha3_fbcheck(req) ?:
		       crypto_ahash_finup(subreq);
	else
		return safexcel_ahash_finup(req);
}

static int safexcel_sha3_digest_fallback(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	ctx->fb_init_done = false;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
}

static int safexcel_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_sha3_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
}

static int safexcel_sha3_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *subreq = ahash_request_ctx(req);

	ctx->do_fallback = true;
	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
	// return safexcel_ahash_import(req, in);
}

static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Update statesize from fallback algorithm! */
	crypto_hash_alg_common(ahash)->statesize =
		crypto_ahash_statesize(ctx->fback);
	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
					    sizeof(struct ahash_request) +
					    crypto_ahash_reqsize(ctx->fback)));
	return 0;
}

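/*
 * Note: statesize is taken from the fallback because safexcel_sha3_export
 * and safexcel_sha3_import always round-trip through the software
 * implementation; likewise the request size must fit either a HW request
 * context or a complete fallback subrequest.
 */
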
static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_224_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "safexcel-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_256_DIGEST_SIZE;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_256_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "safexcel-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_384_DIGEST_SIZE;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_384_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "safexcel-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
	req->state_sz = SHA3_512_DIGEST_SIZE;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

	/* HW cannot do zero length hash, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_sha3_512_digest,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "safexcel-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_sha3_cra_init,
				.cra_exit = safexcel_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_sha3_cra_init(tfm);
	if (ret)
		return ret;

	/* Allocate precalc basic digest implementation */
	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shpre))
		return PTR_ERR(ctx->shpre);

	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
	if (!ctx->shdesc) {
		crypto_free_shash(ctx->shpre);
		return -ENOMEM;
	}
	ctx->shdesc->tfm = ctx->shpre;
	return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(ctx->fback);
	crypto_free_shash(ctx->shpre);
	kfree(ctx->shdesc);
	safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret = 0;

	if (keylen > crypto_ahash_blocksize(tfm)) {
		/*
		 * If the key is larger than the blocksize, then hash it
		 * first using our fallback cipher
		 */
		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
					  (u8 *)ctx->ipad);
		keylen = crypto_shash_digestsize(ctx->shpre);

		/*
		 * If the digest is larger than half the blocksize, we need to
		 * move the rest to opad due to the way our HMAC infra works.
		 */
		if (keylen > crypto_ahash_blocksize(tfm) / 2)
			/* Buffers overlap, need to use memmove iso memcpy! */
			memmove(ctx->opad,
				(u8 *)ctx->ipad +
					crypto_ahash_blocksize(tfm) / 2,
				keylen - crypto_ahash_blocksize(tfm) / 2);
	} else {
		/*
		 * Copy the key to our ipad & opad buffers
		 * Note that ipad and opad each contain one half of the key,
		 * to match the existing HMAC driver infrastructure.
		 */
		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
			memcpy(ctx->ipad, key, keylen);
		} else {
			memcpy(ctx->ipad, key,
			       crypto_ahash_blocksize(tfm) / 2);
			memcpy(ctx->opad,
			       key + crypto_ahash_blocksize(tfm) / 2,
			       keylen - crypto_ahash_blocksize(tfm) / 2);
		}
	}

	/* Pad key with zeroes */
	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
		memset((u8 *)ctx->ipad + keylen, 0,
		       crypto_ahash_blocksize(tfm) / 2 - keylen);
		memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
	} else {
		memset((u8 *)ctx->opad + keylen -
		       crypto_ahash_blocksize(tfm) / 2, 0,
		       crypto_ahash_blocksize(tfm) - keylen);
	}

	/* If doing fallback, still need to set the new key! */
	ctx->fb_do_setkey = true;

	return ret;
}

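/*
 * Resulting key layout for blocksize B (keys longer than B are first
 * digested down by ctx->shpre, then treated the same):
 *
 *	keylen <= B/2: ipad = key | zeroes,   opad = zeroes
 *	keylen >  B/2: ipad = key[0 .. B/2),  opad = key[B/2 .. keylen) | zeroes
 *
 * i.e. each of ipad/opad carries one half of the (padded) key, matching
 * the convention of the driver's generic HMAC infrastructure.
 */
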
static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_224_BLOCK_SIZE;
	req->processed = SHA3_224_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_224_DIGEST_SIZE;
	req->block_sz = SHA3_224_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_224_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_224_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_224_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-224)",
				.cra_driver_name = "safexcel-hmac-sha3-224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_224_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_256_BLOCK_SIZE;
	req->processed = SHA3_256_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_256_DIGEST_SIZE;
	req->block_sz = SHA3_256_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_256_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_256_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_256_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-256)",
				.cra_driver_name = "safexcel-hmac-sha3-256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_256_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_384_BLOCK_SIZE;
	req->processed = SHA3_384_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_384_DIGEST_SIZE;
	req->block_sz = SHA3_384_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_384_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_384_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_384_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-384)",
				.cra_driver_name = "safexcel-hmac-sha3-384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_384_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Copy (half of) the key */
	memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
	/* Start of HMAC should have len == processed == blocksize */
	req->len = SHA3_512_BLOCK_SIZE;
	req->processed = SHA3_512_BLOCK_SIZE;
	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
	req->digest_sz = SHA3_512_DIGEST_SIZE;
	req->block_sz = SHA3_512_BLOCK_SIZE;
	req->hmac = true;
	ctx->do_fallback = false;
	ctx->fb_init_done = false;

	return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
	if (req->nbytes)
		return safexcel_hmac_sha3_512_init(req) ?:
		       safexcel_ahash_finup(req);

	/* HW cannot do zero length HMAC, use fallback instead */
	return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA3,
	.alg.ahash = {
		.init = safexcel_hmac_sha3_512_init,
		.update = safexcel_sha3_update,
		.final = safexcel_sha3_final,
		.finup = safexcel_sha3_finup,
		.digest = safexcel_hmac_sha3_512_digest,
		.setkey = safexcel_hmac_sha3_setkey,
		.export = safexcel_sha3_export,
		.import = safexcel_sha3_import,
		.halg = {
			.digestsize = SHA3_512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha3-512)",
				.cra_driver_name = "safexcel-hmac-sha3-512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_KERN_DRIVER_ONLY |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_hmac_sha3_512_cra_init,
				.cra_exit = safexcel_hmac_sha3_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};