// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/skcipher.h>
#include <crypto/sm3.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"

struct safexcel_ahash_ctx {
	struct safexcel_context base;

	u32 alg;
	u8  key_sz;
	bool cbcmac;
	bool do_fallback;
	bool fb_init_done;
	bool fb_do_setkey;

	struct crypto_cipher *kaes;
	struct crypto_ahash *fback;
	struct crypto_shash *shpre;
	struct shash_desc *shdesc;
};

struct safexcel_ahash_req {
	bool last_req;
	bool finish;
	bool hmac;
	bool needs_inv;
	bool hmac_zlen;
	bool len_is_le;
	bool not_first;
	bool xcbcmac;

	int nents;
	dma_addr_t result_dma;

	u32 digest;

	u8 state_sz;    /* expected state size, only set once */
	u8 block_sz;    /* block size, only set once */
	u8 digest_sz;   /* output digest size, only set once */
	__le32 state[SHA3_512_BLOCK_SIZE /
		     sizeof(__le32)] __aligned(sizeof(__le32));

	u64 len;
	u64 processed;

	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
	dma_addr_t cache_dma;
	unsigned int cache_sz;

	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
};

static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
	return req->len - req->processed;
}

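/*
 * Editorial note: req->len counts every byte accepted by update() and
 * req->processed counts bytes already handed to the engine, so the
 * difference is exactly what still sits in the driver (cache plus the
 * not-yet-sent part of the current request). For example, after a single
 * update() of 100 bytes with nothing sent yet: len = 100, processed = 0,
 * queued = 100.
 */
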
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
				u32 input_length, u32 result_length,
				bool cbcmac)
{
	struct safexcel_token *token =
		(struct safexcel_token *)cdesc->control_data.token;

	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token[0].packet_length = input_length;
	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

	input_length &= 15;
	if (unlikely(cbcmac && input_length)) {
		token[0].stat = 0;
		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
		token[1].packet_length = 16 - input_length;
		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
	} else {
		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
		eip197_noop_token(&token[1]);
	}

	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
			EIP197_TOKEN_STAT_LAST_PACKET;
	token[2].packet_length = result_length;
	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;

	eip197_noop_token(&token[3]);
}

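/*
 * Editorial summary of the token program built above: token[0] runs the
 * input_length data bytes through the hash; token[1] either zero-pads a
 * partial CBC-MAC block up to 16 bytes (INSERT) or is a no-op; token[2]
 * inserts the result_length digest bytes into the result; token[3] is
 * always a no-op terminator.
 */
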
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
				     struct safexcel_ahash_req *req,
				     struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	u64 count = 0;

	cdesc->control_data.control0 = ctx->alg;
	cdesc->control_data.control1 = 0;

	/*
	 * Copy the input digest if needed, and setup the context
	 * fields. Do this now as we need it to setup the first command
	 * descriptor.
	 */
	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
		if (req->xcbcmac)
			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
		else
			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

		if (!req->finish && req->xcbcmac)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_DIGEST_XCM |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_SIZE(req->state_sz /
						     sizeof(u32));
		return;
	} else if (!req->processed) {
		/* First - and possibly only - block of basic hash only */
		if (req->finish)
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_RESTART_HASH  |
				/* ensure its not 0! */
				CONTEXT_CONTROL_SIZE(1);
		else
			cdesc->control_data.control0 |= req->digest |
				CONTEXT_CONTROL_TYPE_HASH_OUT  |
				CONTEXT_CONTROL_RESTART_HASH   |
				CONTEXT_CONTROL_NO_FINISH_HASH |
				/* ensure its not 0! */
				CONTEXT_CONTROL_SIZE(1);
		return;
	}

	/* Hash continuation or HMAC, setup (inner) digest from state */
	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);

	if (req->finish) {
		/* Compute digest count for hash/HMAC finish operations */
		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    req->hmac_zlen || (req->processed != req->block_sz)) {
			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;

			/* This is a hardware limitation, as the
			 * counter must fit into an u32. This represents
			 * a fairly big amount of input data, so we
			 * shouldn't see this.
			 */
			if (unlikely(count & 0xffffffff00000000ULL)) {
				dev_warn(priv->dev,
					 "Input data is too big\n");
				return;
			}
		}

		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
		    /* Special case: zero length HMAC */
		    req->hmac_zlen ||
		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
		    (req->processed != req->block_sz)) {
			/* Basic hash continue operation, need digest + cnt */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
			/* For zero-len HMAC, don't finalize, already padded! */
			if (req->hmac_zlen)
				cdesc->control_data.control0 |=
					CONTEXT_CONTROL_NO_FINISH_HASH;
			cdesc->control_data.control1 |=
				CONTEXT_CONTROL_DIGEST_CNT;
			ctx->base.ctxr->data[req->state_sz >> 2] =
				cpu_to_le32(count);
			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;

			/* Clear zero-length HMAC flag for next operation! */
			req->hmac_zlen = false;
		} else { /* HMAC */
			/* Need outer digest for HMAC finalization */
			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
			       &ctx->base.opad, req->state_sz);

			/* Single pass HMAC - no digest count */
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
				CONTEXT_CONTROL_TYPE_HASH_OUT |
				CONTEXT_CONTROL_DIGEST_HMAC;
		}
	} else { /* Hash continuation, do not finish yet */
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
			CONTEXT_CONTROL_TYPE_HASH_OUT |
			CONTEXT_CONTROL_NO_FINISH_HASH;
	}
}

static int safexcel_ahash_enqueue(struct ahash_request *areq);

static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	u64 cache_len;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: result: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (sreq->nents) {
		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}

	if (sreq->finish) {
		if (sreq->hmac &&
		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
			/* Faking HMAC using hash - need to do outer hash */
			memcpy(sreq->cache, sreq->state,
			       crypto_ahash_digestsize(ahash));

			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);

			sreq->len = sreq->block_sz +
				    crypto_ahash_digestsize(ahash);
			sreq->processed = sreq->block_sz;
			sreq->hmac = 0;

			if (priv->flags & EIP197_TRC_CACHE)
				ctx->base.needs_inv = true;
			areq->nbytes = 0;
			safexcel_ahash_enqueue(areq);

			*should_complete = false; /* Not done yet */
			return 1;
		}

		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
			/* Undo final XOR with 0xffffffff ...*/
			*(__le32 *)areq->result = ~sreq->state[0];
		} else {
			memcpy(areq->result, sreq->state,
			       crypto_ahash_digestsize(ahash));
		}
	}

	cache_len = safexcel_queued_len(sreq);
	if (cache_len)
		memcpy(sreq->cache, sreq->cache_next, cache_len);

	*should_complete = true;

	return 1;
}

static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
				   int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc;
	struct scatterlist *sg;
	struct safexcel_token *dmmy;
	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
	u64 queued, len;

	queued = safexcel_queued_len(req);
	if (queued <= HASH_CACHE_SIZE)
		cache_len = queued;
	else
		cache_len = queued - areq->nbytes;

	if (!req->finish && !req->last_req) {
		/* If this is not the last request and the queued data does not
		 * fit into full cache blocks, cache it for the next send call.
		 */
		extra = queued & (HASH_CACHE_SIZE - 1);

		/* If this is not the last request and the queued data
		 * is a multiple of a block, cache the last one for now.
		 */
		if (!extra)
			extra = HASH_CACHE_SIZE;

		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache_next, extra,
				   areq->nbytes - extra);

		queued -= extra;

		if (!queued) {
			*commands = 0;
			*results = 0;
			return 0;
		}

		extra = 0;
	}

	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
			/*
			 * Cache contains less than 1 full block, complete.
			 */
			extra = AES_BLOCK_SIZE - cache_len;
			if (queued > cache_len) {
				/* More data follows: borrow bytes */
				u64 tmp = queued - cache_len;

				skip = min_t(u64, tmp, extra);
				sg_pcopy_to_buffer(areq->src,
						   sg_nents(areq->src),
						   req->cache + cache_len,
						   skip, 0);
			}
			extra -= skip;
			memset(req->cache + cache_len + skip, 0, extra);
			if (!ctx->cbcmac && extra) {
				// 10- padding for XCBCMAC & CMAC
				req->cache[cache_len + skip] = 0x80;
				// HW will use K2 iso K3 - compensate!
				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
					u32 *cache = (void *)req->cache;
					u32 *ipad = ctx->base.ipad.word;
					u32 x;

					x = ipad[i] ^ ipad[i + 4];
					cache[i] ^= swab(x);
				}
			}
			cache_len = AES_BLOCK_SIZE;
			queued = queued + extra;
		}

		/* XCBC continue: XOR previous result into 1st word */
		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
	}

	len = queued;
	/* Add a command descriptor for the cached data, if any */
	if (cache_len) {
		req->cache_dma = dma_map_single(priv->dev, req->cache,
						cache_len, DMA_TO_DEVICE);
		if (dma_mapping_error(priv->dev, req->cache_dma))
			return -EINVAL;

		req->cache_sz = cache_len;
		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
						 (cache_len == len),
						 req->cache_dma, cache_len,
						 len, ctx->base.ctxr_dma,
						 &dmmy);
		if (IS_ERR(first_cdesc)) {
			ret = PTR_ERR(first_cdesc);
			goto unmap_cache;
		}
		n_cdesc++;

		queued -= cache_len;
		if (!queued)
			goto send_command;
	}

	/* Now handle the current ahash request buffer(s) */
	req->nents = dma_map_sg(priv->dev, areq->src,
				sg_nents_for_len(areq->src,
						 areq->nbytes),
				DMA_TO_DEVICE);
	if (!req->nents) {
		ret = -ENOMEM;
		goto cdesc_rollback;
	}

	for_each_sg(areq->src, sg, req->nents, i) {
		int sglen = sg_dma_len(sg);

		if (unlikely(sglen <= skip)) {
			skip -= sglen;
			continue;
		}

		/* Do not overflow the request */
		if ((queued + skip) <= sglen)
			sglen = queued;
		else
			sglen -= skip;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - sglen),
					   sg_dma_address(sg) + skip, sglen,
					   len, ctx->base.ctxr_dma, &dmmy);
		if (IS_ERR(cdesc)) {
			ret = PTR_ERR(cdesc);
			goto unmap_sg;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;
		n_cdesc++;

		queued -= sglen;
		if (!queued)
			break;
		skip = 0;
	}

send_command:
	/* Setup the context options */
	safexcel_context_control(ctx, req, first_cdesc);

	/* Add the token */
	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);

	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
					 DMA_FROM_DEVICE);
	if (dma_mapping_error(priv->dev, req->result_dma)) {
		ret = -EINVAL;
		goto unmap_sg;
	}

	/* Add a result descriptor */
	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
				   req->digest_sz);
	if (IS_ERR(rdesc)) {
		ret = PTR_ERR(rdesc);
		goto unmap_result;
	}

	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

	req->processed += len - extra;

	*commands = n_cdesc;
	*results = 1;
	return 0;

unmap_result:
	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
			 DMA_FROM_DEVICE);
unmap_sg:
	if (req->nents) {
		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
		req->nents = 0;
	}
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
	if (req->cache_dma) {
		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
				 DMA_TO_DEVICE);
		req->cache_dma = 0;
		req->cache_sz = 0;
	}

	return ret;
}

static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *async,
				      bool *should_complete, int *ret)
{
	struct safexcel_result_desc *rdesc;
	struct ahash_request *areq = ahash_request_cast(async);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
	int enq_ret;

	*ret = 0;

	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
	if (IS_ERR(rdesc)) {
		dev_err(priv->dev,
			"hash: invalidate: could not retrieve the result descriptor\n");
		*ret = PTR_ERR(rdesc);
	} else {
		*ret = safexcel_rdesc_check_errors(priv, rdesc);
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;
		return 1;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return 1;
}

static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
				  struct crypto_async_request *async,
				  bool *should_complete, int *ret)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int err;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

	if (req->needs_inv) {
		req->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_ahash_send_inv(struct crypto_async_request *async,
				   int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	int ret;

	ret = safexcel_invalidate_cache(async, ctx->base.priv,
					ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_ahash_send(struct crypto_async_request *async,
			       int ring, int *commands, int *results)
{
	struct ahash_request *areq = ahash_request_cast(async);
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	if (req->needs_inv)
		ret = safexcel_ahash_send_inv(async, ring, commands, results);
	else
		ret = safexcel_ahash_send_req(async, ring, commands, results);

	return ret;
}

static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
	struct safexcel_inv_result result = {};
	int ring = ctx->base.ring;

	memset(req, 0, EIP197_AHASH_REQ_SIZE);

	/* create invalidation request */
	init_completion(&result.completion);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_inv_complete, &result);

	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
	ctx = crypto_tfm_ctx(req->base.tfm);
	ctx->base.exit_inv = true;
	rctx->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	wait_for_completion(&result.completion);

	if (result.error) {
		dev_warn(priv->dev, "hash: completion error (%d)\n",
			 result.error);
		return result.error;
	}

	return 0;
}

/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, aka. when there is at least 1 block size in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	u64 cache_len;

	/* cache_len: everything accepted by the driver but not sent yet,
	 * tot sz handled by update() - last req sz - tot sz handled by send()
	 */
	cache_len = safexcel_queued_len(req);

	/*
	 * In case there isn't enough bytes to proceed (less than a
	 * block size), cache the data until we have enough.
	 */
	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
				   req->cache + cache_len,
				   areq->nbytes, 0);
		return 0;
	}

	/* We couldn't cache all the data */
	return -E2BIG;
}

static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	req->needs_inv = false;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
		    /* invalidate for *any* non-XCBC continuation */
		    ((req->not_first && !req->xcbcmac) ||
		     /* invalidate if (i)digest changed */
		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
		     /* invalidate for HMAC finish with odigest changed */
		     (req->finish && req->hmac &&
		      memcmp(ctx->base.ctxr->data + (req->state_sz >> 2),
			     &ctx->base.opad, req->state_sz))))
			/*
			 * We're still setting needs_inv here, even though it is
			 * cleared right away, because the needs_inv flag can be
			 * set in other functions and we want to keep the same
			 * logic.
			 */
			ctx->base.needs_inv = true;

		if (ctx->base.needs_inv) {
			ctx->base.needs_inv = false;
			req->needs_inv = true;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(areq->base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}
	req->not_first = true;

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

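/*
 * Editorial note: the invalidation logic above guards against the engine
 * reusing a stale cached transform record: any non-XCBC continuation, a
 * changed inner digest, or a changed outer digest on an HMAC finish
 * forces a one-shot invalidation request before the hash request itself
 * is queued.
 */
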
static int safexcel_ahash_update(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	int ret;

	/* If the request is 0 length, do nothing */
	if (!areq->nbytes)
		return 0;

	/* Add request to the cache if it fits */
	ret = safexcel_ahash_cache(areq);

	/* Update total request length */
	req->len += areq->nbytes;

	/* If not all data could fit into the cache, go process the excess.
	 * Also go process immediately for an HMAC IV precompute, which
	 * will never be finished at all, but needs to be processed anyway.
	 */
	if ((ret && !req->finish) || req->last_req)
		return safexcel_ahash_enqueue(areq);

	return 0;
}

static int safexcel_ahash_final(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

	req->finish = true;

	if (unlikely(!req->len && !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *hash* request:
		 * The HW cannot do 0 length hash, so we provide the correct
		 * result directly here.
		 */
		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
			memcpy(areq->result, md5_zero_message_hash,
			       MD5_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
			memcpy(areq->result, sha1_zero_message_hash,
			       SHA1_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
			memcpy(areq->result, sha224_zero_message_hash,
			       SHA224_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
			memcpy(areq->result, sha256_zero_message_hash,
			       SHA256_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
			memcpy(areq->result, sha384_zero_message_hash,
			       SHA384_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
			memcpy(areq->result, sha512_zero_message_hash,
			       SHA512_DIGEST_SIZE);
		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
			memcpy(areq->result,
			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
		}

		return 0;
	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
			    req->len == sizeof(u32) && !areq->nbytes)) {
		/* Zero length CRC32 */
		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
		return 0;
	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length CBC MAC */
		memset(areq->result, 0, AES_BLOCK_SIZE);
		return 0;
	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
			    !areq->nbytes)) {
		/* Zero length (X)CBC/CMAC */
		int i;

		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			u32 *result = (void *)areq->result;

			/* K3 */
			result[i] = swab(ctx->base.ipad.word[i + 4]);
		}
		areq->result[0] ^= 0x80;	// 10- padding
		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
		return 0;
	} else if (unlikely(req->hmac &&
			    (req->len == req->block_sz) &&
			    !areq->nbytes)) {
		/*
		 * If we have an overall 0 length *HMAC* request:
		 * For HMAC, we need to finalize the inner digest
		 * and then perform the outer hash.
		 */

		/* generate pad block in the cache */
		/* start with a hash block of all zeroes */
		memset(req->cache, 0, req->block_sz);
		/* set the first byte to 0x80 to 'append a 1 bit' */
		req->cache[0] = 0x80;
		/* add the length in bits in the last 2 bytes */
		if (req->len_is_le) {
			/* Little endian length word (e.g. MD5) */
			req->cache[req->block_sz - 8] = (req->block_sz << 3) &
							255;
			req->cache[req->block_sz - 7] = (req->block_sz >> 5);
		} else {
			/* Big endian length word (e.g. any SHA) */
			req->cache[req->block_sz - 2] = (req->block_sz >> 5);
			req->cache[req->block_sz - 1] = (req->block_sz << 3) &
							255;
		}

		req->len += req->block_sz; /* plus 1 hash block */

		/* Set special zero-length HMAC flag */
		req->hmac_zlen = true;

		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	} else if (req->hmac) {
		/* Finalize HMAC */
		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
	}

	return safexcel_ahash_enqueue(areq);
}

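/*
 * Worked example (editorial): for a zero-length HMAC-SHA1 the inner hash
 * has consumed exactly one 64-byte block (key ^ ipad), so the pad block
 * built above is 0x80 followed by zeroes with the bit length 512 = 0x0200
 * in the last two bytes: cache[62] = 0x02, cache[63] = 0x00. For MD5 the
 * same 512-bit count lands little endian at the start of the 8-byte
 * length field: cache[56] = 0x00, cache[57] = 0x02.
 */
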
static int safexcel_ahash_finup(struct ahash_request *areq)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	req->finish = true;

	safexcel_ahash_update(areq);
	return safexcel_ahash_final(areq);
}

static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	struct safexcel_ahash_export_state *export = out;

	export->len = req->len;
	export->processed = req->processed;

	export->digest = req->digest;

	memcpy(export->state, req->state, req->state_sz);
	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);

	return 0;
}

static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
	const struct safexcel_ahash_export_state *export = in;
	int ret;

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req->len = export->len;
	req->processed = export->processed;

	req->digest = export->digest;

	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
	memcpy(req->state, export->state, req->state_sz);

	return 0;
}

static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(__crypto_ahash_alg(tfm->__crt_alg),
			     struct safexcel_alg_template, alg.ahash);

	ctx->base.priv = tmpl->priv;
	ctx->base.send = safexcel_ahash_send;
	ctx->base.handle_result = safexcel_handle_result;
	ctx->fb_do_setkey = false;

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct safexcel_ahash_req));
	return 0;
}

static int safexcel_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_ahash_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

struct safexcel_alg_template safexcel_alg_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha1_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "safexcel-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

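/*
 * Illustrative sketch (editorial addition, not part of the driver): how a
 * kernel client would reach this algorithm through the generic ahash API.
 * The helper name example_sha1_digest and its buffer argument are
 * hypothetical; the crypto core picks the highest-priority "sha1"
 * provider, which may or may not be safexcel-sha1.
 */
#if 0
static int example_sha1_digest(const u8 *example_buf, unsigned int len,
			       u8 out[SHA1_DIGEST_SIZE])
{
	struct crypto_ahash *tfm = crypto_alloc_ahash("sha1", 0, 0);
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}
	sg_init_one(&sg, example_buf, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);
	/* wait synchronously for the asynchronous engine to finish */
	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return ret;
}
#endif
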
static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA1_BLOCK_SIZE;
	req->processed	= SHA1_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA1_DIGEST_SIZE;
	req->digest_sz = SHA1_DIGEST_SIZE;
	req->block_sz = SHA1_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha1_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_ahash_result {
	struct completion completion;
	int error;
};

static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
	struct safexcel_ahash_result *result = req->data;

	if (error == -EINPROGRESS)
		return;

	result->error = error;
	complete(&result->completion);
}

static int safexcel_hmac_init_pad(struct ahash_request *areq,
				  unsigned int blocksize, const u8 *key,
				  unsigned int keylen, u8 *ipad, u8 *opad)
{
	struct safexcel_ahash_result result;
	struct scatterlist sg;
	int ret, i;
	u8 *keydup;

	if (keylen <= blocksize) {
		memcpy(ipad, key, keylen);
	} else {
		keydup = kmemdup(key, keylen, GFP_KERNEL);
		if (!keydup)
			return -ENOMEM;

		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   safexcel_ahash_complete, &result);
		sg_init_one(&sg, keydup, keylen);
		ahash_request_set_crypt(areq, &sg, ipad, keylen);
		init_completion(&result.completion);

		ret = crypto_ahash_digest(areq);
		if (ret == -EINPROGRESS || ret == -EBUSY) {
			wait_for_completion_interruptible(&result.completion);
			ret = result.error;
		}

		/* Avoid leaking */
		kfree_sensitive(keydup);

		if (ret)
			return ret;

		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
	}

	memset(ipad + keylen, 0, blocksize - keylen);
	memcpy(opad, ipad, blocksize);

	for (i = 0; i < blocksize; i++) {
		ipad[i] ^= HMAC_IPAD_VALUE;
		opad[i] ^= HMAC_OPAD_VALUE;
	}

	return 0;
}

static int safexcel_hmac_init_iv(struct ahash_request *areq,
				 unsigned int blocksize, u8 *pad, void *state)
{
	struct safexcel_ahash_result result;
	struct safexcel_ahash_req *req;
	struct scatterlist sg;
	int ret;

	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   safexcel_ahash_complete, &result);
	sg_init_one(&sg, pad, blocksize);
	ahash_request_set_crypt(areq, &sg, pad, blocksize);
	init_completion(&result.completion);

	ret = crypto_ahash_init(areq);
	if (ret)
		return ret;

	req = ahash_request_ctx(areq);
	req->hmac = true;
	req->last_req = true;

	ret = crypto_ahash_update(areq);
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		return ret;

	wait_for_completion_interruptible(&result.completion);
	if (result.error)
		return result.error;

	return crypto_ahash_export(areq, state);
}

static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
				  unsigned int keylen,
				  void *istate, void *ostate)
{
	struct ahash_request *areq;
	struct crypto_ahash *tfm;
	unsigned int blocksize;
	u8 *ipad, *opad;
	int ret;

	tfm = crypto_alloc_ahash(alg, 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	areq = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!areq) {
		ret = -ENOMEM;
		goto free_ahash;
	}

	crypto_ahash_clear_flags(tfm, ~0);
	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

	ipad = kcalloc(2, blocksize, GFP_KERNEL);
	if (!ipad) {
		ret = -ENOMEM;
		goto free_request;
	}

	opad = ipad + blocksize;

	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
	if (ret)
		goto free_ipad;

	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
	kfree(ipad);
free_request:
	ahash_request_free(areq);
free_ahash:
	crypto_free_ahash(tfm);

	return ret;
}

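/*
 * Editorial note: the helper above implements the standard HMAC key
 * schedule (RFC 2104): hash the key if it is longer than a block, pad it
 * to the block size, XOR with the 0x36/0x5c constants, then run each
 * padded block through one compression round and export the intermediate
 * state. istate/ostate thus hold H(key ^ ipad) and H(key ^ opad) as
 * partial digests the engine can resume from without seeing the key.
 */
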
int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
			 unsigned int keylen, const char *alg,
			 unsigned int state_sz)
{
	struct safexcel_crypto_priv *priv = base->priv;
	struct safexcel_ahash_export_state istate, ostate;
	int ret;

	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
	    (memcmp(&base->ipad, istate.state, state_sz) ||
	     memcmp(&base->opad, ostate.state, state_sz)))
		base->needs_inv = true;

	memcpy(&base->ipad, &istate.state, state_sz);
	memcpy(&base->opad, &ostate.state, state_sz);

	return 0;
}

static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen, const char *alg,
				    unsigned int state_sz)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
}

static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
				     unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
					SHA1_DIGEST_SIZE);
}

struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA1,
	.alg.ahash = {
		.init = safexcel_hmac_sha1_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha1_digest,
		.setkey = safexcel_hmac_sha1_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "safexcel-hmac-sha1",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha256_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "safexcel-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha224_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha224",
				.cra_driver_name = "safexcel-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
					unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA256_BLOCK_SIZE;
	req->processed	= SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha224_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha224_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha224_digest,
		.setkey = safexcel_hmac_sha224_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA224_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "safexcel-hmac-sha224",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
					SHA256_DIGEST_SIZE);
}

static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA256_BLOCK_SIZE;
	req->processed	= SHA256_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA256_DIGEST_SIZE;
	req->digest_sz = SHA256_DIGEST_SIZE;
	req->block_sz = SHA256_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha256_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_256,
	.alg.ahash = {
		.init = safexcel_hmac_sha256_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha256_digest,
		.setkey = safexcel_hmac_sha256_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "safexcel-hmac-sha256",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha512_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha512",
				.cra_driver_name = "safexcel-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;

	return 0;
}

static int safexcel_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_sha384_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "sha384",
				.cra_driver_name = "safexcel-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA512_BLOCK_SIZE;
	req->processed	= SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha512_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha512_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha512_digest,
		.setkey = safexcel_hmac_sha512_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA512_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "safexcel-hmac-sha512",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
				       unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
					SHA512_DIGEST_SIZE);
}

static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= SHA512_BLOCK_SIZE;
	req->processed	= SHA512_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = SHA512_DIGEST_SIZE;
	req->digest_sz = SHA512_DIGEST_SIZE;
	req->block_sz = SHA512_BLOCK_SIZE;
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_sha384_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_SHA2_512,
	.alg.ahash = {
		.init = safexcel_hmac_sha384_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_sha384_digest,
		.setkey = safexcel_hmac_sha384_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = SHA384_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "safexcel-hmac-sha384",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;

	return 0;
}

static int safexcel_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_md5_digest,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "safexcel-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from ipad precompute */
	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
	/* Already processed the key^ipad part now! */
	req->len	= MD5_HMAC_BLOCK_SIZE;
	req->processed	= MD5_HMAC_BLOCK_SIZE;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
	req->state_sz = MD5_DIGEST_SIZE;
	req->digest_sz = MD5_DIGEST_SIZE;
	req->block_sz = MD5_HMAC_BLOCK_SIZE;
	req->len_is_le = true; /* MD5 is little endian! ... */
	req->hmac = true;

	return 0;
}

static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
				    unsigned int keylen)
{
	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
					MD5_DIGEST_SIZE);
}

static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
	int ret = safexcel_hmac_md5_init(areq);

	if (ret)
		return ret;

	return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_md5 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = SAFEXCEL_ALG_MD5,
	.alg.ahash = {
		.init = safexcel_hmac_md5_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_hmac_md5_digest,
		.setkey = safexcel_hmac_md5_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "safexcel-hmac-md5",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = safexcel_ahash_cra_init(tfm);

	/* Default 'key' is all zeroes */
	memset(&ctx->base.ipad, 0, sizeof(u32));
	return ret;
}

static int safexcel_crc32_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded key */
	req->state[0]	= cpu_to_le32(~ctx->base.ipad.word[0]);
	/* Set processed to non-zero to enable invalidation detection */
	req->len	= sizeof(u32);
	req->processed	= sizeof(u32);

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = sizeof(u32);
	req->digest_sz = sizeof(u32);
	req->block_sz = sizeof(u32);

	return 0;
}

static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));

	if (keylen != sizeof(u32))
		return -EINVAL;

	memcpy(&ctx->base.ipad, key, sizeof(u32));
	return 0;
}

static int safexcel_crc32_digest(struct ahash_request *areq)
{
	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_crc32 = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_crc32_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_crc32_digest,
		.setkey = safexcel_crc32_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = sizeof(u32),
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "crc32",
				.cra_driver_name = "safexcel-crc32",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
					     CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_crc32_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

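/*
 * Reference sketch (editorial addition, not driver code): the engine
 * state is seeded with ~key and the final XOR with 0xffffffff is undone
 * in safexcel_handle_req_result(), which should match this bit-reflected
 * software CRC32 with polynomial 0xedb88320 (default key = 0):
 */
#if 0
static u32 example_crc32(u32 crc, const u8 *p, size_t len)
{
	int i;

	crc = ~crc;			/* same as seeding state with ~key */
	while (len--) {
		crc ^= *p++;
		for (i = 0; i < 8; i++)
			crc = (crc >> 1) ^ (0xedb88320 & -(crc & 1));
	}
	return ~crc;			/* the final XOR */
}
#endif
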
static int safexcel_cbcmac_init(struct ahash_request *areq)
{
	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
	struct safexcel_ahash_req *req = ahash_request_ctx(areq);

	memset(req, 0, sizeof(*req));

	/* Start from loaded keys */
	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
	/* Set processed to non-zero to enable invalidation detection */
	req->len	= AES_BLOCK_SIZE;
	req->processed	= AES_BLOCK_SIZE;

	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
	req->state_sz = ctx->key_sz;
	req->digest_sz = AES_BLOCK_SIZE;
	req->block_sz = AES_BLOCK_SIZE;
	req->xcbcmac  = true;

	return 0;
}

static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
	for (i = 0; i < len / sizeof(u32); i++)
		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);

	if (len == AES_KEYSIZE_192) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else if (len == AES_KEYSIZE_256) {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	} else {
		ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	}
	ctx->cbcmac = true;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_cbcmac_digest(struct ahash_request *areq)
{
	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_cbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_cbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "cbcmac(aes)",
				.cra_driver_name = "safexcel-cbcmac-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = 1,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_ahash_cra_init,
				.cra_exit = safexcel_ahash_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				   unsigned int len)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct crypto_aes_ctx aes;
	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	/* precompute the XCBC key material */
	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes, key, len);
	if (ret)
		return ret;

	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.word[i] = swab(key_tmp[i]);

	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->kaes,
				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
				   AES_MIN_KEY_SIZE);
	if (ret)
		return ret;

	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
	ctx->cbcmac = false;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_ahash_cra_init(tfm);
	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->kaes);
}

static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->kaes);
	safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_xcbcmac = {
	.type = SAFEXCEL_ALG_TYPE_AHASH,
	.algo_mask = 0,
	.alg.ahash = {
		.init = safexcel_cbcmac_init,
		.update = safexcel_ahash_update,
		.final = safexcel_ahash_final,
		.finup = safexcel_ahash_finup,
		.digest = safexcel_cbcmac_digest,
		.setkey = safexcel_xcbcmac_setkey,
		.export = safexcel_ahash_export,
		.import = safexcel_ahash_import,
		.halg = {
			.digestsize = AES_BLOCK_SIZE,
			.statesize = sizeof(struct safexcel_ahash_export_state),
			.base = {
				.cra_name = "xcbc(aes)",
				.cra_driver_name = "safexcel-xcbc-aes",
				.cra_priority = SAFEXCEL_CRA_PRIORITY,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY |
					     CRYPTO_ALG_KERN_DRIVER_ONLY,
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
				.cra_init = safexcel_xcbcmac_cra_init,
				.cra_exit = safexcel_xcbcmac_cra_exit,
				.cra_module = THIS_MODULE,
			},
		},
	},
};

static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
                                unsigned int len)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct crypto_aes_ctx aes;
        __be64 consts[4];
        u64 _const[2];
        u8 msb_mask, gfmask;
        int ret, i;

        ret = aes_expandkey(&aes, key, len);
        if (ret)
                return ret;

        for (i = 0; i < len / sizeof(u32); i++)
                ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);

        /* precompute the CMAC key material */
        crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
        crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
                                CRYPTO_TFM_REQ_MASK);
        ret = crypto_cipher_setkey(ctx->kaes, key, len);
        if (ret)
                return ret;

        /* code below borrowed from crypto/cmac.c */
        /* encrypt the zero block */
        memset(consts, 0, AES_BLOCK_SIZE);
        crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);

        gfmask = 0x87;
        _const[0] = be64_to_cpu(consts[1]);
        _const[1] = be64_to_cpu(consts[0]);

        /* gf(2^128) multiply zero-ciphertext with u and u^2 */
        for (i = 0; i < 4; i += 2) {
                msb_mask = ((s64)_const[1] >> 63) & gfmask;
                _const[1] = (_const[1] << 1) | (_const[0] >> 63);
                _const[0] = (_const[0] << 1) ^ msb_mask;

                consts[i + 0] = cpu_to_be64(_const[1]);
                consts[i + 1] = cpu_to_be64(_const[0]);
        }
        /* end of code borrowed from crypto/cmac.c */
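
        /*
         * consts[] now holds the two CMAC subkeys, K1 = L * u and
         * K2 = L * u^2 in GF(2^128), where L is the encrypted zero block;
         * the 0x87 mask implements the field's reduction polynomial
         * x^128 + x^7 + x^2 + x + 1.
         */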
        for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
                ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);

        if (len == AES_KEYSIZE_192) {
                ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
        } else if (len == AES_KEYSIZE_256) {
                ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
                ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
        } else {
                ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
                ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
        }
        ctx->cbcmac = false;

        memzero_explicit(&aes, sizeof(aes));
        return 0;
}

struct safexcel_alg_template safexcel_alg_cmac = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = 0,
        .alg.ahash = {
                .init = safexcel_cbcmac_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_cbcmac_digest,
                .setkey = safexcel_cmac_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = AES_BLOCK_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "cmac(aes)",
                                .cra_driver_name = "safexcel-cmac-aes",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = AES_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_xcbcmac_cra_init,
                                .cra_exit = safexcel_xcbcmac_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sm3_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SM3_DIGEST_SIZE;
        req->digest_sz = SM3_DIGEST_SIZE;
        req->block_sz = SM3_BLOCK_SIZE;

        return 0;
}

static int safexcel_sm3_digest(struct ahash_request *areq)
{
        int ret = safexcel_sm3_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_sm3 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SM3,
        .alg.ahash = {
                .init = safexcel_sm3_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sm3_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SM3_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sm3",
                                .cra_driver_name = "safexcel-sm3",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SM3_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
                                    unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
                                        SM3_DIGEST_SIZE);
}
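
/*
 * The generic precompute helper above hashes key ^ ipad and key ^ opad
 * with the named driver hash and caches the intermediate digests in the
 * context; safexcel_hmac_sm3_init() then loads them as the starting
 * state for the DIGEST_PRECOMPUTED mode.
 */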
static int safexcel_hmac_sm3_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Start from ipad precompute */
        memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
        /* Already processed the key^ipad part now! */
        req->len = SM3_BLOCK_SIZE;
        req->processed = SM3_BLOCK_SIZE;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SM3_DIGEST_SIZE;
        req->digest_sz = SM3_DIGEST_SIZE;
        req->block_sz = SM3_BLOCK_SIZE;
        req->hmac = true;

        return 0;
}

static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sm3_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}

struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SM3,
        .alg.ahash = {
                .init = safexcel_hmac_sm3_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sm3_digest,
                .setkey = safexcel_hmac_sm3_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SM3_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sm3)",
                                .cra_driver_name = "safexcel-hmac-sm3",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_ALLOCATES_MEMORY |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SM3_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha3_224_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_224_DIGEST_SIZE;
        req->digest_sz = SHA3_224_DIGEST_SIZE;
        req->block_sz = SHA3_224_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}
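
/*
 * The engine can only do SHA3 as a single-shot operation: partial
 * updates, state export/import and zero-length messages all have to go
 * through a software fallback ahash instead. safexcel_sha3_fbcheck()
 * lazily points the sub-request at the fallback tfm and, for the HMAC
 * variants, pushes the deferred key into it before first use.
 */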
static int safexcel_sha3_fbcheck(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);
        int ret = 0;

        if (ctx->do_fallback) {
                ahash_request_set_tfm(subreq, ctx->fback);
                ahash_request_set_callback(subreq, req->base.flags,
                                           req->base.complete, req->base.data);
                ahash_request_set_crypt(subreq, req->src, req->result,
                                        req->nbytes);
                if (!ctx->fb_init_done) {
                        if (ctx->fb_do_setkey) {
                                /* Set fallback cipher HMAC key */
                                u8 key[SHA3_224_BLOCK_SIZE];

                                memcpy(key, &ctx->base.ipad,
                                       crypto_ahash_blocksize(ctx->fback) / 2);
                                memcpy(key +
                                       crypto_ahash_blocksize(ctx->fback) / 2,
                                       &ctx->base.opad,
                                       crypto_ahash_blocksize(ctx->fback) / 2);
                                ret = crypto_ahash_setkey(ctx->fback, key,
                                                          crypto_ahash_blocksize(ctx->fback));
                                memzero_explicit(key,
                                                 crypto_ahash_blocksize(ctx->fback));
                                ctx->fb_do_setkey = false;
                        }
                        ret = ret ?: crypto_ahash_init(subreq);
                        ctx->fb_init_done = true;
                }
        }
        return ret;
}

static int safexcel_sha3_update(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);

        ctx->do_fallback = true;
        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
}

static int safexcel_sha3_final(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);

        ctx->do_fallback = true;
        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
}

static int safexcel_sha3_finup(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);

        ctx->do_fallback |= !req->nbytes;
        if (ctx->do_fallback)
                /* Update or ex/import happened or len 0, cannot use the HW */
                return safexcel_sha3_fbcheck(req) ?:
                       crypto_ahash_finup(subreq);
        else
                return safexcel_ahash_finup(req);
}
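
/*
 * Full digest on the fallback: clear fb_init_done first so that
 * fbcheck() re-runs crypto_ahash_init() on the sub-request before the
 * finup, giving a complete init+finup sequence in software.
 */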
static int safexcel_sha3_digest_fallback(struct ahash_request *req)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);

        ctx->do_fallback = true;
        ctx->fb_init_done = false;
        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
}

static int safexcel_sha3_224_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_sha3_export(struct ahash_request *req, void *out)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);

        ctx->do_fallback = true;
        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
}

static int safexcel_sha3_import(struct ahash_request *req, const void *in)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *subreq = ahash_request_ctx(req);

        ctx->do_fallback = true;
        return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
        // return safexcel_ahash_import(req, in);
}

static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
{
        struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_ahash_cra_init(tfm);

        /* Allocate fallback implementation */
        ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
                                        CRYPTO_ALG_ASYNC |
                                        CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->fback))
                return PTR_ERR(ctx->fback);

        /* Update statesize from fallback algorithm! */
        crypto_hash_alg_common(ahash)->statesize =
                crypto_ahash_statesize(ctx->fback);
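
        /*
         * The request context also serves as the fallback's own
         * ahash_request (see the ahash_request_ctx() casts above), so
         * the reqsize must cover whichever of the two layouts is larger.
         */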
        crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
                                            sizeof(struct ahash_request) +
                                            crypto_ahash_reqsize(ctx->fback)));
        return 0;
}

static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_ahash(ctx->fback);
        safexcel_ahash_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_sha3_224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_224_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_224_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-224",
                                .cra_driver_name = "safexcel-sha3-224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha3_256_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_256_DIGEST_SIZE;
        req->digest_sz = SHA3_256_DIGEST_SIZE;
        req->block_sz = SHA3_256_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_256_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_256_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_256_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-256",
                                .cra_driver_name = "safexcel-sha3-256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha3_384_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_384_DIGEST_SIZE;
        req->digest_sz = SHA3_384_DIGEST_SIZE;
        req->block_sz = SHA3_384_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_384_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_384_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_384_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-384",
                                .cra_driver_name = "safexcel-sha3-384",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_sha3_512_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
        req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
        req->state_sz = SHA3_512_DIGEST_SIZE;
        req->digest_sz = SHA3_512_DIGEST_SIZE;
        req->block_sz = SHA3_512_BLOCK_SIZE;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_sha3_512_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);

        /* HW cannot do zero length hash, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

struct safexcel_alg_template safexcel_alg_sha3_512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_sha3_512_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_sha3_512_digest,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha3-512",
                                .cra_driver_name = "safexcel-sha3-512",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_sha3_cra_init,
                                .cra_exit = safexcel_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
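
/*
 * The HMAC-SHA3 variants additionally keep a synchronous shash of the
 * underlying basic hash (shpre/shdesc). It is only used at setkey time,
 * to condense keys longer than the block size down to a single digest.
 */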
static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = safexcel_sha3_cra_init(tfm);
        if (ret)
                return ret;

        /* Allocate precalc basic digest implementation */
        ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(ctx->shpre))
                return PTR_ERR(ctx->shpre);

        ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
                              crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
        if (!ctx->shdesc) {
                crypto_free_shash(ctx->shpre);
                return -ENOMEM;
        }
        ctx->shdesc->tfm = ctx->shpre;
        return 0;
}

static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

        crypto_free_ahash(ctx->fback);
        crypto_free_shash(ctx->shpre);
        kfree(ctx->shdesc);
        safexcel_ahash_cra_exit(tfm);
}

static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        int ret = 0;

        if (keylen > crypto_ahash_blocksize(tfm)) {
                /*
                 * If the key is larger than the blocksize, then hash it
                 * first using our fallback cipher
                 */
                ret = crypto_shash_digest(ctx->shdesc, key, keylen,
                                          ctx->base.ipad.byte);
                keylen = crypto_shash_digestsize(ctx->shpre);

                /*
                 * If the digest is larger than half the blocksize, we need to
                 * move the rest to opad due to the way our HMAC infra works.
                 */
                if (keylen > crypto_ahash_blocksize(tfm) / 2)
                        /* Buffers overlap, need to use memmove iso memcpy! */
                        memmove(&ctx->base.opad,
                                ctx->base.ipad.byte +
                                crypto_ahash_blocksize(tfm) / 2,
                                keylen - crypto_ahash_blocksize(tfm) / 2);
        } else {
                /*
                 * Copy the key to our ipad & opad buffers
                 * Note that ipad and opad each contain one half of the key,
                 * to match the existing HMAC driver infrastructure.
                 */
                if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
                        memcpy(&ctx->base.ipad, key, keylen);
                } else {
                        memcpy(&ctx->base.ipad, key,
                               crypto_ahash_blocksize(tfm) / 2);
                        memcpy(&ctx->base.opad,
                               key + crypto_ahash_blocksize(tfm) / 2,
                               keylen - crypto_ahash_blocksize(tfm) / 2);
                }
        }

        /* Pad key with zeroes */
        if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
                memset(ctx->base.ipad.byte + keylen, 0,
                       crypto_ahash_blocksize(tfm) / 2 - keylen);
                memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
        } else {
                memset(ctx->base.opad.byte + keylen -
                       crypto_ahash_blocksize(tfm) / 2, 0,
                       crypto_ahash_blocksize(tfm) - keylen);
        }

        /* If doing fallback, still need to set the new key! */
        ctx->fb_do_setkey = true;

        return ret;
}

static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len = SHA3_224_BLOCK_SIZE;
        req->processed = SHA3_224_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_224_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_224_DIGEST_SIZE;
        req->block_sz = SHA3_224_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_224_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_224_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_224_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-224)",
                                .cra_driver_name = "safexcel-hmac-sha3-224",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_224_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len = SHA3_256_BLOCK_SIZE;
        req->processed = SHA3_256_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_256_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_256_DIGEST_SIZE;
        req->block_sz = SHA3_256_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_256_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_256_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_256_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-256)",
                                .cra_driver_name = "safexcel-hmac-sha3-256",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_256_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len = SHA3_384_BLOCK_SIZE;
        req->processed = SHA3_384_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_384_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_384_DIGEST_SIZE;
        req->block_sz = SHA3_384_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_384_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_384_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_384_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-384)",
                                .cra_driver_name = "safexcel-hmac-sha3-384",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_384_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};

static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
{
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        /* Copy (half of) the key */
        memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
        /* Start of HMAC should have len == processed == blocksize */
        req->len = SHA3_512_BLOCK_SIZE;
        req->processed = SHA3_512_BLOCK_SIZE;
        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        req->state_sz = SHA3_512_BLOCK_SIZE / 2;
        req->digest_sz = SHA3_512_DIGEST_SIZE;
        req->block_sz = SHA3_512_BLOCK_SIZE;
        req->hmac = true;
        ctx->do_fallback = false;
        ctx->fb_init_done = false;
        return 0;
}

static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
{
        if (req->nbytes)
                return safexcel_hmac_sha3_512_init(req) ?:
                       safexcel_ahash_finup(req);

        /* HW cannot do zero length HMAC, use fallback instead */
        return safexcel_sha3_digest_fallback(req);
}

static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
{
        return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
}

struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .algo_mask = SAFEXCEL_ALG_SHA3,
        .alg.ahash = {
                .init = safexcel_hmac_sha3_512_init,
                .update = safexcel_sha3_update,
                .final = safexcel_sha3_final,
                .finup = safexcel_sha3_finup,
                .digest = safexcel_hmac_sha3_512_digest,
                .setkey = safexcel_hmac_sha3_setkey,
                .export = safexcel_sha3_export,
                .import = safexcel_sha3_import,
                .halg = {
                        .digestsize = SHA3_512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha3-512)",
                                .cra_driver_name = "safexcel-hmac-sha3-512",
                                .cra_priority = SAFEXCEL_CRA_PRIORITY,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY |
                                             CRYPTO_ALG_NEED_FALLBACK,
                                .cra_blocksize = SHA3_512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_hmac_sha3_512_cra_init,
                                .cra_exit = safexcel_hmac_sha3_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};