// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <crypto/hmac.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>

#include "safexcel.h"
struct safexcel_ahash_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 alg;

        u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
        u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
};
struct safexcel_ahash_req {
        bool last_req;
        bool finish;
        bool hmac;
        bool needs_inv;

        int nents;
        dma_addr_t result_dma;

        u32 digest;

        u8 state_sz;    /* expected state size, only set once */
        u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));

        u64 len[2];
        u64 processed[2];

        u8 cache[SHA512_BLOCK_SIZE] __aligned(sizeof(u32));
        dma_addr_t cache_dma;
        unsigned int cache_sz;

        u8 cache_next[SHA512_BLOCK_SIZE] __aligned(sizeof(u32));
};
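/*
 * The total length fed to update() and the length already processed by the
 * engine are each tracked as a two-word quantity, the second word counting
 * overflows of the first. safexcel_queued_len() returns how much data has
 * been accepted by the driver but not yet sent, accounting for at most one
 * pending overflow of the low word.
 */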
static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
{
        if (req->len[1] > req->processed[1])
                return 0xffffffff - (req->len[0] - req->processed[0]);

        return req->len[0] - req->processed[0];
}
static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
                                u32 input_length, u32 result_length)
{
        struct safexcel_token *token =
                (struct safexcel_token *)cdesc->control_data.token;

        token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token[0].packet_length = input_length;
        token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
        token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;

        token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
        token[1].packet_length = result_length;
        token[1].stat = EIP197_TOKEN_STAT_LAST_HASH |
                        EIP197_TOKEN_STAT_LAST_PACKET;
        token[1].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
}
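/*
 * The CONTEXT_CONTROL_SIZE() values below give the number of 32-bit context
 * words the engine must reload when resuming a precomputed hash: the
 * algorithm's state words plus one word for the block counter, i.e. 4 + 1
 * for MD5, 5 + 1 for SHA1, 8 + 1 for SHA224/SHA256 and 16 + 1 for
 * SHA384/SHA512.
 */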
static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
                                     struct safexcel_ahash_req *req,
                                     struct safexcel_command_desc *cdesc,
                                     unsigned int digestsize)
{
        struct safexcel_crypto_priv *priv = ctx->priv;
        int i;

        cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_OUT;
        cdesc->control_data.control0 |= ctx->alg;
        cdesc->control_data.control0 |= req->digest;

        if (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) {
                if (req->processed[0] || req->processed[1]) {
                        if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(5);
                        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(6);
                        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224 ||
                                 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(9);
                        else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384 ||
                                 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(17);

                        cdesc->control_data.control1 |= CONTEXT_CONTROL_DIGEST_CNT;
                } else {
                        cdesc->control_data.control0 |= CONTEXT_CONTROL_RESTART_HASH;
                }

                if (!req->finish)
                        cdesc->control_data.control0 |= CONTEXT_CONTROL_NO_FINISH_HASH;

                /*
                 * Copy the input digest if needed, and setup the context
                 * fields. Do this now as we need it to setup the first command
                 * descriptor.
                 */
                if (req->processed[0] || req->processed[1]) {
                        for (i = 0; i < digestsize / sizeof(u32); i++)
                                ctx->base.ctxr->data[i] = cpu_to_le32(req->state[i]);

                        if (req->finish) {
                                u64 count = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE;
                                count += ((0xffffffff / EIP197_COUNTER_BLOCK_SIZE) *
                                          req->processed[1]);

                                /* This is a hardware limitation, as the
                                 * counter must fit into a u32. This represents
                                 * a fairly big amount of input data, so we
                                 * shouldn't see this.
                                 */
                                if (unlikely(count & 0xffff0000)) {
                                        dev_warn(priv->dev,
                                                 "Input data is too big\n");
                                        return;
                                }

                                ctx->base.ctxr->data[i] = cpu_to_le32(count);
                        }
                }
        } else if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC) {
                cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(2 * req->state_sz / sizeof(u32));

                memcpy(ctx->base.ctxr->data, ctx->ipad, req->state_sz);
                memcpy(ctx->base.ctxr->data + req->state_sz / sizeof(u32),
                       ctx->opad, req->state_sz);
        }
}
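/*
 * Ring completion path for a regular hash request: fetch the result
 * descriptor, release the DMA mappings taken in send(), copy the digest out
 * for a finished request and preserve any bytes that were cached for the
 * next send() call.
 */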
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
        u64 cache_len;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: result: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (sreq->nents) {
                dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
                sreq->nents = 0;
        }

        if (sreq->result_dma) {
                dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
                                 DMA_FROM_DEVICE);
                sreq->result_dma = 0;
        }

        if (sreq->cache_dma) {
                dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
                                 DMA_TO_DEVICE);
                sreq->cache_dma = 0;
        }

        if (sreq->finish)
                memcpy(areq->result, sreq->state,
                       crypto_ahash_digestsize(ahash));

        cache_len = safexcel_queued_len(sreq);
        if (cache_len)
                memcpy(sreq->cache, sreq->cache_next, cache_len);

        *should_complete = true;

        return 1;
}
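/*
 * A hash request is built from the (optionally) cached bytes of previous
 * calls plus the current scatterlist. Unless this is the last request, any
 * trailing partial block is copied into cache_next for the next send()
 * call; when the queued data is an exact multiple of the block size, one
 * full block is deliberately held back so the engine is never handed what
 * could be the final block too early.
 */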
static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
                                   int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc;
        struct scatterlist *sg;
        int i, extra, n_cdesc = 0, ret = 0;
        u64 queued, len, cache_len;

        queued = len = safexcel_queued_len(req);
        if (queued <= crypto_ahash_blocksize(ahash))
                cache_len = queued;
        else
                cache_len = queued - areq->nbytes;

        if (!req->last_req) {
                /* If this is not the last request and the queued data does not
                 * fit into full blocks, cache it for the next send() call.
                 */
                extra = queued & (crypto_ahash_blocksize(ahash) - 1);
                if (!extra)
                        /* If this is not the last request and the queued data
                         * is a multiple of a block, cache the last one for now.
                         */
                        extra = crypto_ahash_blocksize(ahash);

                if (extra) {
                        sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                           req->cache_next, extra,
                                           areq->nbytes - extra);

                        queued -= extra;
                        len -= extra;

                        if (!queued) {
                                *commands = 0;
                                *results = 0;
                                return 0;
                        }
                }
        }

        /* Add a command descriptor for the cached data, if any */
        if (cache_len) {
                req->cache_dma = dma_map_single(priv->dev, req->cache,
                                                cache_len, DMA_TO_DEVICE);
                if (dma_mapping_error(priv->dev, req->cache_dma))
                        return -EINVAL;

                req->cache_sz = cache_len;
                first_cdesc = safexcel_add_cdesc(priv, ring, 1,
                                                 (cache_len == len),
                                                 req->cache_dma, cache_len, len,
                                                 ctx->base.ctxr_dma);
                if (IS_ERR(first_cdesc)) {
                        ret = PTR_ERR(first_cdesc);
                        goto unmap_cache;
                }
                n_cdesc++;

                queued -= cache_len;
                if (!queued)
                        goto send_command;
        }

        /* Now handle the current ahash request buffer(s) */
        req->nents = dma_map_sg(priv->dev, areq->src,
                                sg_nents_for_len(areq->src, areq->nbytes),
                                DMA_TO_DEVICE);
        if (!req->nents) {
                ret = -ENOMEM;
                goto cdesc_rollback;
        }

        for_each_sg(areq->src, sg, req->nents, i) {
                int sglen = sg_dma_len(sg);

                /* Do not overflow the request */
                if (queued < sglen)
                        sglen = queued;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - sglen), sg_dma_address(sg),
                                           sglen, len, ctx->base.ctxr_dma);
                if (IS_ERR(cdesc)) {
                        ret = PTR_ERR(cdesc);
                        goto unmap_sg;
                }
                n_cdesc++;

                if (n_cdesc == 1)
                        first_cdesc = cdesc;

                queued -= sglen;
                if (!queued)
                        break;
        }

send_command:
        /* Setup the context options */
        safexcel_context_control(ctx, req, first_cdesc, req->state_sz);

        /* Add the token */
        safexcel_hash_token(first_cdesc, len, req->state_sz);

        req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
                                         DMA_FROM_DEVICE);
        if (dma_mapping_error(priv->dev, req->result_dma)) {
                ret = -EINVAL;
                goto unmap_sg;
        }

        /* Add a result descriptor */
        rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
                                   req->state_sz);
        if (IS_ERR(rdesc)) {
                ret = PTR_ERR(rdesc);
                goto unmap_result;
        }

        safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);

        req->processed[0] += len;
        if (req->processed[0] < len)
                req->processed[1]++;

        *commands = n_cdesc;
        *results = 1;
        return 0;

unmap_result:
        dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
                         DMA_FROM_DEVICE);
unmap_sg:
        dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap_cache:
        if (req->cache_dma) {
                dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
                                 DMA_TO_DEVICE);
                req->cache_sz = 0;
        }

        return ret;
}
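/*
 * With the transform record cache (TRC) enabled, the context record held by
 * the hardware may no longer match the software state. Compare the cached
 * state words and block count against our copy to decide whether an
 * invalidation request must be sent before the context is reused.
 */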
static inline bool safexcel_ahash_needs_inv_get(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        unsigned int state_w_sz = req->state_sz / sizeof(u32);
        u64 processed;
        int i;

        processed = req->processed[0] / EIP197_COUNTER_BLOCK_SIZE;
        processed += (0xffffffff / EIP197_COUNTER_BLOCK_SIZE) * req->processed[1];

        for (i = 0; i < state_w_sz; i++)
                if (ctx->base.ctxr->data[i] != cpu_to_le32(req->state[i]))
                        return true;

        if (ctx->base.ctxr->data[state_w_sz] != cpu_to_le32(processed))
                return true;

        return false;
}
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *async,
                                      bool *should_complete, int *ret)
{
        struct safexcel_result_desc *rdesc;
        struct ahash_request *areq = ahash_request_cast(async);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
        int enq_ret;

        *ret = 0;

        rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
        if (IS_ERR(rdesc)) {
                dev_err(priv->dev,
                        "hash: invalidate: could not retrieve the result descriptor\n");
                *ret = PTR_ERR(rdesc);
        } else {
                *ret = safexcel_rdesc_check_errors(priv, rdesc);
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;
                return 1;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return 1;
}
static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
                                  struct crypto_async_request *async,
                                  bool *should_complete, int *ret)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int err;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);

        if (req->needs_inv) {
                req->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async,
                                                 should_complete, ret);
        }

        return err;
}
static int safexcel_ahash_send_inv(struct crypto_async_request *async,
                                   int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        int ret;

        ret = safexcel_invalidate_cache(async, ctx->priv,
                                        ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}
static int safexcel_ahash_send(struct crypto_async_request *async,
                               int ring, int *commands, int *results)
{
        struct ahash_request *areq = ahash_request_cast(async);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        int ret;

        if (req->needs_inv)
                ret = safexcel_ahash_send_inv(async, ring, commands, results);
        else
                ret = safexcel_ahash_send_req(async, ring, commands, results);

        return ret;
}
static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
        struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
        struct safexcel_inv_result result = {};
        int ring = ctx->base.ring;

        memset(req, 0, sizeof(struct ahash_request));

        /* create invalidation request */
        init_completion(&result.completion);
        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_inv_complete, &result);

        ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
        ctx = crypto_tfm_ctx(req->base.tfm);
        ctx->base.exit_inv = true;
        rctx->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        wait_for_completion(&result.completion);

        if (result.error) {
                dev_warn(priv->dev, "hash: completion error (%d)\n",
                         result.error);
                return result.error;
        }

        return 0;
}
/* safexcel_ahash_cache: cache data until at least one request can be sent to
 * the engine, i.e. when there is at least one block of data in the pipe.
 */
static int safexcel_ahash_cache(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        u64 queued, cache_len;

        /* queued: everything accepted by the driver which will be handled by
         * the next send() calls.
         * tot sz handled by update() - tot sz handled by send()
         */
        queued = safexcel_queued_len(req);
        /* cache_len: everything accepted by the driver but not sent yet,
         * tot sz handled by update() - last req sz - tot sz handled by send()
         */
        cache_len = queued - areq->nbytes;

        /*
         * In case there aren't enough bytes to proceed (less than a
         * block size), cache the data until we have enough.
         */
        if (cache_len + areq->nbytes <= crypto_ahash_blocksize(ahash)) {
                sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
                                   req->cache + cache_len,
                                   areq->nbytes, 0);
                return areq->nbytes;
        }

        /* We couldn't cache all the data */
        return -E2BIG;
}
static int safexcel_ahash_enqueue(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret, ring;

        req->needs_inv = false;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
                    (req->processed[0] || req->processed[1]) &&
                    req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)
                        /* We're still setting needs_inv here, even though it is
                         * cleared right away, because the needs_inv flag can be
                         * set in other functions and we want to keep the same
                         * logic.
                         */
                        ctx->base.needs_inv = safexcel_ahash_needs_inv_get(areq);

                if (ctx->base.needs_inv) {
                        ctx->base.needs_inv = false;
                        req->needs_inv = true;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(areq->base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}
static int safexcel_ahash_update(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);

        /* If the request is 0 length, do nothing */
        if (!areq->nbytes)
                return 0;

        req->len[0] += areq->nbytes;
        if (req->len[0] < areq->nbytes)
                req->len[1]++;

        safexcel_ahash_cache(areq);

        /*
         * We're not doing partial updates when performing an HMAC request.
         * Everything will be handled by the final() call.
         */
        if (req->digest == CONTEXT_CONTROL_DIGEST_HMAC)
                return 0;

        if (req->hmac)
                return safexcel_ahash_enqueue(areq);

        if (!req->last_req &&
            safexcel_queued_len(req) > crypto_ahash_blocksize(ahash))
                return safexcel_ahash_enqueue(areq);

        return 0;
}
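/*
 * A zero-length final request never goes to the hardware: the well-known
 * digest of the empty message for the selected algorithm is copied straight
 * into the result buffer instead.
 */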
static int safexcel_ahash_final(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));

        req->last_req = true;
        req->finish = true;

        /* If we have an overall 0 length request */
        if (!req->len[0] && !req->len[1] && !areq->nbytes) {
                if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
                        memcpy(areq->result, md5_zero_message_hash,
                               MD5_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
                        memcpy(areq->result, sha1_zero_message_hash,
                               SHA1_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
                        memcpy(areq->result, sha224_zero_message_hash,
                               SHA224_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
                        memcpy(areq->result, sha256_zero_message_hash,
                               SHA256_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
                        memcpy(areq->result, sha384_zero_message_hash,
                               SHA384_DIGEST_SIZE);
                else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
                        memcpy(areq->result, sha512_zero_message_hash,
                               SHA512_DIGEST_SIZE);

                return 0;
        }

        return safexcel_ahash_enqueue(areq);
}
static int safexcel_ahash_finup(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        req->last_req = true;
        req->finish = true;

        safexcel_ahash_update(areq);
        return safexcel_ahash_final(areq);
}
static int safexcel_ahash_export(struct ahash_request *areq, void *out)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        struct safexcel_ahash_export_state *export = out;

        export->len[0] = req->len[0];
        export->len[1] = req->len[1];
        export->processed[0] = req->processed[0];
        export->processed[1] = req->processed[1];

        export->digest = req->digest;

        memcpy(export->state, req->state, req->state_sz);
        memcpy(export->cache, req->cache, crypto_ahash_blocksize(ahash));

        return 0;
}
static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);
        const struct safexcel_ahash_export_state *export = in;
        int ret;

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req->len[0] = export->len[0];
        req->len[1] = export->len[1];
        req->processed[0] = export->processed[0];
        req->processed[1] = export->processed[1];

        req->digest = export->digest;

        memcpy(req->cache, export->cache, crypto_ahash_blocksize(ahash));
        memcpy(req->state, export->state, req->state_sz);

        return 0;
}
static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(__crypto_ahash_alg(tfm->__crt_alg),
                             struct safexcel_alg_template, alg.ahash);

        ctx->priv = tmpl->priv;
        ctx->base.send = safexcel_ahash_send;
        ctx->base.handle_result = safexcel_handle_result;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct safexcel_ahash_req));
        return 0;
}
static int safexcel_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = SHA1_H0;
        req->state[1] = SHA1_H1;
        req->state[2] = SHA1_H2;
        req->state[3] = SHA1_H3;
        req->state[4] = SHA1_H4;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA1_DIGEST_SIZE;

        return 0;
}
static int safexcel_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->priv;
        int ret;

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_ahash_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}
struct safexcel_alg_template safexcel_alg_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha1_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha1",
                                .cra_driver_name = "safexcel-sha1",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_hmac_sha1_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha1_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}
static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha1_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_ahash_result {
        struct completion completion;
        int error;
};
static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
{
        struct safexcel_ahash_result *result = req->data;

        if (error == -EINPROGRESS)
                return;

        result->error = error;
        complete(&result->completion);
}
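/*
 * Standard HMAC key preprocessing: a key longer than the block size is first
 * hashed down to the digest size, the key is then zero-padded to a full
 * block, and the inner/outer pads are derived by XORing every byte with the
 * HMAC ipad/opad constants.
 */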
static int safexcel_hmac_init_pad(struct ahash_request *areq,
                                  unsigned int blocksize, const u8 *key,
                                  unsigned int keylen, u8 *ipad, u8 *opad)
{
        struct safexcel_ahash_result result;
        struct scatterlist sg;
        int ret, i;
        u8 *keydup;

        if (keylen <= blocksize) {
                memcpy(ipad, key, keylen);
        } else {
                keydup = kmemdup(key, keylen, GFP_KERNEL);
                if (!keydup)
                        return -ENOMEM;

                ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                           safexcel_ahash_complete, &result);
                sg_init_one(&sg, keydup, keylen);
                ahash_request_set_crypt(areq, &sg, ipad, keylen);
                init_completion(&result.completion);

                ret = crypto_ahash_digest(areq);
                if (ret == -EINPROGRESS || ret == -EBUSY) {
                        wait_for_completion_interruptible(&result.completion);
                        ret = result.error;
                }

                /* Avoid leaking */
                memzero_explicit(keydup, keylen);
                kfree(keydup);

                if (ret)
                        return ret;

                keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
        }

        memset(ipad + keylen, 0, blocksize - keylen);
        memcpy(opad, ipad, blocksize);

        for (i = 0; i < blocksize; i++) {
                ipad[i] ^= HMAC_IPAD_VALUE;
                opad[i] ^= HMAC_OPAD_VALUE;
        }

        return 0;
}
static int safexcel_hmac_init_iv(struct ahash_request *areq,
                                 unsigned int blocksize, u8 *pad, void *state)
{
        struct safexcel_ahash_result result;
        struct safexcel_ahash_req *req;
        struct scatterlist sg;
        int ret;

        ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   safexcel_ahash_complete, &result);
        sg_init_one(&sg, pad, blocksize);
        ahash_request_set_crypt(areq, &sg, pad, blocksize);
        init_completion(&result.completion);

        ret = crypto_ahash_init(areq);
        if (ret)
                return ret;

        req = ahash_request_ctx(areq);
        req->hmac = true;
        req->last_req = true;

        ret = crypto_ahash_update(areq);
        if (ret && ret != -EINPROGRESS && ret != -EBUSY)
                return ret;

        wait_for_completion_interruptible(&result.completion);
        if (result.error)
                return result.error;

        return crypto_ahash_export(areq, state);
}
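/*
 * safexcel_hmac_setkey() builds the precomputed inner and outer HMAC states:
 * each pad is run through a single init()/update() pass of the underlying
 * safexcel hash and the resulting midstate is exported rather than
 * finalized, so the engine can later resume from it.
 */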
int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
                         void *istate, void *ostate)
{
        struct ahash_request *areq;
        struct crypto_ahash *tfm;
        unsigned int blocksize;
        u8 *ipad, *opad;
        int ret;

        tfm = crypto_alloc_ahash(alg, 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        areq = ahash_request_alloc(tfm, GFP_KERNEL);
        if (!areq) {
                ret = -ENOMEM;
                goto free_ahash;
        }

        crypto_ahash_clear_flags(tfm, ~0);
        blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));

        ipad = kcalloc(2, blocksize, GFP_KERNEL);
        if (!ipad) {
                ret = -ENOMEM;
                goto free_request;
        }

        opad = ipad + blocksize;

        ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
        if (ret)
                goto free_ipad;

        ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);

free_ipad:
        kfree(ipad);
free_request:
        ahash_request_free(areq);
free_ahash:
        crypto_free_ahash(tfm);

        return ret;
}
static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
                                    unsigned int keylen, const char *alg,
                                    unsigned int state_sz)
{
        struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
        struct safexcel_crypto_priv *priv = ctx->priv;
        struct safexcel_ahash_export_state istate, ostate;
        int ret, i;

        ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr) {
                for (i = 0; i < state_sz / sizeof(u32); i++) {
                        if (ctx->ipad[i] != le32_to_cpu(istate.state[i]) ||
                            ctx->opad[i] != le32_to_cpu(ostate.state[i])) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        memcpy(ctx->ipad, &istate.state, state_sz);
        memcpy(ctx->opad, &ostate.state, state_sz);

        return 0;
}
static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
                                     unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
                                        SHA1_DIGEST_SIZE);
}
struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha1_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha1_digest,
                .setkey = safexcel_hmac_sha1_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA1_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha1)",
                                .cra_driver_name = "safexcel-hmac-sha1",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA1_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = SHA256_H0;
        req->state[1] = SHA256_H1;
        req->state[2] = SHA256_H2;
        req->state[3] = SHA256_H3;
        req->state[4] = SHA256_H4;
        req->state[5] = SHA256_H5;
        req->state[6] = SHA256_H6;
        req->state[7] = SHA256_H7;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;

        return 0;
}
static int safexcel_sha256_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha256_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_sha256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha256_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha256_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha256",
                                .cra_driver_name = "safexcel-sha256",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = SHA224_H0;
        req->state[1] = SHA224_H1;
        req->state[2] = SHA224_H2;
        req->state[3] = SHA224_H3;
        req->state[4] = SHA224_H4;
        req->state[5] = SHA224_H5;
        req->state[6] = SHA224_H6;
        req->state[7] = SHA224_H7;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA256_DIGEST_SIZE;

        return 0;
}
static int safexcel_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha224_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha224",
                                .cra_driver_name = "safexcel-sha224",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
                                        SHA256_DIGEST_SIZE);
}
static int safexcel_hmac_sha224_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha224_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}
static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha224_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha224_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha224_digest,
                .setkey = safexcel_hmac_sha224_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA224_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha224)",
                                .cra_driver_name = "safexcel-hmac-sha224",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA224_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
                                        SHA256_DIGEST_SIZE);
}
static int safexcel_hmac_sha256_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha256_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}
static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha256_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha256_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha256_digest,
                .setkey = safexcel_hmac_sha256_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA256_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha256)",
                                .cra_driver_name = "safexcel-hmac-sha256",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA256_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_sha512_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = lower_32_bits(SHA512_H0);
        req->state[1] = upper_32_bits(SHA512_H0);
        req->state[2] = lower_32_bits(SHA512_H1);
        req->state[3] = upper_32_bits(SHA512_H1);
        req->state[4] = lower_32_bits(SHA512_H2);
        req->state[5] = upper_32_bits(SHA512_H2);
        req->state[6] = lower_32_bits(SHA512_H3);
        req->state[7] = upper_32_bits(SHA512_H3);
        req->state[8] = lower_32_bits(SHA512_H4);
        req->state[9] = upper_32_bits(SHA512_H4);
        req->state[10] = lower_32_bits(SHA512_H5);
        req->state[11] = upper_32_bits(SHA512_H5);
        req->state[12] = lower_32_bits(SHA512_H6);
        req->state[13] = upper_32_bits(SHA512_H6);
        req->state[14] = lower_32_bits(SHA512_H7);
        req->state[15] = upper_32_bits(SHA512_H7);

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA512_DIGEST_SIZE;

        return 0;
}
static int safexcel_sha512_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha512_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_sha512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha512_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha512_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha512",
                                .cra_driver_name = "safexcel-sha512",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_sha384_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = lower_32_bits(SHA384_H0);
        req->state[1] = upper_32_bits(SHA384_H0);
        req->state[2] = lower_32_bits(SHA384_H1);
        req->state[3] = upper_32_bits(SHA384_H1);
        req->state[4] = lower_32_bits(SHA384_H2);
        req->state[5] = upper_32_bits(SHA384_H2);
        req->state[6] = lower_32_bits(SHA384_H3);
        req->state[7] = upper_32_bits(SHA384_H3);
        req->state[8] = lower_32_bits(SHA384_H4);
        req->state[9] = upper_32_bits(SHA384_H4);
        req->state[10] = lower_32_bits(SHA384_H5);
        req->state[11] = upper_32_bits(SHA384_H5);
        req->state[12] = lower_32_bits(SHA384_H6);
        req->state[13] = upper_32_bits(SHA384_H6);
        req->state[14] = lower_32_bits(SHA384_H7);
        req->state[15] = upper_32_bits(SHA384_H7);

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = SHA512_DIGEST_SIZE;

        return 0;
}
static int safexcel_sha384_digest(struct ahash_request *areq)
{
        int ret = safexcel_sha384_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_sha384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_sha384_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_sha384_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "sha384",
                                .cra_driver_name = "safexcel-sha384",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
                                        SHA512_DIGEST_SIZE);
}
static int safexcel_hmac_sha512_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha512_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}
static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha512_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha512_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha512_digest,
                .setkey = safexcel_hmac_sha512_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA512_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha512)",
                                .cra_driver_name = "safexcel-hmac-sha512",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA512_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
                                       unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
                                        SHA512_DIGEST_SIZE);
}
static int safexcel_hmac_sha384_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_sha384_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}
static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_sha384_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_sha384_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_sha384_digest,
                .setkey = safexcel_hmac_sha384_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = SHA384_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(sha384)",
                                .cra_driver_name = "safexcel-hmac-sha384",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = SHA384_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_md5_init(struct ahash_request *areq)
{
        struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        memset(req, 0, sizeof(*req));

        req->state[0] = MD5_H0;
        req->state[1] = MD5_H1;
        req->state[2] = MD5_H2;
        req->state[3] = MD5_H3;

        ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
        req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
        req->state_sz = MD5_DIGEST_SIZE;

        return 0;
}
static int safexcel_md5_digest(struct ahash_request *areq)
{
        int ret = safexcel_md5_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_md5 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_md5_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_md5_digest,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = MD5_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "md5",
                                .cra_driver_name = "safexcel-md5",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};
static int safexcel_hmac_md5_init(struct ahash_request *areq)
{
        struct safexcel_ahash_req *req = ahash_request_ctx(areq);

        safexcel_md5_init(areq);
        req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
        return 0;
}
static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
                                    unsigned int keylen)
{
        return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
                                        MD5_DIGEST_SIZE);
}
static int safexcel_hmac_md5_digest(struct ahash_request *areq)
{
        int ret = safexcel_hmac_md5_init(areq);

        if (ret)
                return ret;

        return safexcel_ahash_finup(areq);
}
struct safexcel_alg_template safexcel_alg_hmac_md5 = {
        .type = SAFEXCEL_ALG_TYPE_AHASH,
        .engines = EIP97IES | EIP197B | EIP197D,
        .alg.ahash = {
                .init = safexcel_hmac_md5_init,
                .update = safexcel_ahash_update,
                .final = safexcel_ahash_final,
                .finup = safexcel_ahash_finup,
                .digest = safexcel_hmac_md5_digest,
                .setkey = safexcel_hmac_md5_setkey,
                .export = safexcel_ahash_export,
                .import = safexcel_ahash_import,
                .halg = {
                        .digestsize = MD5_DIGEST_SIZE,
                        .statesize = sizeof(struct safexcel_ahash_export_state),
                        .base = {
                                .cra_name = "hmac(md5)",
                                .cra_driver_name = "safexcel-hmac-md5",
                                .cra_priority = 300,
                                .cra_flags = CRYPTO_ALG_ASYNC |
                                             CRYPTO_ALG_KERN_DRIVER_ONLY,
                                .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
                                .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
                                .cra_init = safexcel_ahash_cra_init,
                                .cra_exit = safexcel_ahash_cra_exit,
                                .cra_module = THIS_MODULE,
                        },
                },
        },
};