// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Handle async block request by crypto hardware engine.
 *
 * Copyright (C) 2016 Linaro, Inc.
 *
 * Author: Baolin Wang <baolin.wang@linaro.org>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/akcipher.h>
#include <crypto/internal/engine.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/kpp.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/delay.h>
#include <linux/device.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <uapi/linux/sched/types.h>

#define CRYPTO_ENGINE_MAX_QLEN 10

/* Temporary algorithm flag used to indicate an updated driver. */
#define CRYPTO_ALG_ENGINE 0x200

struct crypto_engine_alg {
	struct crypto_alg base;
	struct crypto_engine_op op;
};

/**
 * crypto_finalize_request - finalize one request if the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
static void crypto_finalize_request(struct crypto_engine *engine,
				    struct crypto_async_request *req, int err)
{
	unsigned long flags;

	/*
	 * If hardware cannot enqueue more requests and the retry mechanism
	 * is not supported, make sure we are completing the current request.
	 */
	if (!engine->retry_support) {
		spin_lock_irqsave(&engine->queue_lock, flags);
		if (engine->cur_req == req) {
			engine->cur_req = NULL;
		}
		spin_unlock_irqrestore(&engine->queue_lock, flags);
	}

	lockdep_assert_in_softirq();
	crypto_request_complete(req, err);

	kthread_queue_work(engine->kworker, &engine->pump_requests);
}

/**
 * crypto_pump_requests - dequeue one request from engine queue to process
 * @engine: the hardware engine
 * @in_kthread: true if we are in the context of the request pump thread
 *
 * This function checks if there is any request in the engine queue that
 * needs processing and if so calls out to the driver to initialize hardware
 * and handle each request.
 */
static void crypto_pump_requests(struct crypto_engine *engine,
				 bool in_kthread)
{
	struct crypto_async_request *async_req, *backlog;
	struct crypto_engine_alg *alg;
	struct crypto_engine_op *op;
	unsigned long flags;
	bool was_busy = false;
	int ret;

	spin_lock_irqsave(&engine->queue_lock, flags);

	/* Make sure we are not already running a request */
	if (!engine->retry_support && engine->cur_req)
		goto out;

	/* If another context is idling then defer */
	if (engine->idling) {
		kthread_queue_work(engine->kworker, &engine->pump_requests);
		goto out;
	}

	/* Check if the engine queue is idle */
	if (!crypto_queue_len(&engine->queue) || !engine->running) {
		if (!engine->busy)
			goto out;

		/* Only do teardown in the thread */
		if (!in_kthread) {
			kthread_queue_work(engine->kworker,
					   &engine->pump_requests);
			goto out;
		}

		engine->busy = false;
		engine->idling = true;
		spin_unlock_irqrestore(&engine->queue_lock, flags);

		if (engine->unprepare_crypt_hardware &&
		    engine->unprepare_crypt_hardware(engine))
			dev_err(engine->dev, "failed to unprepare crypt hardware\n");

		spin_lock_irqsave(&engine->queue_lock, flags);
		engine->idling = false;
		goto out;
	}

start_request:
	/* Get the first request from the engine queue to handle */
	backlog = crypto_get_backlog(&engine->queue);
	async_req = crypto_dequeue_request(&engine->queue);
	if (!async_req)
		goto out;

	/*
	 * If hardware doesn't support the retry mechanism,
	 * keep track of the request we are processing now.
	 * We'll need it on completion (crypto_finalize_request).
	 */
	if (!engine->retry_support)
		engine->cur_req = async_req;

	if (engine->busy)
		was_busy = true;
	else
		engine->busy = true;

	spin_unlock_irqrestore(&engine->queue_lock, flags);

	/* At this point we have successfully obtained a request to process */
	if (!was_busy && engine->prepare_crypt_hardware) {
		ret = engine->prepare_crypt_hardware(engine);
		if (ret) {
			dev_err(engine->dev, "failed to prepare crypt hardware\n");
			goto req_err_1;
		}
	}

	if (async_req->tfm->__crt_alg->cra_flags & CRYPTO_ALG_ENGINE) {
		alg = container_of(async_req->tfm->__crt_alg,
				   struct crypto_engine_alg, base);
		op = &alg->op;
	} else {
		dev_err(engine->dev, "failed to do request\n");
		ret = -EINVAL;
		goto req_err_1;
	}

	ret = op->do_one_request(engine, async_req);

	/* Request unsuccessfully executed by hardware */
	if (ret < 0) {
		/*
		 * If hardware queue is full (-ENOSPC), requeue request
		 * regardless of backlog flag.
		 * Otherwise, unprepare and complete the request.
		 */
		if (!engine->retry_support ||
		    (ret != -ENOSPC)) {
			dev_err(engine->dev,
				"Failed to do one request from queue: %d\n",
				ret);
			goto req_err_1;
		}
		spin_lock_irqsave(&engine->queue_lock, flags);
		/*
		 * If hardware was unable to execute request, enqueue it
		 * back in front of crypto-engine queue, to keep the order
		 * of requests.
		 */
		crypto_enqueue_request_head(&engine->queue, async_req);

		kthread_queue_work(engine->kworker, &engine->pump_requests);
		goto out;
	}

	goto retry;

req_err_1:
	crypto_request_complete(async_req, ret);

retry:
	if (backlog)
		crypto_request_complete(backlog, -EINPROGRESS);

	/* If retry mechanism is supported, send new requests to engine */
	if (engine->retry_support) {
		spin_lock_irqsave(&engine->queue_lock, flags);
		goto start_request;
	}
	return;

out:
	spin_unlock_irqrestore(&engine->queue_lock, flags);

	/*
	 * Batching requests is possible only if
	 * the hardware can enqueue multiple requests.
	 */
	if (engine->do_batch_requests) {
		ret = engine->do_batch_requests(engine);
		if (ret)
			dev_err(engine->dev, "failed to do batch requests: %d\n",
				ret);
	}

	return;
}

static void crypto_pump_work(struct kthread_work *work)
{
	struct crypto_engine *engine =
		container_of(work, struct crypto_engine, pump_requests);

	crypto_pump_requests(engine, true);
}

/**
 * crypto_transfer_request - transfer the new request into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 * @need_pump: indicates whether the pump work should be queued to the kthread worker
 */
static int crypto_transfer_request(struct crypto_engine *engine,
				   struct crypto_async_request *req,
				   bool need_pump)
{
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&engine->queue_lock, flags);

	if (!engine->running) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		return -ESHUTDOWN;
	}

	ret = crypto_enqueue_request(&engine->queue, req);

	if (!engine->busy && need_pump)
		kthread_queue_work(engine->kworker, &engine->pump_requests);

	spin_unlock_irqrestore(&engine->queue_lock, flags);
	return ret;
}

/**
 * crypto_transfer_request_to_engine - transfer one request into the
 * engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 */
static int crypto_transfer_request_to_engine(struct crypto_engine *engine,
					     struct crypto_async_request *req)
{
	return crypto_transfer_request(engine, req, true);
}

/**
 * crypto_transfer_aead_request_to_engine - transfer one aead_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 */
int crypto_transfer_aead_request_to_engine(struct crypto_engine *engine,
					   struct aead_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_aead_request_to_engine);

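/*
 * Usage sketch (illustrative only; the "mydrv" names below are hypothetical
 * and not part of this file): a driver built on crypto-engine typically just
 * hands the request over from its .encrypt/.decrypt callback and reports the
 * hardware result later through the matching crypto_finalize_*_request()
 * helper, e.g. from its interrupt handler or completion tasklet:
 *
 *	static int mydrv_aead_encrypt(struct aead_request *req)
 *	{
 *		struct mydrv_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
 *
 *		return crypto_transfer_aead_request_to_engine(ctx->engine, req);
 *	}
 *
 *	// completion path: crypto_finalize_aead_request(ctx->engine, req, err);
 */
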
/**
 * crypto_transfer_akcipher_request_to_engine - transfer one akcipher_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 */
int crypto_transfer_akcipher_request_to_engine(struct crypto_engine *engine,
					       struct akcipher_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_akcipher_request_to_engine);

/**
 * crypto_transfer_hash_request_to_engine - transfer one ahash_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 */
int crypto_transfer_hash_request_to_engine(struct crypto_engine *engine,
					   struct ahash_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_hash_request_to_engine);

/**
 * crypto_transfer_kpp_request_to_engine - transfer one kpp_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 */
int crypto_transfer_kpp_request_to_engine(struct crypto_engine *engine,
					  struct kpp_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_kpp_request_to_engine);

/**
 * crypto_transfer_skcipher_request_to_engine - transfer one skcipher_request
 * into the engine queue
 * @engine: the hardware engine
 * @req: the request that needs to be queued on the engine
 */
int crypto_transfer_skcipher_request_to_engine(struct crypto_engine *engine,
					       struct skcipher_request *req)
{
	return crypto_transfer_request_to_engine(engine, &req->base);
}
EXPORT_SYMBOL_GPL(crypto_transfer_skcipher_request_to_engine);

/**
 * crypto_finalize_aead_request - finalize one aead_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_aead_request(struct crypto_engine *engine,
				  struct aead_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_aead_request);

/**
 * crypto_finalize_akcipher_request - finalize one akcipher_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_akcipher_request(struct crypto_engine *engine,
				      struct akcipher_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_akcipher_request);

/**
 * crypto_finalize_hash_request - finalize one ahash_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_hash_request(struct crypto_engine *engine,
				  struct ahash_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_hash_request);

/**
 * crypto_finalize_kpp_request - finalize one kpp_request if the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_kpp_request(struct crypto_engine *engine,
				 struct kpp_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_kpp_request);

/**
 * crypto_finalize_skcipher_request - finalize one skcipher_request if
 * the request is done
 * @engine: the hardware engine
 * @req: the request that needs to be finalized
 * @err: error number
 */
void crypto_finalize_skcipher_request(struct crypto_engine *engine,
				      struct skcipher_request *req, int err)
{
	return crypto_finalize_request(engine, &req->base, err);
}
EXPORT_SYMBOL_GPL(crypto_finalize_skcipher_request);

/**
 * crypto_engine_start - start the hardware engine
 * @engine: the hardware engine that needs to be started
 *
 * Return 0 on success, else a negative error number.
 */
int crypto_engine_start(struct crypto_engine *engine)
{
	unsigned long flags;

	spin_lock_irqsave(&engine->queue_lock, flags);

	if (engine->running || engine->busy) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		return -EBUSY;
	}

	engine->running = true;
	spin_unlock_irqrestore(&engine->queue_lock, flags);

	kthread_queue_work(engine->kworker, &engine->pump_requests);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_engine_start);

/**
 * crypto_engine_stop - stop the hardware engine
 * @engine: the hardware engine that needs to be stopped
 *
 * Return 0 on success, else a negative error number.
 */
int crypto_engine_stop(struct crypto_engine *engine)
{
	unsigned long flags;
	unsigned int limit = 500;
	int ret = 0;

	spin_lock_irqsave(&engine->queue_lock, flags);

	/*
	 * If the engine queue is not empty or the engine is busy, wait
	 * for a while so the queued requests can be pumped.
	 */
	while ((crypto_queue_len(&engine->queue) || engine->busy) && limit--) {
		spin_unlock_irqrestore(&engine->queue_lock, flags);
		msleep(20);
		spin_lock_irqsave(&engine->queue_lock, flags);
	}

	if (crypto_queue_len(&engine->queue) || engine->busy)
		ret = -EBUSY;
	else
		engine->running = false;

	spin_unlock_irqrestore(&engine->queue_lock, flags);

	if (ret)
		dev_warn(engine->dev, "could not stop engine\n");

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_stop);

/**
 * crypto_engine_alloc_init_and_set - allocate crypto hardware engine structure
 * and initialize it by setting the maximum number of entries in the software
 * crypto-engine queue.
 * @dev: the device attached to the hardware engine
 * @retry_support: whether hardware has support for retry mechanism
 * @cbk_do_batch: pointer to a callback function to be invoked when executing
 *                a batch of requests.
 *                This has the form:
 *                callback(struct crypto_engine *engine)
 *                where:
 *                engine: the crypto engine structure.
 * @rt: whether this queue is set to run as a realtime task
 * @qlen: maximum size of the crypto-engine queue
 *
 * This must be called from a context that can sleep.
 * Return: the crypto engine structure on success, else NULL.
 */
struct crypto_engine *crypto_engine_alloc_init_and_set(struct device *dev,
						       bool retry_support,
						       int (*cbk_do_batch)(struct crypto_engine *engine),
						       bool rt, int qlen)
{
	struct crypto_engine *engine;

	if (!dev)
		return NULL;

	engine = devm_kzalloc(dev, sizeof(*engine), GFP_KERNEL);
	if (!engine)
		return NULL;

	engine->dev = dev;
	engine->rt = rt;
	engine->running = false;
	engine->busy = false;
	engine->idling = false;
	engine->retry_support = retry_support;
	engine->priv_data = dev;
	/*
	 * Batching requests is possible only if
	 * the hardware has support for the retry mechanism.
	 */
	engine->do_batch_requests = retry_support ? cbk_do_batch : NULL;

	snprintf(engine->name, sizeof(engine->name),
		 "%s-engine", dev_name(dev));

	crypto_init_queue(&engine->queue, qlen);
	spin_lock_init(&engine->queue_lock);

	engine->kworker = kthread_create_worker(0, "%s", engine->name);
	if (IS_ERR(engine->kworker)) {
		dev_err(dev, "failed to create crypto request pump task\n");
		return NULL;
	}
	kthread_init_work(&engine->pump_requests, crypto_pump_work);

	if (engine->rt) {
		dev_info(dev, "will run requests pump with realtime priority\n");
		sched_set_fifo(engine->kworker->task);
	}

	return engine;
}
EXPORT_SYMBOL_GPL(crypto_engine_alloc_init_and_set);

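/*
 * Probe-time usage sketch (illustrative only; "mydrv" and the chosen queue
 * length are hypothetical): a driver allocates the engine, optionally with
 * retry/batch support, then starts it; crypto_engine_exit() below is the
 * counterpart for the remove path.
 *
 *	mydrv->engine = crypto_engine_alloc_init_and_set(dev, true, NULL,
 *							 false, 128);
 *	if (!mydrv->engine)
 *		return -ENOMEM;
 *
 *	ret = crypto_engine_start(mydrv->engine);
 *	if (ret) {
 *		crypto_engine_exit(mydrv->engine);
 *		return ret;
 *	}
 */
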
/**
 * crypto_engine_alloc_init - allocate crypto hardware engine structure and
 * initialize it.
 * @dev: the device attached to the hardware engine
 * @rt: whether this queue is set to run as a realtime task
 *
 * This must be called from a context that can sleep.
 * Return: the crypto engine structure on success, else NULL.
 */
struct crypto_engine *crypto_engine_alloc_init(struct device *dev, bool rt)
{
	return crypto_engine_alloc_init_and_set(dev, false, NULL, rt,
						CRYPTO_ENGINE_MAX_QLEN);
}
EXPORT_SYMBOL_GPL(crypto_engine_alloc_init);

/**
 * crypto_engine_exit - free the resources of the hardware engine on exit
 * @engine: the hardware engine that needs to be freed
 */
void crypto_engine_exit(struct crypto_engine *engine)
{
	int ret;

	ret = crypto_engine_stop(engine);
	if (ret)
		return;

	kthread_destroy_worker(engine->kworker);
}
EXPORT_SYMBOL_GPL(crypto_engine_exit);

int crypto_engine_register_aead(struct aead_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_aead(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_aead);

void crypto_engine_unregister_aead(struct aead_engine_alg *alg)
{
	crypto_unregister_aead(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_aead);

int crypto_engine_register_aeads(struct aead_engine_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_aead(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_aeads(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_aeads);

void crypto_engine_unregister_aeads(struct aead_engine_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_aead(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_aeads);

int crypto_engine_register_ahash(struct ahash_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.halg.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_ahash(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_ahash);

void crypto_engine_unregister_ahash(struct ahash_engine_alg *alg)
{
	crypto_unregister_ahash(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_ahash);

int crypto_engine_register_ahashes(struct ahash_engine_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_ahashes(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_ahashes);

void crypto_engine_unregister_ahashes(struct ahash_engine_alg *algs,
				      int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_ahashes);

int crypto_engine_register_akcipher(struct akcipher_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_akcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_akcipher);

void crypto_engine_unregister_akcipher(struct akcipher_engine_alg *alg)
{
	crypto_unregister_akcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_akcipher);

int crypto_engine_register_kpp(struct kpp_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_kpp(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_kpp);

void crypto_engine_unregister_kpp(struct kpp_engine_alg *alg)
{
	crypto_unregister_kpp(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_kpp);

int crypto_engine_register_skcipher(struct skcipher_engine_alg *alg)
{
	if (!alg->op.do_one_request)
		return -EINVAL;

	alg->base.base.cra_flags |= CRYPTO_ALG_ENGINE;

	return crypto_register_skcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_register_skcipher);

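/*
 * Registration sketch (illustrative only; the "mydrv" names are hypothetical):
 * an updated driver wraps its algorithm in a *_engine_alg so that
 * crypto_pump_requests() can reach ->do_one_request() via the
 * CRYPTO_ALG_ENGINE flag set above.
 *
 *	static struct skcipher_engine_alg mydrv_cbc_aes = {
 *		.base = {
 *			// usual skcipher_alg fields: .base.cra_name, .setkey, .encrypt, ...
 *		},
 *		.op = {
 *			.do_one_request = mydrv_do_one_request,
 *		},
 *	};
 *
 *	// probe:  crypto_engine_register_skcipher(&mydrv_cbc_aes);
 *	// remove: crypto_engine_unregister_skcipher(&mydrv_cbc_aes);
 */
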
void crypto_engine_unregister_skcipher(struct skcipher_engine_alg *alg)
{
	return crypto_unregister_skcipher(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_skcipher);

int crypto_engine_register_skciphers(struct skcipher_engine_alg *algs,
				     int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_engine_register_skcipher(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	crypto_engine_unregister_skciphers(algs, i);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_engine_register_skciphers);

void crypto_engine_unregister_skciphers(struct skcipher_engine_alg *algs,
					int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_engine_unregister_skcipher(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_engine_unregister_skciphers);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Crypto hardware engine framework");