// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API.  It
 * differs from shash (synchronous hash) in that ahash supports asynchronous
 * operations, and it hashes data from scatterlists instead of virtually
 * addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms.  The
 * shash API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
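
/*
 * Example usage (a minimal sketch, not part of this file; "sha256" is just
 * an illustrative algorithm name, @data/@len are caller-provided, and all
 * error handling is elided):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *	u8 digest[SHA256_DIGEST_SIZE];
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *	crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */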

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);
	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
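
/*
 * The hash walk helpers below let callers process a request's scatterlist
 * one mapped chunk at a time: each step maps at most one page with
 * kmap_local_page() and hands back a linear pointer in walk->data.
 */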

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (err) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (err) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);
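
/*
 * ahash_save_req() and ahash_restore_req() support the default finup path
 * below: they allocate a private sub-request (with its own result buffer
 * and, if requested, a copy of the exported hash state) so that ->update()
 * and ->final() can be chained for algorithms that lack a native ->finup().
 */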

static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));

	return crypto_ahash_alg(tfm)->update(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_final(ahash_request_ctx(req), req->result);

	return crypto_ahash_alg(tfm)->final(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));

	return crypto_ahash_alg(tfm)->finup(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));

	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_ahash_alg(tfm)->digest(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(crypto_ahash_reqtfm(req))->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = crypto_ahash_alg(tfm)->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	crypto_ahash_set_statesize(hash, alg->halg.statesize);

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	ahash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_nhash;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_nhash;

	return nhash;

out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if (!alg->finup)
		alg->finup = ahash_def_finup;
	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);
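
/*
 * Example registration from a driver (a hedged sketch; the example_*
 * callbacks, the context structures and the driver name are hypothetical):
 *
 *	static struct ahash_alg example_alg = {
 *		.init	= example_init,
 *		.update	= example_update,
 *		.final	= example_final,
 *		.digest	= example_digest,
 *		.halg	= {
 *			.digestsize = SHA256_DIGEST_SIZE,
 *			.statesize  = sizeof(struct example_reqctx),
 *			.base	= {
 *				.cra_name	 = "sha256",
 *				.cra_driver_name = "sha256-example",
 *				.cra_priority	 = 300,
 *				.cra_flags	 = CRYPTO_ALG_ASYNC,
 *				.cra_blocksize	 = SHA256_BLOCK_SIZE,
 *				.cra_ctxsize	 = sizeof(struct example_ctx),
 *				.cra_module	 = THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&example_alg);
 */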

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");