/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (err)
		return err;

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final  = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	hash->descsize = alg->descsize;

	if (crypto_shash_alg_has_setkey(alg) &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");