crypto/shash.c
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;
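/*
 * Default ->setkey() for algorithms that take no key: any attempt to set
 * a key is rejected.
 */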
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                    unsigned int keylen)
{
        return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_KERNEL);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);
        return err;
}
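/*
 * Set the key for the transform.  A key pointer that does not satisfy
 * the algorithm's alignmask is first copied into an aligned bounce
 * buffer; on success the CRYPTO_TFM_NEED_KEY flag is cleared so the tfm
 * becomes usable.
 */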
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        int err;

        if ((unsigned long)key & alignmask)
                err = shash_setkey_unaligned(tfm, key, keylen);
        else
                err = shash->setkey(tfm, key, keylen);

        if (err)
                return err;

        crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        typedef u8 __attribute__ ((aligned)) u8_aligned;
        return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
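/*
 * Handle an update whose data pointer violates the alignmask: the
 * misaligned head of the buffer is copied into an aligned on-stack
 * buffer and hashed from there, after which the (now aligned) remainder
 * is hashed directly.
 */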
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
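/*
 * Handle a final call with a misaligned output pointer: the digest is
 * produced into an aligned on-stack buffer and then copied to the
 * caller's buffer; the stack copy is wiped before returning.
 */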
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __attribute__ ((aligned));
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                return -ENOKEY;

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}
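/*
 * The shash_async_* helpers below wrap a synchronous shash algorithm so
 * that it can also be driven through the asynchronous ahash interface.
 * The wrapping ahash tfm keeps a pointer to the underlying crypto_shash
 * in its context, and each request carries its own shash_desc in the
 * request context.
 */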
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}
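/*
 * Feed an ahash request's scatterlist into a shash descriptor: walk the
 * source data with crypto_hash_walk and pass each mapped chunk to
 * crypto_shash_update().
 */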
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}
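/*
 * Digest an ahash request with a shash descriptor.  When the data fits
 * within a single page it is mapped with kmap_atomic() and hashed in one
 * crypto_shash_digest() call; otherwise the request is processed as an
 * init followed by a walking finup.
 */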
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        unsigned int nbytes = req->nbytes;
        struct scatterlist *sg;
        unsigned int offset;
        int err;

        if (nbytes &&
            (sg = req->src, offset = sg->offset,
             nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}
static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}
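/*
 * Instantiate the ahash wrapper around a shash algorithm: allocate the
 * underlying crypto_shash, install the shash_async_* operations and size
 * the request context to hold a shash_desc plus the algorithm's
 * descriptor state.
 */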
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
                                    CRYPTO_TFM_NEED_KEY);

        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}
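/*
 * Per-tfm initialisation: record the descriptor size and, for algorithms
 * that require a key, mark the tfm as unusable until a key has been set.
 */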
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        struct shash_alg *alg = crypto_shash_alg(hash);

        hash->descsize = alg->descsize;

        if (crypto_shash_alg_has_setkey(alg) &&
            !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
                crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

        return 0;
}
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
                    sizeof(struct crypto_report_hash), &rhash))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};
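/*
 * Typical caller-side use of the exported shash API, as a minimal
 * illustrative sketch (error handling trimmed; "sha256" is only an
 * example algorithm name):
 *
 *      struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *      struct shash_desc *desc;
 *
 *      desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm),
 *                     GFP_KERNEL);
 *      desc->tfm = tfm;
 *      desc->flags = 0;
 *      crypto_shash_digest(desc, data, len, out);
 *      kzfree(desc);
 *      crypto_free_shash(tfm);
 */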
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
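/*
 * Validate an shash algorithm's sizes and fill in default operations:
 * finup/digest fall back to update+final, export/import default to a raw
 * copy of the descriptor state, and keyless algorithms get
 * shash_no_setkey().
 */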
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}
int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_shash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);
int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = count - 1; i >= 0; --i) {
                ret = crypto_unregister_shash(&algs[i]);
                if (ret)
                        pr_err("Failed to unregister %s %s: %d\n",
                               algs[i].base.cra_driver_name,
                               algs[i].base.cra_name, ret);
        }

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");