crypto/shash.c
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"
static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
                    unsigned int keylen)
{
        return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);
/*
 * Copy the key into a temporary buffer that satisfies the algorithm's
 * alignment mask before calling the underlying ->setkey(), then wipe and
 * free the copy.
 */
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
                                  unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned long absize;
        u8 *buffer, *alignbuffer;
        int err;

        absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        err = shash->setkey(tfm, alignbuffer, keylen);
        kzfree(buffer);
        return err;
}
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
                        unsigned int keylen)
{
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        int err;

        if ((unsigned long)key & alignmask)
                err = shash_setkey_unaligned(tfm, key, keylen);
        else
                err = shash->setkey(tfm, key, keylen);

        if (err)
                return err;

        crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
        return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
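/*
 * Illustrative usage sketch (not part of the original file): keyed hashes
 * such as HMAC must be keyed through crypto_shash_setkey() before any
 * digest is computed.  The algorithm name and the "key"/"keylen" buffer
 * below are hypothetical; error handling is abbreviated.
 *
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *	if (err)
 *		crypto_free_shash(tfm);
 */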
static inline unsigned int shash_align_buffer_size(unsigned len,
                                                   unsigned long mask)
{
        typedef u8 __aligned_largest u8_aligned;
        return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
/*
 * Feed the misaligned prefix of the data through an aligned on-stack bounce
 * buffer, then pass the remaining, now aligned, bytes straight to the
 * algorithm's ->update().
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        unsigned int unaligned_len = alignmask + 1 -
                                     ((unsigned long)data & alignmask);
        u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
                __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        if (unaligned_len > len)
                unaligned_len = len;

        memcpy(buf, data, unaligned_len);
        err = shash->update(desc, buf, unaligned_len);
        memset(buf, 0, unaligned_len);

        return err ?:
               shash->update(desc, data + unaligned_len, len - unaligned_len);
}
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
                        unsigned int len)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)data & alignmask)
                return shash_update_unaligned(desc, data, len);

        return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        unsigned long alignmask = crypto_shash_alignmask(tfm);
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned int ds = crypto_shash_digestsize(tfm);
        u8 ubuf[shash_align_buffer_size(ds, alignmask)]
                __aligned_largest;
        u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
        int err;

        err = shash->final(desc, buf);
        if (err)
                goto out;

        memcpy(out, buf, ds);

out:
        memset(buf, 0, ds);
        return err;
}
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if ((unsigned long)out & alignmask)
                return shash_final_unaligned(desc, out);

        return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
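/*
 * Illustrative usage sketch (not part of the original file): streaming a
 * message through the init/update/final interface.  "desc" is assumed to
 * have been set up as for crypto_shash_digest() (desc->tfm and desc->flags
 * initialised); buf1/len1, buf2/len2 and "out" are hypothetical caller
 * buffers, with "out" at least crypto_shash_digestsize(desc->tfm) bytes.
 *
 *	err = crypto_shash_init(desc) ?:
 *	      crypto_shash_update(desc, buf1, len1) ?:
 *	      crypto_shash_update(desc, buf2, len2) ?:
 *	      crypto_shash_final(desc, out);
 */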
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
                                 unsigned int len, u8 *out)
{
        return crypto_shash_update(desc, data, len) ?:
               crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
                       unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_finup_unaligned(desc, data, len, out);

        return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
                                  unsigned int len, u8 *out)
{
        return crypto_shash_init(desc) ?:
               crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
                        unsigned int len, u8 *out)
{
        struct crypto_shash *tfm = desc->tfm;
        struct shash_alg *shash = crypto_shash_alg(tfm);
        unsigned long alignmask = crypto_shash_alignmask(tfm);

        if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
                return -ENOKEY;

        if (((unsigned long)data | (unsigned long)out) & alignmask)
                return shash_digest_unaligned(desc, data, len, out);

        return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
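/*
 * Illustrative usage sketch (not part of the original file): a one-shot
 * digest over a linear buffer.  Assumes a "sha256" shash implementation is
 * registered; "data", "len" and "out" are hypothetical caller buffers, with
 * "out" at least crypto_shash_digestsize(tfm) bytes.
 *
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		desc->flags = 0;
 *		err = crypto_shash_digest(desc, data, len, out);
 *		shash_desc_zero(desc);
 *	}
 *
 *	crypto_free_shash(tfm);
 */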
static int shash_default_export(struct shash_desc *desc, void *out)
{
        memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
        return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
        memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
        return 0;
}
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

        return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
             nbytes = crypto_hash_walk_done(&walk, nbytes))
                nbytes = crypto_shash_update(desc, walk.data, nbytes);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);
static int shash_async_update(struct ahash_request *req)
{
        return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
        return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
        struct crypto_hash_walk walk;
        int nbytes;

        nbytes = crypto_hash_walk_first(req, &walk);
        if (!nbytes)
                return crypto_shash_final(desc, req->result);

        do {
                nbytes = crypto_hash_walk_last(&walk) ?
                         crypto_shash_finup(desc, walk.data, nbytes,
                                            req->result) :
                         crypto_shash_update(desc, walk.data, nbytes);
                nbytes = crypto_hash_walk_done(&walk, nbytes);
        } while (nbytes > 0);

        return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
static int shash_async_finup(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
        unsigned int nbytes = req->nbytes;
        struct scatterlist *sg;
        unsigned int offset;
        int err;

        /*
         * Fast path: if the whole request fits within one scatterlist page,
         * map it and digest it in a single call; otherwise fall back to a
         * full init + walk + finup sequence.
         */
        if (nbytes &&
            (sg = req->src, offset = sg->offset,
             nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
                void *data;

                data = kmap_atomic(sg_page(sg));
                err = crypto_shash_digest(desc, data + offset, nbytes,
                                          req->result);
                kunmap_atomic(data);
                crypto_yield(desc->flags);
        } else
                err = crypto_shash_init(desc) ?:
                      shash_ahash_finup(req, desc);

        return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
static int shash_async_digest(struct ahash_request *req)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
        return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
        struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
        struct shash_desc *desc = ahash_request_ctx(req);

        desc->tfm = *ctx;
        desc->flags = req->base.flags;

        return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

        crypto_free_shash(*ctx);
}
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
        struct crypto_alg *calg = tfm->__crt_alg;
        struct shash_alg *alg = __crypto_shash_alg(calg);
        struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
        struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
        struct crypto_shash *shash;

        if (!crypto_mod_get(calg))
                return -EAGAIN;

        shash = crypto_create_tfm(calg, &crypto_shash_type);
        if (IS_ERR(shash)) {
                crypto_mod_put(calg);
                return PTR_ERR(shash);
        }

        *ctx = shash;
        tfm->exit = crypto_exit_shash_ops_async;

        crt->init = shash_async_init;
        crt->update = shash_async_update;
        crt->final = shash_async_final;
        crt->finup = shash_async_finup;
        crt->digest = shash_async_digest;
        crt->setkey = shash_async_setkey;

        crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
                                    CRYPTO_TFM_NEED_KEY);

        if (alg->export)
                crt->export = shash_async_export;
        if (alg->import)
                crt->import = shash_async_import;

        crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

        return 0;
}
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
        struct crypto_shash *hash = __crypto_shash_cast(tfm);
        struct shash_alg *alg = crypto_shash_alg(hash);

        hash->descsize = alg->descsize;

        /* Keyed algorithms must be given a key before they may be used. */
        if (crypto_shash_alg_has_setkey(alg) &&
            !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
                crypto_shash_set_flags(hash, CRYPTO_TFM_NEED_KEY);

        return 0;
}
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        struct crypto_report_hash rhash;
        struct shash_alg *salg = __crypto_shash_alg(alg);

        strncpy(rhash.type, "shash", sizeof(rhash.type));

        rhash.blocksize = alg->cra_blocksize;
        rhash.digestsize = salg->digestsize;

        if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
                    sizeof(struct crypto_report_hash), &rhash))
                goto nla_put_failure;
        return 0;

nla_put_failure:
        return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
        return -ENOSYS;
}
#endif
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
        __maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
        struct shash_alg *salg = __crypto_shash_alg(alg);

        seq_printf(m, "type         : shash\n");
        seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
        seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
static const struct crypto_type crypto_shash_type = {
        .extsize = crypto_alg_extsize,
        .init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
        .show = crypto_shash_show,
#endif
        .report = crypto_shash_report,
        .maskclear = ~CRYPTO_ALG_TYPE_MASK,
        .maskset = CRYPTO_ALG_TYPE_MASK,
        .type = CRYPTO_ALG_TYPE_SHASH,
        .tfmsize = offsetof(struct crypto_shash, base),
};
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
                                        u32 mask)
{
        return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
static int shash_prepare_alg(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;

        if (alg->digestsize > PAGE_SIZE / 8 ||
            alg->descsize > PAGE_SIZE / 8 ||
            alg->statesize > PAGE_SIZE / 8)
                return -EINVAL;

        base->cra_type = &crypto_shash_type;
        base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
        base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

        if (!alg->finup)
                alg->finup = shash_finup_unaligned;
        if (!alg->digest)
                alg->digest = shash_digest_unaligned;
        if (!alg->export) {
                alg->export = shash_default_export;
                alg->import = shash_default_import;
                alg->statesize = alg->descsize;
        }
        if (!alg->setkey)
                alg->setkey = shash_no_setkey;

        return 0;
}
int crypto_register_shash(struct shash_alg *alg)
{
        struct crypto_alg *base = &alg->base;
        int err;

        err = shash_prepare_alg(alg);
        if (err)
                return err;

        return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
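/*
 * Illustrative registration sketch (not part of the original file): a driver
 * typically fills in a struct shash_alg and registers it from its module
 * init.  Every name below (the "example" algorithm, example_init/update/
 * final, struct example_desc_ctx) is hypothetical.
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 32,
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.descsize	= sizeof(struct example_desc_ctx),
 *		.base		= {
 *			.cra_name	 = "example",
 *			.cra_driver_name = "example-generic",
 *			.cra_priority	 = 100,
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_shash(&example_alg);
 *	}
 */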
int crypto_unregister_shash(struct shash_alg *alg)
{
        return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
int crypto_register_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = 0; i < count; i++) {
                ret = crypto_register_shash(&algs[i]);
                if (ret)
                        goto err;
        }

        return 0;

err:
        for (--i; i >= 0; --i)
                crypto_unregister_shash(&algs[i]);

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);
int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
        int i, ret;

        for (i = count - 1; i >= 0; --i) {
                ret = crypto_unregister_shash(&algs[i]);
                if (ret)
                        pr_err("Failed to unregister %s %s: %d\n",
                               algs[i].base.cra_driver_name,
                               algs[i].base.cra_name, ret);
        }

        return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
int shash_register_instance(struct crypto_template *tmpl,
                            struct shash_instance *inst)
{
        int err;

        err = shash_prepare_alg(&inst->alg);
        if (err)
                return err;

        return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
        crypto_drop_spawn(crypto_instance_ctx(inst));
        kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
                            struct shash_alg *alg,
                            struct crypto_instance *inst)
{
        return crypto_init_spawn2(&spawn->base, &alg->base, inst,
                                  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
        struct crypto_alg *alg;

        alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
        return IS_ERR(alg) ? ERR_CAST(alg) :
               container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");