/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"
static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}
/*
 * Copy the key into a correctly aligned bounce buffer before handing it to
 * the algorithm's ->setkey(), then wipe the buffer.
 */
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
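
/*
 * Illustrative sketch, not part of the original file: a keyed hash such as
 * "hmac(sha256)" is keyed through crypto_shash_setkey() before any digest is
 * computed.  The algorithm name, the helper name and the all-zero demo key
 * below are assumptions made purely for illustration.
 */
static int __maybe_unused shash_example_setkey(void)
{
	static const u8 example_key[32];	/* assumed demo key, all zeroes */
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Unkeyed algorithms would fail here with -ENOSYS (shash_no_setkey). */
	err = crypto_shash_setkey(tfm, example_key, sizeof(example_key));

	crypto_free_shash(tfm);
	return err;
}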
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __aligned_largest u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

/*
 * The unaligned head of the data is copied into an aligned on-stack buffer
 * and processed separately; the remainder is then aligned and can be passed
 * to the algorithm's ->update() directly.
 */
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
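
/*
 * Illustrative sketch, not part of the original file: the common one-shot
 * pattern built on crypto_shash_digest() with a stack-allocated descriptor.
 * The "sha256" algorithm name and the helper name are assumptions for
 * illustration only; out must provide at least crypto_shash_digestsize(tfm)
 * bytes (32 for sha256).
 */
static int __maybe_unused shash_example_digest(const u8 *data,
					       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	{
		SHASH_DESC_ON_STACK(desc, tfm);

		desc->tfm = tfm;
		desc->flags = 0;
		err = crypto_shash_digest(desc, data, len, out);
		shash_desc_zero(desc);
	}

	crypto_free_shash(tfm);
	return err;
}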
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);
static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	struct scatterlist *sg = req->src;
	unsigned int offset = sg->offset;
	unsigned int nbytes = req->nbytes;
	int err;

	/*
	 * If the whole request fits within the first scatterlist page, map it
	 * once and digest it directly; otherwise walk the scatterlist.
	 */
	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}
static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

/*
 * Set up an ahash tfm whose operations are serviced by an underlying
 * synchronous shash algorithm.
 */
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
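
/*
 * Illustrative sketch, not part of the original file: incremental hashing
 * with a heap-allocated descriptor sized from crypto_shash_descsize().  The
 * "sha256" algorithm name and the helper name are assumptions for
 * illustration only.
 */
static int __maybe_unused shash_example_incremental(const u8 *part1,
						    unsigned int len1,
						    const u8 *part2,
						    unsigned int len2,
						    u8 *out)
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(tfm);
		return -ENOMEM;
	}

	desc->tfm = tfm;
	desc->flags = 0;

	/* out must provide at least crypto_shash_digestsize(tfm) bytes. */
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, part1, len1) ?:
	      crypto_shash_update(desc, part2, len2) ?:
	      crypto_shash_final(desc, out);

	kzfree(desc);
	crypto_free_shash(tfm);
	return err;
}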
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
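
/*
 * Illustrative sketch, not part of the original file: the shape of a driver
 * registering a synchronous hash.  The toy one-byte XOR "digest", all names
 * and the priority below are assumptions for illustration only; a real
 * driver would register its own algorithm from module init and call
 * crypto_unregister_shash() on removal.
 */
struct shash_example_ctx {
	u8 acc;		/* running XOR of all input bytes */
};

static int shash_example_init_op(struct shash_desc *desc)
{
	struct shash_example_ctx *ctx = shash_desc_ctx(desc);

	ctx->acc = 0;
	return 0;
}

static int shash_example_update_op(struct shash_desc *desc, const u8 *data,
				   unsigned int len)
{
	struct shash_example_ctx *ctx = shash_desc_ctx(desc);

	while (len--)
		ctx->acc ^= *data++;
	return 0;
}

static int shash_example_final_op(struct shash_desc *desc, u8 *out)
{
	struct shash_example_ctx *ctx = shash_desc_ctx(desc);

	*out = ctx->acc;
	return 0;
}

static struct shash_alg shash_example_alg = {
	.digestsize	= 1,
	.init		= shash_example_init_op,
	.update		= shash_example_update_op,
	.final		= shash_example_final_op,
	.descsize	= sizeof(struct shash_example_ctx),
	.base		= {
		.cra_name	 = "xor8-example",
		.cra_driver_name = "xor8-example-generic",
		.cra_priority	 = 100,
		.cra_blocksize	 = 1,
		.cra_module	 = THIS_MODULE,
	},
};

static int __maybe_unused shash_example_register(void)
{
	return crypto_register_shash(&shash_example_alg);
}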
int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);
int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);
int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");