crypto/shash.c
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"
static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);
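
/*
 * Algorithms may declare an alignmask for their data arguments.  The
 * *_unaligned() helpers below handle callers that pass buffers which do not
 * satisfy that mask: the data is bounced through a suitably aligned
 * temporary buffer, which is wiped afterwards so that no key or digest
 * material is left behind.
 */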
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __aligned_largest u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}
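
/*
 * For an unaligned update, only the misaligned head of the data needs to be
 * copied into the on-stack bounce buffer; once that head has been fed to
 * ->update(), the remainder of the caller's buffer is aligned and can be
 * passed through directly.
 */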
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}
int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__aligned_largest;
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}
int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
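
/*
 * Generic finup/digest fallbacks: finup is update followed by final, digest
 * is init followed by finup.  shash_prepare_alg() wires these in when an
 * algorithm leaves the hooks NULL, and crypto_shash_finup()/
 * crypto_shash_digest() also use them when the caller's pointers are
 * misaligned.
 */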
static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);
static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}
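
/*
 * The functions below expose a synchronous shash algorithm through the
 * asynchronous ahash interface.  The shash_desc lives in the ahash request
 * context (crypto_init_shash_ops_async() sizes crt->reqsize accordingly),
 * so each wrapper rebuilds the descriptor from the request and calls the
 * corresponding shash operation.
 */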
static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}
int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}
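
/*
 * Fast path for digest requests whose data sits entirely within a single
 * scatterlist entry and does not cross a page boundary: map the page and
 * hash it in one call.  Everything else falls back to init + finup over
 * the hash walk.
 */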
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);
static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}
#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}
static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);
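
/*
 * Typical use of the shash API from other kernel code (an illustrative
 * sketch only, not part of this file's implementation; "data"/"len" stand
 * in for the caller's buffer):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *		u8 out[SHA256_DIGEST_SIZE];
 *
 *		desc->tfm = tfm;
 *		desc->flags = 0;
 *		crypto_shash_digest(desc, data, len, out);
 *		crypto_free_shash(tfm);
 *	}
 */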
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}
int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);
int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");