crypto/api.c
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);
static inline struct crypto_alg *crypto_alg_get(struct crypto_alg *alg)
{
        atomic_inc(&alg->cra_refcnt);
        return alg;
}

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                /*
                 * A driver-name match is exact and wins outright; a
                 * plain-name match is fuzzy and only wins if it beats
                 * the best priority seen so far.
                 */
                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
EXPORT_SYMBOL_GPL(__crypto_alg_lookup);
static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (larval->adult)
                crypto_mod_put(larval->adult);
        kfree(larval);
}

static struct crypto_alg *crypto_larval_alloc(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        atomic_set(&larval->alg.cra_refcnt, 2);
        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg)
                kfree(larval);

        return alg;
}
static void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        wait_for_completion_interruptible_timeout(&larval->completion, 60 * HZ);
        alg = larval->adult;
        if (alg) {
                if (!crypto_mod_get(alg))
                        alg = ERR_PTR(-EAGAIN);
        } else
                alg = ERR_PTR(-ENOENT);
        crypto_mod_put(&larval->alg);

        return alg;
}
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        up_read(&crypto_alg_sem);

        return alg;
}
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        if (!name)
                return ERR_PTR(-ENOENT);

        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        type &= mask;

        alg = try_then_request_module(crypto_alg_lookup(name, type, mask),
                                      name);
        if (alg)
                return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        /* Ask any registered crypto manager to instantiate the algorithm. */
        ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = crypto_notify(CRYPTO_MSG_ALG_REQUEST, larval);
        }

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_DIGEST:
                return crypto_init_digest_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type) {
                if (type->exit)
                        type->exit(tfm);
                return;
        }

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                crypto_exit_cipher_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                crypto_exit_digest_ops(tfm);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                crypto_exit_compress_ops(tfm);
                break;

        default:
                BUG();
        }
}
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_DIGEST:
                len += crypto_digest_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}
void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (alg->cra_init && (err = alg->cra_init(tfm))) {
                if (err == -EAGAIN)
                        crypto_shoot_alg(alg);
                goto cra_init_failed;
        }

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
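
/*
 * A minimal usage sketch, not part of the original file: how a caller
 * might pair crypto_alloc_base() above with crypto_free_tfm() below.
 * The algorithm name "cbc(aes)", the (0, 0) type/mask pair (which
 * places no constraint on the algorithm type) and the function below
 * are illustrative assumptions; most callers should use a type-specific
 * wrapper such as crypto_alloc_blkcipher() instead.
 */
#ifdef CRYPTO_API_USAGE_SKETCH
static int crypto_api_usage_sketch(void)
{
        struct crypto_tfm *tfm;

        /* Look up "cbc(aes)", loading a module or asking cryptomgr if needed. */
        tfm = crypto_alloc_base("cbc(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* ... use the transform ... */

        /* Free the transform and drop its refcount on the algorithm. */
        crypto_free_tfm(tfm);
        return 0;
}
#endif /* CRYPTO_API_USAGE_SKETCH */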
/**
 * crypto_free_tfm - Free crypto transform
 * @tfm: Transform to free
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_free_tfm(struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;
        int size;

        if (unlikely(!tfm))
                return;

        alg = tfm->__crt_alg;
        size = sizeof(*tfm) + alg->cra_ctxsize;

        if (alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        memset(tfm, 0, size);
        kfree(tfm);
}
EXPORT_SYMBOL_GPL(crypto_free_tfm);
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
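
/*
 * A minimal sketch, not part of the original file: probing for an
 * algorithm with crypto_has_alg() before committing to an allocation.
 * The "sha256" name and the function below are illustrative assumptions.
 */
#ifdef CRYPTO_API_USAGE_SKETCH
static int crypto_api_probe_sketch(void)
{
        /* crypto_has_alg() returns 1 if "sha256" is or can be made available. */
        if (!crypto_has_alg("sha256", 0, 0))
                return -ENOENT;

        return 0;
}
#endif /* CRYPTO_API_USAGE_SKETCH */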