/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);
void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
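
/*
 * Usage sketch (illustrative only, not part of the original file):
 * every successful crypto_mod_get() must be balanced by a
 * crypto_mod_put(), otherwise the module owning the algorithm can
 * never be unloaded.  example_use_alg() is a hypothetical caller.
 */
static void __maybe_unused example_use_alg(struct crypto_alg *alg)
{
	struct crypto_alg *held;

	held = crypto_mod_get(alg);	/* pins alg and its module */
	if (!held)
		return;			/* owning module is going away */

	/* ... use "held" here ... */

	crypto_mod_put(held);		/* drops both references */
}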
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}
struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);
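
/*
 * Example (hypothetical, not part of the original file): with both
 * "aes-generic" (cra_name "aes", cra_priority 100) and "aes-aesni"
 * (cra_name "aes", cra_priority 300) registered, looking up the
 * generic name "aes" fuzzy-matches on cra_name and returns the
 * highest-priority driver, while looking up "aes-generic" matches
 * that cra_driver_name exactly.
 */
static __maybe_unused struct crypto_alg *example_pick_aes(void)
{
	/*
	 * Returns e.g. the AES-NI driver if registered, else the
	 * generic C implementation; NULL if neither is present.
	 * The caller owns a reference and must crypto_mod_put() it.
	 */
	return crypto_alg_lookup("aes", 0, 0);
}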
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);
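
/*
 * Note (illustrative, not part of the original file): the
 * request_module("crypto-%s", name) call above relies on algorithm
 * modules advertising a matching module alias.  A hypothetical driver
 * implementing "sha256" would declare
 *
 *	MODULE_ALIAS_CRYPTO("sha256");
 *
 * in its own source so that a lookup by generic name can auto-load it.
 */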
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
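
/*
 * Flow sketch (illustrative, not part of the original file): a full
 * lookup for a templated algorithm such as "xts(aes)" proceeds roughly
 * as
 *
 *	crypto_alg_mod_lookup()
 *	  -> crypto_larval_lookup()   already registered? module alias?
 *	  -> crypto_probing_notify()  ask cryptomgr to instantiate the
 *	                              template on the fly
 *	  -> crypto_larval_wait()     wait for the new algorithm to
 *	                              register and pass testing
 */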
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
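
/*
 * Usage sketch (illustrative only, not part of the original file):
 * allocating a bare cipher transform by name and releasing it with
 * crypto_free_tfm().  The algorithm name "aes" is an example.
 */
static int __maybe_unused example_alloc_base(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);	/* e.g. -ENOENT if no driver */

	/* ... use the transform ... */

	crypto_free_tfm(tfm);
	return 0;
}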
void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);
/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
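
/*
 * Usage sketch (illustrative only, not part of the original file):
 * new-style users never call crypto_alloc_tfm() directly; they go
 * through a typed wrapper such as crypto_alloc_shash(), which supplies
 * the frontend for them.  This sketch assumes #include <crypto/hash.h>,
 * which this file does not pull in itself.
 */
static int __maybe_unused example_alloc_shash(void)
{
	struct crypto_shash *hash;

	hash = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(hash))
		return PTR_ERR(hash);

	/* ... feed data through a shash_desc ... */

	crypto_free_shash(hash);
	return 0;
}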
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
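
/*
 * Usage sketch (illustrative only, not part of the original file):
 * probing for an algorithm without keeping a reference.  Note that
 * crypto_has_alg() may trigger module auto-loading just like the
 * allocation functions.
 */
static void __maybe_unused example_probe(void)
{
	if (crypto_has_alg("sha256", 0, 0))
		pr_info("sha256 is available\n");
}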
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");