/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * This program is distributed in the hope it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
 */
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>
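
/*
 * Usage sketch: "pcrypt" is an algorithm template, so a parallelized
 * AEAD is requested by wrapping an existing AEAD name, for example:
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("pcrypt(authenc(hmac(sha1),cbc(aes)))",
 *				0, 0);
 *
 * Encrypt requests are then spread over the "pencrypt" padata instance
 * and decrypt requests over "pdecrypt"; padata serializes completions,
 * so requests complete in the order they were submitted.
 */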
struct padata_pcrypt {
	struct padata_instance *pinst;
	struct workqueue_struct *wq;

	/*
	 * Cpumask for callback CPUs. It should be
	 * equal to serial cpumask of corresponding padata instance,
	 * so it is updated when padata notifies us about serial
	 * cpumask change.
	 *
	 * cb_cpumask is protected by RCU. This fact prevents us from
	 * using cpumask_var_t directly because the actual type of
	 * cpumask_var_t depends on kernel configuration (particularly on
	 * CONFIG_CPUMASK_OFFSTACK macro). Depending on the configuration
	 * cpumask_var_t may be either a pointer to the struct cpumask
	 * or a variable allocated on the stack. Thus we can not safely use
	 * cpumask_var_t with RCU operations such as rcu_assign_pointer or
	 * rcu_dereference. So cpumask_var_t is wrapped with struct
	 * pcrypt_cpumask, which makes it possible to use it with RCU.
	 */
	struct pcrypt_cpumask {
		cpumask_var_t mask;
	} *cb_cpumask;
	struct notifier_block nblock;
};

static struct padata_pcrypt pencrypt;
static struct padata_pcrypt pdecrypt;
static struct kset *pcrypt_kset;

struct pcrypt_instance_ctx {
	struct crypto_spawn spawn;
	unsigned int tfm_count;
};

struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};
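
/*
 * Hand a request to padata for parallel processing. The callback CPU
 * recorded in *cb_cpu is used if it is present in the instance's serial
 * cpumask; otherwise a CPU from that mask is picked by index and written
 * back, so subsequent requests from the same tfm stick to it.
 */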
static int pcrypt_do_parallel(struct padata_priv *padata, unsigned int *cb_cpu,
			      struct padata_pcrypt *pcrypt)
{
	unsigned int cpu_index, cpu, i;
	struct pcrypt_cpumask *cpumask;

	cpu = *cb_cpu;

	rcu_read_lock_bh();
	cpumask = rcu_dereference_bh(pcrypt->cb_cpumask);
	if (cpumask_test_cpu(cpu, cpumask->mask))
		goto out;

	if (!cpumask_weight(cpumask->mask))
		goto out;

	cpu_index = cpu % cpumask_weight(cpumask->mask);

	cpu = cpumask_first(cpumask->mask);
	for (i = 0; i < cpu_index; i++)
		cpu = cpumask_next(cpu, cpumask->mask);

	*cb_cpu = cpu;

out:
	rcu_read_unlock_bh();
	return padata_do_parallel(pcrypt->pinst, padata, cpu);
}
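
/* setkey and setauthsize simply forward to the wrapped child transform. */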
static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

static void pcrypt_aead_giv_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->areq.base.data, padata->info);
}

static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;
	req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	padata_do_serial(padata);
}
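
/*
 * Parallel workers: run the actual crypto operation on the inner
 * request. If the child completes asynchronously, pcrypt_aead_done()
 * takes care of the serialization step instead.
 */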
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_encrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}
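
/*
 * Entry points: the request context holds a pcrypt_request (with the
 * embedded padata_priv) followed by an inner request for the child
 * transform; see the reqsize computation in pcrypt_aead_init_tfm().
 * Set the inner request up to mirror the caller's request and queue it
 * for parallel processing.
 */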
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_assoc(creq, req->assoc, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_decrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_assoc(creq, req->assoc, req->assoclen);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pdecrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}

static void pcrypt_aead_givenc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_givcrypt_request *req = pcrypt_request_ctx(preq);

	padata->info = crypto_aead_givencrypt(req);

	if (padata->info == -EINPROGRESS)
		return;

	padata_do_serial(padata);
}

static int pcrypt_aead_givencrypt(struct aead_givcrypt_request *req)
{
	int err;
	struct aead_request *areq = &req->areq;
	struct pcrypt_request *preq = aead_request_ctx(areq);
	struct aead_givcrypt_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = aead_givcrypt_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(areq);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_givenc;
	padata->serial = pcrypt_aead_giv_serial;

	aead_givcrypt_set_tfm(creq, ctx->child);
	aead_givcrypt_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				   pcrypt_aead_done, areq);
	aead_givcrypt_set_crypt(creq, areq->src, areq->dst,
				areq->cryptlen, areq->iv);
	aead_givcrypt_set_assoc(creq, areq->assoc, areq->assoclen);
	aead_givcrypt_set_giv(creq, req->giv, req->seq);

	err = pcrypt_do_parallel(padata, &ctx->cb_cpu, &pencrypt);
	if (!err)
		return -EINPROGRESS;

	return err;
}
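
/*
 * Spread the callback CPUs of new tfms round-robin over the online
 * CPUs, so completion callbacks of different transforms do not all
 * pile up on one CPU.
 */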
static int pcrypt_aead_init_tfm(struct crypto_tfm *tfm)
{
	int cpu, cpu_index;
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = crypto_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_aead *cipher;

	ictx->tfm_count++;

	cpu_index = ictx->tfm_count % cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(crypto_instance_ctx(inst));

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	tfm->crt_aead.reqsize = sizeof(struct pcrypt_request)
		+ sizeof(struct aead_givcrypt_request)
		+ crypto_aead_reqsize(cipher);

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_tfm *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->child);
}
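
/*
 * Build the template instance for a wrapped algorithm: the driver name
 * becomes "pcrypt(<child driver>)", the user-visible cra_name stays
 * that of the child, and the priority is raised by 100 so the parallel
 * version wins algorithm lookup.
 */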
static struct crypto_instance *pcrypt_alloc_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst;
	struct pcrypt_instance_ctx *ctx;
	int err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst) {
		inst = ERR_PTR(-ENOMEM);
		goto out;
	}

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_inst;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	ctx = crypto_instance_ctx(inst);
	err = crypto_init_spawn(&ctx->spawn, alg, inst,
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto out_free_inst;

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

out:
	return inst;

out_free_inst:
	kfree(inst);
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_instance *pcrypt_alloc_aead(struct rtattr **tb,
						 u32 type, u32 mask)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;

	alg = crypto_get_attr_alg(tb, type, (mask & CRYPTO_ALG_TYPE_MASK));
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = pcrypt_alloc_instance(alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC;
	inst->alg.cra_type = &crypto_aead_type;

	inst->alg.cra_aead.ivsize = alg->cra_aead.ivsize;
	inst->alg.cra_aead.geniv = alg->cra_aead.geniv;
	inst->alg.cra_aead.maxauthsize = alg->cra_aead.maxauthsize;

	inst->alg.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.cra_init = pcrypt_aead_init_tfm;
	inst->alg.cra_exit = pcrypt_aead_exit_tfm;

	inst->alg.cra_aead.setkey = pcrypt_aead_setkey;
	inst->alg.cra_aead.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.cra_aead.encrypt = pcrypt_aead_encrypt;
	inst->alg.cra_aead.decrypt = pcrypt_aead_decrypt;
	inst->alg.cra_aead.givencrypt = pcrypt_aead_givencrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static struct crypto_instance *pcrypt_alloc(struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return ERR_CAST(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_alloc_aead(tb, algt->type, algt->mask);
	}

	return ERR_PTR(-EINVAL);
}

static void pcrypt_free(struct crypto_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = crypto_instance_ctx(inst);

	crypto_drop_spawn(&ctx->spawn);
	kfree(inst);
}
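
/*
 * padata notifies us when its serial cpumask changes. Replace our RCU
 * protected copy: publish the new mask with rcu_assign_pointer(), wait
 * for readers in pcrypt_do_parallel() to drain, then free the old one.
 */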
static int pcrypt_cpumask_change_notify(struct notifier_block *self,
					unsigned long val, void *data)
{
	struct padata_pcrypt *pcrypt;
	struct pcrypt_cpumask *new_mask, *old_mask;
	struct padata_cpumask *cpumask = (struct padata_cpumask *)data;

	if (!(val & PADATA_CPU_SERIAL))
		return 0;

	pcrypt = container_of(self, struct padata_pcrypt, nblock);
	new_mask = kmalloc(sizeof(*new_mask), GFP_KERNEL);
	if (!new_mask)
		return -ENOMEM;
	if (!alloc_cpumask_var(&new_mask->mask, GFP_KERNEL)) {
		kfree(new_mask);
		return -ENOMEM;
	}

	old_mask = pcrypt->cb_cpumask;

	cpumask_copy(new_mask->mask, cpumask->cbcpu);
	rcu_assign_pointer(pcrypt->cb_cpumask, new_mask);
	synchronize_rcu_bh();

	free_cpumask_var(old_mask->mask);
	kfree(old_mask);
	return 0;
}

static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}
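
/*
 * Bring up one padata instance: a dedicated workqueue, the padata
 * instance itself, the initial callback cpumask, the cpumask change
 * notifier, and the sysfs node under /sys/kernel/pcrypt/<name>.
 */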
static int pcrypt_init_padata(struct padata_pcrypt *pcrypt,
			      const char *name)
{
	int ret = -ENOMEM;
	struct pcrypt_cpumask *mask;

	get_online_cpus();

	pcrypt->wq = alloc_workqueue("%s", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				     1, name);
	if (!pcrypt->wq)
		goto err;

	pcrypt->pinst = padata_alloc_possible(pcrypt->wq);
	if (!pcrypt->pinst)
		goto err_destroy_workqueue;

	mask = kmalloc(sizeof(*mask), GFP_KERNEL);
	if (!mask)
		goto err_free_padata;
	if (!alloc_cpumask_var(&mask->mask, GFP_KERNEL)) {
		kfree(mask);
		goto err_free_padata;
	}

	cpumask_and(mask->mask, cpu_possible_mask, cpu_online_mask);
	rcu_assign_pointer(pcrypt->cb_cpumask, mask);

	pcrypt->nblock.notifier_call = pcrypt_cpumask_change_notify;
	ret = padata_register_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	if (ret)
		goto err_free_cpumask;

	ret = pcrypt_sysfs_add(pcrypt->pinst, name);
	if (ret)
		goto err_unregister_notifier;

	put_online_cpus();

	return ret;

err_unregister_notifier:
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
err_free_cpumask:
	free_cpumask_var(mask->mask);
	kfree(mask);
err_free_padata:
	padata_free(pcrypt->pinst);
err_destroy_workqueue:
	destroy_workqueue(pcrypt->wq);
err:
	put_online_cpus();

	return ret;
}
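
/*
 * Tear down in reverse: drop the callback cpumask, stop the padata
 * instance, unregister the notifier, then release the workqueue and
 * the instance itself.
 */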
static void pcrypt_fini_padata(struct padata_pcrypt *pcrypt)
{
	free_cpumask_var(pcrypt->cb_cpumask->mask);
	kfree(pcrypt->cb_cpumask);

	padata_stop(pcrypt->pinst);
	padata_unregister_cpumask_notifier(pcrypt->pinst, &pcrypt->nblock);
	destroy_workqueue(pcrypt->wq);
	padata_free(pcrypt->pinst);
}

static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.alloc = pcrypt_alloc,
	.free = pcrypt_free,
	.module = THIS_MODULE,
};
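
/*
 * Module init: create the sysfs kset, bring up the "pencrypt" and
 * "pdecrypt" padata instances, start them, and register the template.
 */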
static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt.pinst);
	padata_start(pdecrypt.pinst);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(&pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	pcrypt_fini_padata(&pencrypt);
	pcrypt_fini_padata(&pdecrypt);

	kset_unregister(pcrypt_kset);
	crypto_unregister_template(&pcrypt_tmpl);
}

module_init(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");