// SPDX-License-Identifier: GPL-2.0-only
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>
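/*
 * Usage sketch (illustrative, not part of the wrapper itself): the template
 * registered below is named "pcrypt", so a parallelized AEAD can be obtained
 * through the normal crypto API by wrapping an existing AEAD name, e.g.:
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("pcrypt(gcm(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 * Requests issued on such a tfm are spread over the "pencrypt"/"pdecrypt"
 * padata instances and completed back in submission order.
 */
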
static struct padata_instance *pencrypt;
static struct padata_instance *pdecrypt;
static struct kset           *pcrypt_kset;

struct pcrypt_instance_ctx {
        struct crypto_aead_spawn spawn;
        struct padata_shell *psenc;
        struct padata_shell *psdec;
        atomic_t tfm_count;
};

struct pcrypt_aead_ctx {
        struct crypto_aead *child;
        unsigned int cb_cpu;
};

static inline struct pcrypt_instance_ctx *pcrypt_tfm_ictx(
        struct crypto_aead *tfm)
{
        return aead_instance_ctx(aead_alg_instance(tfm));
}

static int pcrypt_aead_setkey(struct crypto_aead *parent,
                              const u8 *key, unsigned int keylen)
{
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

        return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
                                   unsigned int authsize)
{
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

        return crypto_aead_setauthsize(ctx->child, authsize);
}

static void pcrypt_aead_serial(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_request *req = pcrypt_request_ctx(preq);

        aead_request_complete(req->base.data, padata->info);
}

static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
        struct aead_request *req = areq->data;
        struct pcrypt_request *preq = aead_request_ctx(req);
        struct padata_priv *padata = pcrypt_request_padata(preq);

        padata->info = err;

        padata_do_serial(padata);
}

static void pcrypt_aead_enc(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_request *req = pcrypt_request_ctx(preq);

        padata->info = crypto_aead_encrypt(req);

        if (padata->info == -EINPROGRESS)
                return;

        padata_do_serial(padata);
}

static int pcrypt_aead_encrypt(struct aead_request *req)
{
        int err;
        struct pcrypt_request *preq = aead_request_ctx(req);
        struct aead_request *creq = pcrypt_request_ctx(preq);
        struct padata_priv *padata = pcrypt_request_padata(preq);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
        u32 flags = aead_request_flags(req);
        struct pcrypt_instance_ctx *ictx;

        ictx = pcrypt_tfm_ictx(aead);

        memset(padata, 0, sizeof(struct padata_priv));

        padata->parallel = pcrypt_aead_enc;
        padata->serial = pcrypt_aead_serial;

        aead_request_set_tfm(creq, ctx->child);
        aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
                                  pcrypt_aead_done, req);
        aead_request_set_crypt(creq, req->src, req->dst,
                               req->cryptlen, req->iv);
        aead_request_set_ad(creq, req->assoclen);

        err = padata_do_parallel(ictx->psenc, padata, &ctx->cb_cpu);
        if (!err)
                return -EINPROGRESS;

        return err;
}

static void pcrypt_aead_dec(struct padata_priv *padata)
{
        struct pcrypt_request *preq = pcrypt_padata_request(padata);
        struct aead_request *req = pcrypt_request_ctx(preq);

        padata->info = crypto_aead_decrypt(req);

        if (padata->info == -EINPROGRESS)
                return;

        padata_do_serial(padata);
}

static int pcrypt_aead_decrypt(struct aead_request *req)
{
        int err;
        struct pcrypt_request *preq = aead_request_ctx(req);
        struct aead_request *creq = pcrypt_request_ctx(preq);
        struct padata_priv *padata = pcrypt_request_padata(preq);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
        u32 flags = aead_request_flags(req);
        struct pcrypt_instance_ctx *ictx;

        ictx = pcrypt_tfm_ictx(aead);

        memset(padata, 0, sizeof(struct padata_priv));

        padata->parallel = pcrypt_aead_dec;
        padata->serial = pcrypt_aead_serial;

        aead_request_set_tfm(creq, ctx->child);
        aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
                                  pcrypt_aead_done, req);
        aead_request_set_crypt(creq, req->src, req->dst,
                               req->cryptlen, req->iv);
        aead_request_set_ad(creq, req->assoclen);

        err = padata_do_parallel(ictx->psdec, padata, &ctx->cb_cpu);
        if (!err)
                return -EINPROGRESS;

        return err;
}

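/*
 * Transform init: pick a callback CPU round-robin over the online CPUs so
 * that serialization work is spread across the machine, then instantiate
 * the underlying (child) AEAD and size the request context to hold both
 * the pcrypt bookkeeping and the child's request.
 */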
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
        int cpu, cpu_index;
        struct aead_instance *inst = aead_alg_instance(tfm);
        struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
        struct crypto_aead *cipher;

        cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
                    cpumask_weight(cpu_online_mask);

        ctx->cb_cpu = cpumask_first(cpu_online_mask);
        for (cpu = 0; cpu < cpu_index; cpu++)
                ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

        cipher = crypto_spawn_aead(&ictx->spawn);

        if (IS_ERR(cipher))
                return PTR_ERR(cipher);

        ctx->child = cipher;
        crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
                                     sizeof(struct aead_request) +
                                     crypto_aead_reqsize(cipher));

        return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
        struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

        crypto_free_aead(ctx->child);
}

static void pcrypt_free(struct aead_instance *inst)
{
        struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

        crypto_drop_aead(&ctx->spawn);
        padata_free_shell(ctx->psdec);
        padata_free_shell(ctx->psenc);
        kfree(inst);
}

static int pcrypt_init_instance(struct crypto_instance *inst,
                                struct crypto_alg *alg)
{
        if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
                     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
                return -ENAMETOOLONG;

        memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

        inst->alg.cra_priority = alg->cra_priority + 100;
        inst->alg.cra_blocksize = alg->cra_blocksize;
        inst->alg.cra_alignmask = alg->cra_alignmask;

        return 0;
}

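/*
 * Build a "pcrypt(<alg>)" AEAD instance: allocate one padata shell per
 * direction, grab the underlying AEAD, and register an async wrapper whose
 * priority is bumped by 100 so it is preferred over the wrapped algorithm.
 */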
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
                              u32 type, u32 mask)
{
        struct pcrypt_instance_ctx *ctx;
        struct crypto_attr_type *algt;
        struct aead_instance *inst;
        struct aead_alg *alg;
        const char *name;
        int err;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        name = crypto_attr_alg_name(tb[1]);
        if (IS_ERR(name))
                return PTR_ERR(name);

        inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
        if (!inst)
                return -ENOMEM;

        err = -ENOMEM;

        ctx = aead_instance_ctx(inst);
        ctx->psenc = padata_alloc_shell(pencrypt);
        if (!ctx->psenc)
                goto out_free_inst;

        ctx->psdec = padata_alloc_shell(pdecrypt);
        if (!ctx->psdec)
                goto out_free_psenc;

        err = crypto_grab_aead(&ctx->spawn, aead_crypto_instance(inst),
                               name, 0, 0);
        if (err)
                goto out_free_psdec;

        alg = crypto_spawn_aead_alg(&ctx->spawn);
        err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
        if (err)
                goto out_drop_aead;

        inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;

        inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
        inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

        inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

        inst->alg.init = pcrypt_aead_init_tfm;
        inst->alg.exit = pcrypt_aead_exit_tfm;

        inst->alg.setkey = pcrypt_aead_setkey;
        inst->alg.setauthsize = pcrypt_aead_setauthsize;
        inst->alg.encrypt = pcrypt_aead_encrypt;
        inst->alg.decrypt = pcrypt_aead_decrypt;

        inst->free = pcrypt_free;

        err = aead_register_instance(tmpl, inst);
        if (err)
                goto out_drop_aead;

out:
        return err;

out_drop_aead:
        crypto_drop_aead(&ctx->spawn);
out_free_psdec:
        padata_free_shell(ctx->psdec);
out_free_psenc:
        padata_free_shell(ctx->psenc);
out_free_inst:
        kfree(inst);
        goto out;
}

static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
        struct crypto_attr_type *algt;

        algt = crypto_get_attr_type(tb);
        if (IS_ERR(algt))
                return PTR_ERR(algt);

        switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
        case CRYPTO_ALG_TYPE_AEAD:
                return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
        }

        return -EINVAL;
}

static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
        int ret;

        pinst->kobj.kset = pcrypt_kset;
        ret = kobject_add(&pinst->kobj, NULL, "%s", name);
        if (!ret)
                kobject_uevent(&pinst->kobj, KOBJ_ADD);

        return ret;
}

static int pcrypt_init_padata(struct padata_instance **pinst, const char *name)
{
        int ret = -ENOMEM;

        *pinst = padata_alloc_possible(name);
        if (!*pinst)
                return ret;

        ret = pcrypt_sysfs_add(*pinst, name);
        if (ret)
                padata_free(*pinst);

        return ret;
}

static void pcrypt_fini_padata(struct padata_instance *pinst)
{
        padata_stop(pinst);
        padata_free(pinst);
}

static struct crypto_template pcrypt_tmpl = {
        .name = "pcrypt",
        .create = pcrypt_create,
        .module = THIS_MODULE,
};

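/*
 * Module init: create the sysfs kset and the two padata instances
 * ("pencrypt" and "pdecrypt"), start them, and register the template.
 * The per-instance sysfs directories expose the parallel and serial
 * cpumasks used by padata.
 */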
static int __init pcrypt_init(void)
{
        int err = -ENOMEM;

        pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
        if (!pcrypt_kset)
                goto err;

        err = pcrypt_init_padata(&pencrypt, "pencrypt");
        if (err)
                goto err_unreg_kset;

        err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
        if (err)
                goto err_deinit_pencrypt;

        padata_start(pencrypt);
        padata_start(pdecrypt);

        return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
        pcrypt_fini_padata(pencrypt);
err_unreg_kset:
        kset_unregister(pcrypt_kset);
err:
        return err;
}

static void __exit pcrypt_exit(void)
{
        crypto_unregister_template(&pcrypt_tmpl);

        pcrypt_fini_padata(pencrypt);
        pcrypt_fini_padata(pdecrypt);

        kset_unregister(pcrypt_kset);
}

subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");