1 // SPDX-License-Identifier: GPL-2.0-only
3 * pcrypt - Parallel crypto wrapper.
5 * Copyright (C) 2009 secunet Security Networks AG
6 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
9 #include <crypto/algapi.h>
10 #include <crypto/internal/aead.h>
11 #include <linux/atomic.h>
12 #include <linux/err.h>
13 #include <linux/init.h>
14 #include <linux/module.h>
15 #include <linux/slab.h>
16 #include <linux/kobject.h>
17 #include <linux/cpu.h>
18 #include <crypto/pcrypt.h>
/* padata instances backing the parallel encrypt and decrypt paths. */
static struct padata_instance *pencrypt;
static struct padata_instance *pdecrypt;
/* sysfs kset under which both padata instances are registered. */
static struct kset *pcrypt_kset;
24 struct pcrypt_instance_ctx
{
25 struct crypto_aead_spawn spawn
;
26 struct padata_shell
*psenc
;
27 struct padata_shell
*psdec
;
/*
 * Per-transform context: the child AEAD doing the real work, plus the
 * CPU on which serial completion callbacks run.
 */
struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	/*
	 * Callback CPU chosen in pcrypt_aead_init_tfm(); was missing
	 * although both encrypt/decrypt paths pass &ctx->cb_cpu to
	 * padata_do_parallel().
	 */
	unsigned int cb_cpu;
};
/* Return the pcrypt instance context that owns this transform. */
static inline struct pcrypt_instance_ctx *pcrypt_tfm_ictx(
	struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);

	return aead_instance_ctx(inst);
}
42 static int pcrypt_aead_setkey(struct crypto_aead
*parent
,
43 const u8
*key
, unsigned int keylen
)
45 struct pcrypt_aead_ctx
*ctx
= crypto_aead_ctx(parent
);
47 return crypto_aead_setkey(ctx
->child
, key
, keylen
);
50 static int pcrypt_aead_setauthsize(struct crypto_aead
*parent
,
51 unsigned int authsize
)
53 struct pcrypt_aead_ctx
*ctx
= crypto_aead_ctx(parent
);
55 return crypto_aead_setauthsize(ctx
->child
, authsize
);
58 static void pcrypt_aead_serial(struct padata_priv
*padata
)
60 struct pcrypt_request
*preq
= pcrypt_padata_request(padata
);
61 struct aead_request
*req
= pcrypt_request_ctx(preq
);
63 aead_request_complete(req
->base
.data
, padata
->info
);
66 static void pcrypt_aead_done(void *data
, int err
)
68 struct aead_request
*req
= data
;
69 struct pcrypt_request
*preq
= aead_request_ctx(req
);
70 struct padata_priv
*padata
= pcrypt_request_padata(preq
);
74 padata_do_serial(padata
);
77 static void pcrypt_aead_enc(struct padata_priv
*padata
)
79 struct pcrypt_request
*preq
= pcrypt_padata_request(padata
);
80 struct aead_request
*req
= pcrypt_request_ctx(preq
);
83 ret
= crypto_aead_encrypt(req
);
85 if (ret
== -EINPROGRESS
)
89 padata_do_serial(padata
);
92 static int pcrypt_aead_encrypt(struct aead_request
*req
)
95 struct pcrypt_request
*preq
= aead_request_ctx(req
);
96 struct aead_request
*creq
= pcrypt_request_ctx(preq
);
97 struct padata_priv
*padata
= pcrypt_request_padata(preq
);
98 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
99 struct pcrypt_aead_ctx
*ctx
= crypto_aead_ctx(aead
);
100 u32 flags
= aead_request_flags(req
);
101 struct pcrypt_instance_ctx
*ictx
;
103 ictx
= pcrypt_tfm_ictx(aead
);
105 memset(padata
, 0, sizeof(struct padata_priv
));
107 padata
->parallel
= pcrypt_aead_enc
;
108 padata
->serial
= pcrypt_aead_serial
;
110 aead_request_set_tfm(creq
, ctx
->child
);
111 aead_request_set_callback(creq
, flags
& ~CRYPTO_TFM_REQ_MAY_SLEEP
,
112 pcrypt_aead_done
, req
);
113 aead_request_set_crypt(creq
, req
->src
, req
->dst
,
114 req
->cryptlen
, req
->iv
);
115 aead_request_set_ad(creq
, req
->assoclen
);
117 err
= padata_do_parallel(ictx
->psenc
, padata
, &ctx
->cb_cpu
);
121 /* try non-parallel mode */
122 return crypto_aead_encrypt(creq
);
128 static void pcrypt_aead_dec(struct padata_priv
*padata
)
130 struct pcrypt_request
*preq
= pcrypt_padata_request(padata
);
131 struct aead_request
*req
= pcrypt_request_ctx(preq
);
134 ret
= crypto_aead_decrypt(req
);
136 if (ret
== -EINPROGRESS
)
140 padata_do_serial(padata
);
143 static int pcrypt_aead_decrypt(struct aead_request
*req
)
146 struct pcrypt_request
*preq
= aead_request_ctx(req
);
147 struct aead_request
*creq
= pcrypt_request_ctx(preq
);
148 struct padata_priv
*padata
= pcrypt_request_padata(preq
);
149 struct crypto_aead
*aead
= crypto_aead_reqtfm(req
);
150 struct pcrypt_aead_ctx
*ctx
= crypto_aead_ctx(aead
);
151 u32 flags
= aead_request_flags(req
);
152 struct pcrypt_instance_ctx
*ictx
;
154 ictx
= pcrypt_tfm_ictx(aead
);
156 memset(padata
, 0, sizeof(struct padata_priv
));
158 padata
->parallel
= pcrypt_aead_dec
;
159 padata
->serial
= pcrypt_aead_serial
;
161 aead_request_set_tfm(creq
, ctx
->child
);
162 aead_request_set_callback(creq
, flags
& ~CRYPTO_TFM_REQ_MAY_SLEEP
,
163 pcrypt_aead_done
, req
);
164 aead_request_set_crypt(creq
, req
->src
, req
->dst
,
165 req
->cryptlen
, req
->iv
);
166 aead_request_set_ad(creq
, req
->assoclen
);
168 err
= padata_do_parallel(ictx
->psdec
, padata
, &ctx
->cb_cpu
);
172 /* try non-parallel mode */
173 return crypto_aead_decrypt(creq
);
179 static int pcrypt_aead_init_tfm(struct crypto_aead
*tfm
)
182 struct aead_instance
*inst
= aead_alg_instance(tfm
);
183 struct pcrypt_instance_ctx
*ictx
= aead_instance_ctx(inst
);
184 struct pcrypt_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
185 struct crypto_aead
*cipher
;
187 cpu_index
= (unsigned int)atomic_inc_return(&ictx
->tfm_count
) %
188 cpumask_weight(cpu_online_mask
);
190 ctx
->cb_cpu
= cpumask_first(cpu_online_mask
);
191 for (cpu
= 0; cpu
< cpu_index
; cpu
++)
192 ctx
->cb_cpu
= cpumask_next(ctx
->cb_cpu
, cpu_online_mask
);
194 cipher
= crypto_spawn_aead(&ictx
->spawn
);
197 return PTR_ERR(cipher
);
200 crypto_aead_set_reqsize(tfm
, sizeof(struct pcrypt_request
) +
201 sizeof(struct aead_request
) +
202 crypto_aead_reqsize(cipher
));
207 static void pcrypt_aead_exit_tfm(struct crypto_aead
*tfm
)
209 struct pcrypt_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
211 crypto_free_aead(ctx
->child
);
214 static void pcrypt_free(struct aead_instance
*inst
)
216 struct pcrypt_instance_ctx
*ctx
= aead_instance_ctx(inst
);
218 crypto_drop_aead(&ctx
->spawn
);
219 padata_free_shell(ctx
->psdec
);
220 padata_free_shell(ctx
->psenc
);
224 static int pcrypt_init_instance(struct crypto_instance
*inst
,
225 struct crypto_alg
*alg
)
227 if (snprintf(inst
->alg
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
,
228 "pcrypt(%s)", alg
->cra_driver_name
) >= CRYPTO_MAX_ALG_NAME
)
229 return -ENAMETOOLONG
;
231 memcpy(inst
->alg
.cra_name
, alg
->cra_name
, CRYPTO_MAX_ALG_NAME
);
233 inst
->alg
.cra_priority
= alg
->cra_priority
+ 100;
234 inst
->alg
.cra_blocksize
= alg
->cra_blocksize
;
235 inst
->alg
.cra_alignmask
= alg
->cra_alignmask
;
240 static int pcrypt_create_aead(struct crypto_template
*tmpl
, struct rtattr
**tb
,
241 struct crypto_attr_type
*algt
)
243 struct pcrypt_instance_ctx
*ctx
;
244 struct aead_instance
*inst
;
245 struct aead_alg
*alg
;
246 u32 mask
= crypto_algt_inherited_mask(algt
);
249 inst
= kzalloc(sizeof(*inst
) + sizeof(*ctx
), GFP_KERNEL
);
255 ctx
= aead_instance_ctx(inst
);
256 ctx
->psenc
= padata_alloc_shell(pencrypt
);
260 ctx
->psdec
= padata_alloc_shell(pdecrypt
);
264 err
= crypto_grab_aead(&ctx
->spawn
, aead_crypto_instance(inst
),
265 crypto_attr_alg_name(tb
[1]), 0, mask
);
269 alg
= crypto_spawn_aead_alg(&ctx
->spawn
);
270 err
= pcrypt_init_instance(aead_crypto_instance(inst
), &alg
->base
);
274 inst
->alg
.base
.cra_flags
|= CRYPTO_ALG_ASYNC
;
276 inst
->alg
.ivsize
= crypto_aead_alg_ivsize(alg
);
277 inst
->alg
.maxauthsize
= crypto_aead_alg_maxauthsize(alg
);
279 inst
->alg
.base
.cra_ctxsize
= sizeof(struct pcrypt_aead_ctx
);
281 inst
->alg
.init
= pcrypt_aead_init_tfm
;
282 inst
->alg
.exit
= pcrypt_aead_exit_tfm
;
284 inst
->alg
.setkey
= pcrypt_aead_setkey
;
285 inst
->alg
.setauthsize
= pcrypt_aead_setauthsize
;
286 inst
->alg
.encrypt
= pcrypt_aead_encrypt
;
287 inst
->alg
.decrypt
= pcrypt_aead_decrypt
;
289 inst
->free
= pcrypt_free
;
291 err
= aead_register_instance(tmpl
, inst
);
299 static int pcrypt_create(struct crypto_template
*tmpl
, struct rtattr
**tb
)
301 struct crypto_attr_type
*algt
;
303 algt
= crypto_get_attr_type(tb
);
305 return PTR_ERR(algt
);
307 switch (algt
->type
& algt
->mask
& CRYPTO_ALG_TYPE_MASK
) {
308 case CRYPTO_ALG_TYPE_AEAD
:
309 return pcrypt_create_aead(tmpl
, tb
, algt
);
315 static int pcrypt_sysfs_add(struct padata_instance
*pinst
, const char *name
)
319 pinst
->kobj
.kset
= pcrypt_kset
;
320 ret
= kobject_add(&pinst
->kobj
, NULL
, "%s", name
);
322 kobject_uevent(&pinst
->kobj
, KOBJ_ADD
);
327 static int pcrypt_init_padata(struct padata_instance
**pinst
, const char *name
)
331 *pinst
= padata_alloc(name
);
335 ret
= pcrypt_sysfs_add(*pinst
, name
);
342 static struct crypto_template pcrypt_tmpl
= {
344 .create
= pcrypt_create
,
345 .module
= THIS_MODULE
,
348 static int __init
pcrypt_init(void)
352 pcrypt_kset
= kset_create_and_add("pcrypt", NULL
, kernel_kobj
);
356 err
= pcrypt_init_padata(&pencrypt
, "pencrypt");
360 err
= pcrypt_init_padata(&pdecrypt
, "pdecrypt");
362 goto err_deinit_pencrypt
;
364 return crypto_register_template(&pcrypt_tmpl
);
367 padata_free(pencrypt
);
369 kset_unregister(pcrypt_kset
);
374 static void __exit
pcrypt_exit(void)
376 crypto_unregister_template(&pcrypt_tmpl
);
378 padata_free(pencrypt
);
379 padata_free(pdecrypt
);
381 kset_unregister(pcrypt_kset
);
/* Register early (subsystem init) so dependent algorithms can wrap us. */
subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");