// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared crypto simd helpers
 *
 * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 * Copyright (c) 2016 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Based on aesni-intel_glue.c by:
 *  Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 */

/*
 * Shared crypto SIMD helpers.  These functions dynamically create and register
 * an skcipher or AEAD algorithm that wraps another, internal algorithm.  The
 * wrapper ensures that the internal algorithm is only executed in a context
 * where SIMD instructions are usable, i.e. where may_use_simd() returns true.
 * If SIMD is already usable, the wrapper directly calls the internal algorithm.
 * Otherwise it defers execution to a workqueue via cryptd.
 *
 * This is an alternative to the internal algorithm implementing a fallback for
 * the !may_use_simd() case itself.
 *
 * Note that the wrapper algorithm is asynchronous, i.e. it has the
 * CRYPTO_ALG_ASYNC flag set.  Therefore it won't be found by users who
 * explicitly allocate a synchronous algorithm.
 */
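
/*
 * Minimal usage sketch (hypothetical "mydriver" names, for illustration
 * only): a driver declares internal, SIMD-only algorithms whose names carry
 * a "__" prefix and the CRYPTO_ALG_INTERNAL flag, then lets the helpers
 * below create and register the async wrappers:
 *
 *	static struct skcipher_alg mydriver_algs[] = {
 *		{ .base.cra_name = "__ctr(aes)",
 *		  .base.cra_driver_name = "__ctr-aes-mydriver",
 *		  .base.cra_flags = CRYPTO_ALG_INTERNAL, ... },
 *	};
 *	static struct simd_skcipher_alg *mydriver_simd_algs[ARRAY_SIZE(mydriver_algs)];
 *
 *	static int __init mydriver_init(void)
 *	{
 *		return simd_register_skciphers_compat(mydriver_algs,
 *						      ARRAY_SIZE(mydriver_algs),
 *						      mydriver_simd_algs);
 *	}
 */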

#include <crypto/cryptd.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/preempt.h>
#include <asm/simd.h>

/* skcipher support */

struct simd_skcipher_alg {
	const char *ialg_name;
	struct skcipher_alg alg;
};

struct simd_skcipher_ctx {
	struct cryptd_skcipher *cryptd_tfm;
};

static int simd_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				unsigned int key_len)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = &ctx->cryptd_tfm->base;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(tfm) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, key_len);
}
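
/*
 * The encrypt/decrypt handlers below choose, per request, which transform to
 * run the subrequest on: the internal (child) algorithm directly when SIMD is
 * usable, or the cryptd queue when it is not, or when running atomically
 * while cryptd already has requests pending (so this request cannot overtake
 * them).
 */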

static int simd_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_encrypt(subreq);
}

static int simd_skcipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq;
	struct crypto_skcipher *child;

	subreq = skcipher_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_skcipher_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_skcipher_child(ctx->cryptd_tfm);

	skcipher_request_set_tfm(subreq, child);

	return crypto_skcipher_decrypt(subreq);
}

static void simd_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	cryptd_free_skcipher(ctx->cryptd_tfm);
}

static int simd_skcipher_init(struct crypto_skcipher *tfm)
{
	struct simd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher *cryptd_tfm;
	struct simd_skcipher_alg *salg;
	struct skcipher_alg *alg;
	unsigned int reqsize;

	alg = crypto_skcipher_alg(tfm);
	salg = container_of(alg, struct simd_skcipher_alg, alg);

	cryptd_tfm = cryptd_alloc_skcipher(salg->ialg_name,
					   CRYPTO_ALG_INTERNAL,
					   CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/* Reserve room for a subrequest to either the child or cryptd tfm. */
	reqsize = crypto_skcipher_reqsize(cryptd_skcipher_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_skcipher_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct skcipher_request);

	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

struct simd_skcipher_alg *simd_skcipher_create_compat(struct skcipher_alg *ialg,
						      const char *algname,
						      const char *drvname,
						      const char *basename)
{
	struct simd_skcipher_alg *salg;
	struct skcipher_alg *alg;
	int err;

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC |
		(ialg->base.cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_skcipher_ctx);

	alg->ivsize = ialg->ivsize;
	alg->chunksize = ialg->chunksize;
	alg->min_keysize = ialg->min_keysize;
	alg->max_keysize = ialg->max_keysize;

	alg->init = simd_skcipher_init;
	alg->exit = simd_skcipher_exit;

	alg->setkey = simd_skcipher_setkey;
	alg->encrypt = simd_skcipher_encrypt;
	alg->decrypt = simd_skcipher_decrypt;

	err = crypto_register_skcipher(alg);
	if (err)
		goto out_free_salg;

out:
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out;
}
EXPORT_SYMBOL_GPL(simd_skcipher_create_compat);

void simd_skcipher_free(struct simd_skcipher_alg *salg)
{
	crypto_unregister_skcipher(&salg->alg);
	kfree(salg);
}
EXPORT_SYMBOL_GPL(simd_skcipher_free);
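
/*
 * Bulk registration helper: register the internal algorithms in @algs (whose
 * cra_name and cra_driver_name must begin with "__"), then create and
 * register an async SIMD wrapper for each one, deriving the wrapper's
 * algorithm and driver names by stripping the "__" prefix.
 */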

int simd_register_skciphers_compat(struct skcipher_alg *algs, int count,
				   struct simd_skcipher_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_skcipher_alg *simd;

	err = crypto_register_skciphers(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algs + i, algname, drvname,
						   basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}

	return 0;

err_unregister:
	simd_unregister_skciphers(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_skciphers_compat);

void simd_unregister_skciphers(struct skcipher_alg *algs, int count,
			       struct simd_skcipher_alg **simd_algs)
{
	int i;

	crypto_unregister_skciphers(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_skcipher_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_skciphers);

/* AEAD support */

struct simd_aead_alg {
	const char *ialg_name;
	struct aead_alg alg;
};

struct simd_aead_ctx {
	struct cryptd_aead *cryptd_tfm;
};

static int simd_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			    unsigned int key_len)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(tfm) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, key_len);
}

static int simd_aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *child = &ctx->cryptd_tfm->base;

	return crypto_aead_setauthsize(child, authsize);
}
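
/* The encrypt/decrypt dispatch below mirrors the skcipher handlers above. */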

static int simd_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_encrypt(subreq);
}

static int simd_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct aead_request *subreq;
	struct crypto_aead *child;

	subreq = aead_request_ctx(req);
	*subreq = *req;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_aead_queued(ctx->cryptd_tfm)))
		child = &ctx->cryptd_tfm->base;
	else
		child = cryptd_aead_child(ctx->cryptd_tfm);

	aead_request_set_tfm(subreq, child);

	return crypto_aead_decrypt(subreq);
}

static void simd_aead_exit(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);

	cryptd_free_aead(ctx->cryptd_tfm);
}

static int simd_aead_init(struct crypto_aead *tfm)
{
	struct simd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm;
	struct simd_aead_alg *salg;
	struct aead_alg *alg;
	unsigned int reqsize;

	alg = crypto_aead_alg(tfm);
	salg = container_of(alg, struct simd_aead_alg, alg);

	cryptd_tfm = cryptd_alloc_aead(salg->ialg_name, CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	ctx->cryptd_tfm = cryptd_tfm;

	/* Reserve room for a subrequest to either the child or cryptd tfm. */
	reqsize = crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm));
	reqsize = max(reqsize, crypto_aead_reqsize(&cryptd_tfm->base));
	reqsize += sizeof(struct aead_request);

	crypto_aead_set_reqsize(tfm, reqsize);

	return 0;
}

static struct simd_aead_alg *simd_aead_create_compat(struct aead_alg *ialg,
						     const char *algname,
						     const char *drvname,
						     const char *basename)
{
	struct simd_aead_alg *salg;
	struct aead_alg *alg;
	int err;

	salg = kzalloc(sizeof(*salg), GFP_KERNEL);
	if (!salg) {
		salg = ERR_PTR(-ENOMEM);
		goto out;
	}

	salg->ialg_name = basename;
	alg = &salg->alg;

	err = -ENAMETOOLONG;
	if (snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", algname) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	if (snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		     drvname) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_salg;

	alg->base.cra_flags = CRYPTO_ALG_ASYNC |
		(ialg->base.cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
	alg->base.cra_priority = ialg->base.cra_priority;
	alg->base.cra_blocksize = ialg->base.cra_blocksize;
	alg->base.cra_alignmask = ialg->base.cra_alignmask;
	alg->base.cra_module = ialg->base.cra_module;
	alg->base.cra_ctxsize = sizeof(struct simd_aead_ctx);

	alg->ivsize = ialg->ivsize;
	alg->maxauthsize = ialg->maxauthsize;
	alg->chunksize = ialg->chunksize;

	alg->init = simd_aead_init;
	alg->exit = simd_aead_exit;

	alg->setkey = simd_aead_setkey;
	alg->setauthsize = simd_aead_setauthsize;
	alg->encrypt = simd_aead_encrypt;
	alg->decrypt = simd_aead_decrypt;

	err = crypto_register_aead(alg);
	if (err)
		goto out_free_salg;

out:
	return salg;

out_free_salg:
	kfree(salg);
	salg = ERR_PTR(err);
	goto out;
}

static void simd_aead_free(struct simd_aead_alg *salg)
{
	crypto_unregister_aead(&salg->alg);
	kfree(salg);
}
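
/* AEAD counterparts of the skcipher bulk registration helpers above. */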

int simd_register_aeads_compat(struct aead_alg *algs, int count,
			       struct simd_aead_alg **simd_algs)
{
	int err;
	int i;
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_aead_alg *simd;

	err = crypto_register_aeads(algs, count);
	if (err)
		return err;

	for (i = 0; i < count; i++) {
		WARN_ON(strncmp(algs[i].base.cra_name, "__", 2));
		WARN_ON(strncmp(algs[i].base.cra_driver_name, "__", 2));
		algname = algs[i].base.cra_name + 2;
		drvname = algs[i].base.cra_driver_name + 2;
		basename = algs[i].base.cra_driver_name;
		simd = simd_aead_create_compat(algs + i, algname, drvname,
					       basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto err_unregister;
		simd_algs[i] = simd;
	}

	return 0;

err_unregister:
	simd_unregister_aeads(algs, count, simd_algs);
	return err;
}
EXPORT_SYMBOL_GPL(simd_register_aeads_compat);

void simd_unregister_aeads(struct aead_alg *algs, int count,
			   struct simd_aead_alg **simd_algs)
{
	int i;

	crypto_unregister_aeads(algs, count);

	for (i = 0; i < count; i++) {
		if (simd_algs[i]) {
			simd_aead_free(simd_algs[i]);
			simd_algs[i] = NULL;
		}
	}
}
EXPORT_SYMBOL_GPL(simd_unregister_aeads);

MODULE_DESCRIPTION("Shared crypto SIMD helpers");
MODULE_LICENSE("GPL");