// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared crypto simd helpers
 *
 * Copyright (c) 2012 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 * Copyright (c) 2016 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Based on aesni-intel_glue.c by:
 *  Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 */

/*
 * Shared crypto SIMD helpers.  These functions dynamically create and register
 * an skcipher or AEAD algorithm that wraps another, internal algorithm.  The
 * wrapper ensures that the internal algorithm is only executed in a context
 * where SIMD instructions are usable, i.e. where may_use_simd() returns true.
 * If SIMD is already usable, the wrapper directly calls the internal algorithm.
 * Otherwise it defers execution to a workqueue via cryptd.
 *
 * This is an alternative to the internal algorithm implementing a fallback for
 * the !may_use_simd() case itself.
 *
 * Note that the wrapper algorithm is asynchronous, i.e. it has the
 * CRYPTO_ALG_ASYNC flag set.  Therefore it won't be found by users who
 * explicitly allocate a synchronous algorithm.
 */
30 #include <crypto/cryptd.h>
31 #include <crypto/internal/aead.h>
32 #include <crypto/internal/simd.h>
33 #include <crypto/internal/skcipher.h>
34 #include <linux/kernel.h>
35 #include <linux/module.h>
36 #include <linux/preempt.h>
39 /* skcipher support */
41 struct simd_skcipher_alg
{
42 const char *ialg_name
;
43 struct skcipher_alg alg
;
/* Per-tfm context: the cryptd handle used to defer work when SIMD is unusable. */
struct simd_skcipher_ctx {
	struct cryptd_skcipher *cryptd_tfm;
};
50 static int simd_skcipher_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
53 struct simd_skcipher_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
54 struct crypto_skcipher
*child
= &ctx
->cryptd_tfm
->base
;
57 crypto_skcipher_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
58 crypto_skcipher_set_flags(child
, crypto_skcipher_get_flags(tfm
) &
60 err
= crypto_skcipher_setkey(child
, key
, key_len
);
61 crypto_skcipher_set_flags(tfm
, crypto_skcipher_get_flags(child
) &
66 static int simd_skcipher_encrypt(struct skcipher_request
*req
)
68 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
69 struct simd_skcipher_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
70 struct skcipher_request
*subreq
;
71 struct crypto_skcipher
*child
;
73 subreq
= skcipher_request_ctx(req
);
76 if (!crypto_simd_usable() ||
77 (in_atomic() && cryptd_skcipher_queued(ctx
->cryptd_tfm
)))
78 child
= &ctx
->cryptd_tfm
->base
;
80 child
= cryptd_skcipher_child(ctx
->cryptd_tfm
);
82 skcipher_request_set_tfm(subreq
, child
);
84 return crypto_skcipher_encrypt(subreq
);
87 static int simd_skcipher_decrypt(struct skcipher_request
*req
)
89 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
90 struct simd_skcipher_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
91 struct skcipher_request
*subreq
;
92 struct crypto_skcipher
*child
;
94 subreq
= skcipher_request_ctx(req
);
97 if (!crypto_simd_usable() ||
98 (in_atomic() && cryptd_skcipher_queued(ctx
->cryptd_tfm
)))
99 child
= &ctx
->cryptd_tfm
->base
;
101 child
= cryptd_skcipher_child(ctx
->cryptd_tfm
);
103 skcipher_request_set_tfm(subreq
, child
);
105 return crypto_skcipher_decrypt(subreq
);
108 static void simd_skcipher_exit(struct crypto_skcipher
*tfm
)
110 struct simd_skcipher_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
112 cryptd_free_skcipher(ctx
->cryptd_tfm
);
115 static int simd_skcipher_init(struct crypto_skcipher
*tfm
)
117 struct simd_skcipher_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
118 struct cryptd_skcipher
*cryptd_tfm
;
119 struct simd_skcipher_alg
*salg
;
120 struct skcipher_alg
*alg
;
123 alg
= crypto_skcipher_alg(tfm
);
124 salg
= container_of(alg
, struct simd_skcipher_alg
, alg
);
126 cryptd_tfm
= cryptd_alloc_skcipher(salg
->ialg_name
,
128 CRYPTO_ALG_INTERNAL
);
129 if (IS_ERR(cryptd_tfm
))
130 return PTR_ERR(cryptd_tfm
);
132 ctx
->cryptd_tfm
= cryptd_tfm
;
134 reqsize
= crypto_skcipher_reqsize(cryptd_skcipher_child(cryptd_tfm
));
135 reqsize
= max(reqsize
, crypto_skcipher_reqsize(&cryptd_tfm
->base
));
136 reqsize
+= sizeof(struct skcipher_request
);
138 crypto_skcipher_set_reqsize(tfm
, reqsize
);
143 struct simd_skcipher_alg
*simd_skcipher_create_compat(const char *algname
,
145 const char *basename
)
147 struct simd_skcipher_alg
*salg
;
148 struct crypto_skcipher
*tfm
;
149 struct skcipher_alg
*ialg
;
150 struct skcipher_alg
*alg
;
153 tfm
= crypto_alloc_skcipher(basename
, CRYPTO_ALG_INTERNAL
,
154 CRYPTO_ALG_INTERNAL
| CRYPTO_ALG_ASYNC
);
156 return ERR_CAST(tfm
);
158 ialg
= crypto_skcipher_alg(tfm
);
160 salg
= kzalloc(sizeof(*salg
), GFP_KERNEL
);
162 salg
= ERR_PTR(-ENOMEM
);
166 salg
->ialg_name
= basename
;
170 if (snprintf(alg
->base
.cra_name
, CRYPTO_MAX_ALG_NAME
, "%s", algname
) >=
174 if (snprintf(alg
->base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "%s",
175 drvname
) >= CRYPTO_MAX_ALG_NAME
)
178 alg
->base
.cra_flags
= CRYPTO_ALG_ASYNC
;
179 alg
->base
.cra_priority
= ialg
->base
.cra_priority
;
180 alg
->base
.cra_blocksize
= ialg
->base
.cra_blocksize
;
181 alg
->base
.cra_alignmask
= ialg
->base
.cra_alignmask
;
182 alg
->base
.cra_module
= ialg
->base
.cra_module
;
183 alg
->base
.cra_ctxsize
= sizeof(struct simd_skcipher_ctx
);
185 alg
->ivsize
= ialg
->ivsize
;
186 alg
->chunksize
= ialg
->chunksize
;
187 alg
->min_keysize
= ialg
->min_keysize
;
188 alg
->max_keysize
= ialg
->max_keysize
;
190 alg
->init
= simd_skcipher_init
;
191 alg
->exit
= simd_skcipher_exit
;
193 alg
->setkey
= simd_skcipher_setkey
;
194 alg
->encrypt
= simd_skcipher_encrypt
;
195 alg
->decrypt
= simd_skcipher_decrypt
;
197 err
= crypto_register_skcipher(alg
);
202 crypto_free_skcipher(tfm
);
210 EXPORT_SYMBOL_GPL(simd_skcipher_create_compat
);
212 struct simd_skcipher_alg
*simd_skcipher_create(const char *algname
,
213 const char *basename
)
215 char drvname
[CRYPTO_MAX_ALG_NAME
];
217 if (snprintf(drvname
, CRYPTO_MAX_ALG_NAME
, "simd-%s", basename
) >=
219 return ERR_PTR(-ENAMETOOLONG
);
221 return simd_skcipher_create_compat(algname
, drvname
, basename
);
223 EXPORT_SYMBOL_GPL(simd_skcipher_create
);
225 void simd_skcipher_free(struct simd_skcipher_alg
*salg
)
227 crypto_unregister_skcipher(&salg
->alg
);
230 EXPORT_SYMBOL_GPL(simd_skcipher_free
);
232 int simd_register_skciphers_compat(struct skcipher_alg
*algs
, int count
,
233 struct simd_skcipher_alg
**simd_algs
)
239 const char *basename
;
240 struct simd_skcipher_alg
*simd
;
242 err
= crypto_register_skciphers(algs
, count
);
246 for (i
= 0; i
< count
; i
++) {
247 WARN_ON(strncmp(algs
[i
].base
.cra_name
, "__", 2));
248 WARN_ON(strncmp(algs
[i
].base
.cra_driver_name
, "__", 2));
249 algname
= algs
[i
].base
.cra_name
+ 2;
250 drvname
= algs
[i
].base
.cra_driver_name
+ 2;
251 basename
= algs
[i
].base
.cra_driver_name
;
252 simd
= simd_skcipher_create_compat(algname
, drvname
, basename
);
261 simd_unregister_skciphers(algs
, count
, simd_algs
);
264 EXPORT_SYMBOL_GPL(simd_register_skciphers_compat
);
266 void simd_unregister_skciphers(struct skcipher_alg
*algs
, int count
,
267 struct simd_skcipher_alg
**simd_algs
)
271 crypto_unregister_skciphers(algs
, count
);
273 for (i
= 0; i
< count
; i
++) {
275 simd_skcipher_free(simd_algs
[i
]);
280 EXPORT_SYMBOL_GPL(simd_unregister_skciphers
);
284 struct simd_aead_alg
{
285 const char *ialg_name
;
/* Per-tfm context: the cryptd handle used to defer work when SIMD is unusable. */
struct simd_aead_ctx {
	struct cryptd_aead *cryptd_tfm;
};
293 static int simd_aead_setkey(struct crypto_aead
*tfm
, const u8
*key
,
294 unsigned int key_len
)
296 struct simd_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
297 struct crypto_aead
*child
= &ctx
->cryptd_tfm
->base
;
300 crypto_aead_clear_flags(child
, CRYPTO_TFM_REQ_MASK
);
301 crypto_aead_set_flags(child
, crypto_aead_get_flags(tfm
) &
302 CRYPTO_TFM_REQ_MASK
);
303 err
= crypto_aead_setkey(child
, key
, key_len
);
304 crypto_aead_set_flags(tfm
, crypto_aead_get_flags(child
) &
305 CRYPTO_TFM_RES_MASK
);
309 static int simd_aead_setauthsize(struct crypto_aead
*tfm
, unsigned int authsize
)
311 struct simd_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
312 struct crypto_aead
*child
= &ctx
->cryptd_tfm
->base
;
314 return crypto_aead_setauthsize(child
, authsize
);
317 static int simd_aead_encrypt(struct aead_request
*req
)
319 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
320 struct simd_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
321 struct aead_request
*subreq
;
322 struct crypto_aead
*child
;
324 subreq
= aead_request_ctx(req
);
327 if (!crypto_simd_usable() ||
328 (in_atomic() && cryptd_aead_queued(ctx
->cryptd_tfm
)))
329 child
= &ctx
->cryptd_tfm
->base
;
331 child
= cryptd_aead_child(ctx
->cryptd_tfm
);
333 aead_request_set_tfm(subreq
, child
);
335 return crypto_aead_encrypt(subreq
);
338 static int simd_aead_decrypt(struct aead_request
*req
)
340 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
341 struct simd_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
342 struct aead_request
*subreq
;
343 struct crypto_aead
*child
;
345 subreq
= aead_request_ctx(req
);
348 if (!crypto_simd_usable() ||
349 (in_atomic() && cryptd_aead_queued(ctx
->cryptd_tfm
)))
350 child
= &ctx
->cryptd_tfm
->base
;
352 child
= cryptd_aead_child(ctx
->cryptd_tfm
);
354 aead_request_set_tfm(subreq
, child
);
356 return crypto_aead_decrypt(subreq
);
359 static void simd_aead_exit(struct crypto_aead
*tfm
)
361 struct simd_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
363 cryptd_free_aead(ctx
->cryptd_tfm
);
366 static int simd_aead_init(struct crypto_aead
*tfm
)
368 struct simd_aead_ctx
*ctx
= crypto_aead_ctx(tfm
);
369 struct cryptd_aead
*cryptd_tfm
;
370 struct simd_aead_alg
*salg
;
371 struct aead_alg
*alg
;
374 alg
= crypto_aead_alg(tfm
);
375 salg
= container_of(alg
, struct simd_aead_alg
, alg
);
377 cryptd_tfm
= cryptd_alloc_aead(salg
->ialg_name
, CRYPTO_ALG_INTERNAL
,
378 CRYPTO_ALG_INTERNAL
);
379 if (IS_ERR(cryptd_tfm
))
380 return PTR_ERR(cryptd_tfm
);
382 ctx
->cryptd_tfm
= cryptd_tfm
;
384 reqsize
= crypto_aead_reqsize(cryptd_aead_child(cryptd_tfm
));
385 reqsize
= max(reqsize
, crypto_aead_reqsize(&cryptd_tfm
->base
));
386 reqsize
+= sizeof(struct aead_request
);
388 crypto_aead_set_reqsize(tfm
, reqsize
);
393 struct simd_aead_alg
*simd_aead_create_compat(const char *algname
,
395 const char *basename
)
397 struct simd_aead_alg
*salg
;
398 struct crypto_aead
*tfm
;
399 struct aead_alg
*ialg
;
400 struct aead_alg
*alg
;
403 tfm
= crypto_alloc_aead(basename
, CRYPTO_ALG_INTERNAL
,
404 CRYPTO_ALG_INTERNAL
| CRYPTO_ALG_ASYNC
);
406 return ERR_CAST(tfm
);
408 ialg
= crypto_aead_alg(tfm
);
410 salg
= kzalloc(sizeof(*salg
), GFP_KERNEL
);
412 salg
= ERR_PTR(-ENOMEM
);
416 salg
->ialg_name
= basename
;
420 if (snprintf(alg
->base
.cra_name
, CRYPTO_MAX_ALG_NAME
, "%s", algname
) >=
424 if (snprintf(alg
->base
.cra_driver_name
, CRYPTO_MAX_ALG_NAME
, "%s",
425 drvname
) >= CRYPTO_MAX_ALG_NAME
)
428 alg
->base
.cra_flags
= CRYPTO_ALG_ASYNC
;
429 alg
->base
.cra_priority
= ialg
->base
.cra_priority
;
430 alg
->base
.cra_blocksize
= ialg
->base
.cra_blocksize
;
431 alg
->base
.cra_alignmask
= ialg
->base
.cra_alignmask
;
432 alg
->base
.cra_module
= ialg
->base
.cra_module
;
433 alg
->base
.cra_ctxsize
= sizeof(struct simd_aead_ctx
);
435 alg
->ivsize
= ialg
->ivsize
;
436 alg
->maxauthsize
= ialg
->maxauthsize
;
437 alg
->chunksize
= ialg
->chunksize
;
439 alg
->init
= simd_aead_init
;
440 alg
->exit
= simd_aead_exit
;
442 alg
->setkey
= simd_aead_setkey
;
443 alg
->setauthsize
= simd_aead_setauthsize
;
444 alg
->encrypt
= simd_aead_encrypt
;
445 alg
->decrypt
= simd_aead_decrypt
;
447 err
= crypto_register_aead(alg
);
452 crypto_free_aead(tfm
);
460 EXPORT_SYMBOL_GPL(simd_aead_create_compat
);
462 struct simd_aead_alg
*simd_aead_create(const char *algname
,
463 const char *basename
)
465 char drvname
[CRYPTO_MAX_ALG_NAME
];
467 if (snprintf(drvname
, CRYPTO_MAX_ALG_NAME
, "simd-%s", basename
) >=
469 return ERR_PTR(-ENAMETOOLONG
);
471 return simd_aead_create_compat(algname
, drvname
, basename
);
473 EXPORT_SYMBOL_GPL(simd_aead_create
);
475 void simd_aead_free(struct simd_aead_alg
*salg
)
477 crypto_unregister_aead(&salg
->alg
);
480 EXPORT_SYMBOL_GPL(simd_aead_free
);
482 int simd_register_aeads_compat(struct aead_alg
*algs
, int count
,
483 struct simd_aead_alg
**simd_algs
)
489 const char *basename
;
490 struct simd_aead_alg
*simd
;
492 err
= crypto_register_aeads(algs
, count
);
496 for (i
= 0; i
< count
; i
++) {
497 WARN_ON(strncmp(algs
[i
].base
.cra_name
, "__", 2));
498 WARN_ON(strncmp(algs
[i
].base
.cra_driver_name
, "__", 2));
499 algname
= algs
[i
].base
.cra_name
+ 2;
500 drvname
= algs
[i
].base
.cra_driver_name
+ 2;
501 basename
= algs
[i
].base
.cra_driver_name
;
502 simd
= simd_aead_create_compat(algname
, drvname
, basename
);
511 simd_unregister_aeads(algs
, count
, simd_algs
);
514 EXPORT_SYMBOL_GPL(simd_register_aeads_compat
);
516 void simd_unregister_aeads(struct aead_alg
*algs
, int count
,
517 struct simd_aead_alg
**simd_algs
)
521 crypto_unregister_aeads(algs
, count
);
523 for (i
= 0; i
< count
; i
++) {
525 simd_aead_free(simd_algs
[i
]);
530 EXPORT_SYMBOL_GPL(simd_unregister_aeads
);
532 MODULE_LICENSE("GPL");