arch/arm/crypto/aes-cipher-glue.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Scalar AES core transform
 *
 * Copyright (C) 2017 Linaro Ltd.
 * Author: Ard Biesheuvel <ard.biesheuvel@linaro.org>
 */
#include <crypto/aes.h>
#include <linux/crypto.h>
#include <linux/module.h>
/* Single-block transforms implemented in assembler (aes-cipher-core.S) */
asmlinkage void __aes_arm_encrypt(u32 *rk, int rounds, const u8 *in, u8 *out);
asmlinkage void __aes_arm_decrypt(u32 *rk, int rounds, const u8 *in, u8 *out);
static void aes_arm_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int rounds = 6 + ctx->key_length / 4;	/* 10/12/14 rounds for AES-128/192/256 */

	__aes_arm_encrypt(ctx->key_enc, rounds, in, out);
}
static void aes_arm_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int rounds = 6 + ctx->key_length / 4;

	__aes_arm_decrypt(ctx->key_dec, rounds, in, out);
}
static struct crypto_alg aes_alg = {
	.cra_name			= "aes",
	.cra_driver_name		= "aes-arm",
	.cra_priority			= 200,
	.cra_flags			= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize			= AES_BLOCK_SIZE,
	.cra_ctxsize			= sizeof(struct crypto_aes_ctx),
	.cra_module			= THIS_MODULE,

	.cra_cipher.cia_min_keysize	= AES_MIN_KEY_SIZE,
	.cra_cipher.cia_max_keysize	= AES_MAX_KEY_SIZE,
	.cra_cipher.cia_setkey		= crypto_aes_set_key,
	.cra_cipher.cia_encrypt		= aes_arm_encrypt,
	.cra_cipher.cia_decrypt		= aes_arm_decrypt,

#ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
	.cra_alignmask			= 3,
#endif
};
static int __init aes_init(void)
{
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_fini(void)
{
	crypto_unregister_alg(&aes_alg);
}

module_init(aes_init);
module_exit(aes_fini);
MODULE_DESCRIPTION("Scalar AES cipher for ARM");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("aes");
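
/*
 * A minimal, hypothetical usage sketch (not part of the original file):
 * exercising the "aes-arm" single-block cipher through the kernel's
 * crypto_cipher API. It assumes the headers included above plus
 * <linux/err.h>; the function name and the all-0x01 key are illustrative
 * only, not a real self-test vector.
 */
static int __maybe_unused aes_arm_smoke_test(void)
{
	static const u8 key[AES_KEYSIZE_128] = {
		[0 ... AES_KEYSIZE_128 - 1] = 0x01
	};
	u8 in[AES_BLOCK_SIZE] = { 0 };
	u8 out[AES_BLOCK_SIZE];
	struct crypto_cipher *tfm;
	int err;

	/* Request this driver explicitly; asking for "aes" would select the
	 * highest-priority AES implementation instead. */
	tfm = crypto_alloc_cipher("aes-arm", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_cipher_setkey(tfm, key, sizeof(key));
	if (!err)
		crypto_cipher_encrypt_one(tfm, out, in);	/* one 16-byte block */

	crypto_free_cipher(tfm);
	return err;
}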