// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/module.h>
#include <linux/string.h>
#include <linux/types.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");
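
/*
 * Scalar assembly routine, ported from the OpenSSL AArch64 code. It
 * processes num_blks 64-byte input blocks and updates the eight 32-bit
 * state words behind 'digest' in place. It is exported so other kernel
 * code can invoke it directly.
 */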
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);
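
/*
 * Thin wrapper adapting the assembly routine to the block-function
 * signature that the crypto/sha256_base.h helpers expect.
 */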
static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
				   int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}
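
/*
 * NEON implementation of the same block transform. Unlike the scalar
 * routine, it may only run between kernel_neon_begin() and
 * kernel_neon_end(), since the kernel does not preserve the FP/SIMD
 * register file by default.
 */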
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void sha256_neon_transform(struct sha256_state *sst, u8 const *src,
				  int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}

static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len, sha256_arm64_transform);
}

static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	sha256_base_do_update(desc, data, len, sha256_arm64_transform);
	sha256_base_do_finalize(desc, sha256_arm64_transform);

	return sha256_base_finish(desc, out);
}

static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}
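
/*
 * Priority 125 puts these scalar-assembly variants above the generic C
 * implementation, while leaving room for the NEON variants below, which
 * register at 150 and therefore win on CPUs that have ASIMD.
 */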
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
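
/*
 * NEON-accelerated update. Falls back to the scalar routine when the
 * FP/SIMD unit cannot be used in the current context (e.g. in hard
 * interrupt context), as reported by crypto_simd_usable().
 */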
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				sha256_arm64_transform);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, sha256_neon_transform);
		kernel_neon_end();

		data += chunk;
		len -= chunk;
	}
	return 0;
}

static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
					      sha256_arm64_transform);
		sha256_base_do_finalize(desc, sha256_arm64_transform);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, sha256_neon_transform);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}
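
/*
 * The same two algorithms again, but backed by the NEON block transform
 * and registered at a higher priority (150) so the crypto API prefers
 * them over the scalar "sha256-arm64"/"sha224-arm64" entries above.
 */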
static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
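
/*
 * Always register the scalar variants; additionally register the NEON
 * variants when the CPU advertises ASIMD. If NEON registration fails,
 * unwind the scalar registration as well so the module loads
 * all-or-nothing.
 */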
static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);