// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Linux/arm64 port of the OpenSSL SHA256 implementation for AArch64
 *
 * Copyright (c) 2016 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/types.h>
#include <linux/string.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash for arm64");
MODULE_AUTHOR("Andy Polyakov <appro@openssl.org>");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");
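
/*
 * Two implementations are provided: a scalar AArch64 assembly version of
 * the OpenSSL SHA-256 block transform, and a NEON based one. The scalar
 * code avoids the SIMD register file entirely, so it can be called from
 * any context; the NEON code is only invoked when the SIMD unit is
 * available to kernel code.
 */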
asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
					unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);
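
/*
 * Thin wrappers adapting the assembly routines, which take the raw u32
 * digest words, to the sha256_block_fn prototype expected by the
 * sha256_base_do_update()/sha256_base_do_finalize() helpers.
 */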
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

asmlinkage void sha256_block_neon(u32 *digest, const void *data,
				  unsigned int num_blks);

static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	sha256_block_neon(sst->state, src, blocks);
}

static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
				      unsigned int len)
{
	return sha256_base_do_update(desc, data, len,
				     __sha256_block_data_order);
}

static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
				     unsigned int len, u8 *out)
{
	if (len)
		sha256_base_do_update(desc, data, len,
				      __sha256_block_data_order);
	sha256_base_do_finalize(desc, __sha256_block_data_order);

	return sha256_base_finish(desc, out);
}

static int crypto_sha256_arm64_final(struct shash_desc *desc, u8 *out)
{
	return crypto_sha256_arm64_finup(desc, NULL, 0, out);
}
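
/*
 * Priority 125 ranks these scalar variants above the generic C
 * implementation (priority 100) while keeping them below the NEON
 * variants registered further down (priority 150).
 */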
static struct shash_alg algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= crypto_sha256_arm64_update,
	.final			= crypto_sha256_arm64_final,
	.finup			= crypto_sha256_arm64_finup,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64",
	.base.cra_priority	= 125,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
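
/*
 * The NEON routines may only touch the SIMD register file between
 * kernel_neon_begin() and kernel_neon_end(), and only when
 * crypto_simd_usable() returns true; otherwise they fall back to the
 * scalar block transform.
 */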
static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
			      unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);

	while (len > 0) {
		unsigned int chunk = len;

		/*
		 * Don't hog the CPU for the entire time it takes to process all
		 * input when running on a preemptible kernel, but process the
		 * data block by block instead.
		 */
		if (IS_ENABLED(CONFIG_PREEMPTION) &&
		    chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)
			chunk = SHA256_BLOCK_SIZE -
				sctx->count % SHA256_BLOCK_SIZE;

		kernel_neon_begin();
		sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
		kernel_neon_end();
		data += chunk;
		len -= chunk;
	}
	return 0;
}

static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
			     unsigned int len, u8 *out)
{
	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
	} else {
		if (len)
			sha256_update_neon(desc, data, len);
		kernel_neon_begin();
		sha256_base_do_finalize(desc, __sha256_block_neon);
		kernel_neon_end();
	}
	return sha256_base_finish(desc, out);
}

static int sha256_final_neon(struct shash_desc *desc, u8 *out)
{
	return sha256_finup_neon(desc, NULL, 0, out);
}

static struct shash_alg neon_algs[] = { {
	.digestsize		= SHA256_DIGEST_SIZE,
	.init			= sha256_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha256",
	.base.cra_driver_name	= "sha256-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA224_DIGEST_SIZE,
	.init			= sha224_base_init,
	.update			= sha256_update_neon,
	.final			= sha256_final_neon,
	.finup			= sha256_finup_neon,
	.descsize		= sizeof(struct sha256_state),
	.base.cra_name		= "sha224",
	.base.cra_driver_name	= "sha224-arm64-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= SHA224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
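
/*
 * The scalar algorithms are always registered; the NEON variants are
 * registered in addition when the CPU advertises ASIMD support. If that
 * second registration fails, the scalar algorithms are unregistered
 * again so that module insertion fails cleanly.
 */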
static int __init sha256_mod_init(void)
{
	int ret = crypto_register_shashes(algs, ARRAY_SIZE(algs));

	if (ret)
		return ret;

	if (cpu_have_named_feature(ASIMD)) {
		ret = crypto_register_shashes(neon_algs, ARRAY_SIZE(neon_algs));
		if (ret)
			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
	}
	return ret;
}

static void __exit sha256_mod_fini(void)
{
	if (cpu_have_named_feature(ASIMD))
		crypto_unregister_shashes(neon_algs, ARRAY_SIZE(neon_algs));
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha256_mod_init);
module_exit(sha256_mod_fini);