/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>
MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
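
/*
 * Interface to the assembly core (sha2-ce-core.S). Judging by the call
 * sites below: it consumes 'blocks' full 64-byte blocks from 'src' and
 * updates the eight 32-bit state words in 'state'; 'head' optionally
 * points at one buffered block to be processed before 'src'; a nonzero
 * 'bytes' (used only on the finup fast path) passes the total input
 * length so the core can append the final padding itself.
 */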
asmlinkage int sha2_ce_transform(int blocks, u8 const *src, u32 *state,
				 u8 *head, long bytes);
static int sha224_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha256_state){
		.state = {
			SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
			SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
		}
	};
	return 0;
}
static int sha256_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha256_state){
		.state = {
			SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
			SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
		}
	};
	return 0;
}
static int sha2_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

	sctx->count += len;

	if ((partial + len) >= SHA256_BLOCK_SIZE) {
		int blocks;

		if (partial) {
			int p = SHA256_BLOCK_SIZE - partial;

			memcpy(sctx->buf + partial, data, p);
			data += p;
			len -= p;
		}

		blocks = len / SHA256_BLOCK_SIZE;
		len %= SHA256_BLOCK_SIZE;

		kernel_neon_begin_partial(28);
		sha2_ce_transform(blocks, data, sctx->state,
				  partial ? sctx->buf : NULL, 0);
		kernel_neon_end();

		data += blocks * SHA256_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(sctx->buf + partial, data, len);
	return 0;
}
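
/*
 * Worked example of the buffering above (illustrative, not from the
 * original source): update(40 bytes) leaves partial == 40. A second
 * update(40 bytes) copies p == 24 bytes to complete the buffered block,
 * hands that block to sha2_ce_transform() via 'head' (with blocks == 0,
 * since only 16 bytes remain), and buffers the trailing 16 bytes.
 */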
static void sha2_final(struct shash_desc *desc)
{
	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be64 bits = cpu_to_be64(sctx->count << 3);
	u32 padlen = SHA256_BLOCK_SIZE
		     - ((sctx->count + sizeof(bits)) % SHA256_BLOCK_SIZE);

	sha2_update(desc, padding, padlen);
	sha2_update(desc, (const u8 *)&bits, sizeof(bits));
}
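
/*
 * Padding arithmetic, worked through for a 3-byte message: count == 3,
 * so padlen == 64 - ((3 + 8) % 64) == 53, and 3 + 53 + 8 == 64, i.e.
 * the 0x80 marker, zero padding and 64-bit bit count land exactly on a
 * block boundary, as SHA-256 requires.
 */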
static int sha224_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	int i;

	sha2_final(desc);

	for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], dst++);

	*sctx = (struct sha256_state){};
	return 0;
}
static int sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	int i;

	sha2_final(desc);

	for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], dst++);

	*sctx = (struct sha256_state){};
	return 0;
}
static void sha2_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	int blocks;

	if (sctx->count || !len || (len % SHA256_BLOCK_SIZE)) {
		sha2_update(desc, data, len);
		sha2_final(desc);
		return;
	}

	/*
	 * Use a fast path if the input is a multiple of 64 bytes. In
	 * this case, there is no need to copy data around, and we can
	 * perform the entire digest calculation in a single invocation
	 * of sha2_ce_transform()
	 */
	blocks = len / SHA256_BLOCK_SIZE;

	kernel_neon_begin_partial(28);
	sha2_ce_transform(blocks, data, sctx->state, NULL, len);
	kernel_neon_end();

	data += blocks * SHA256_BLOCK_SIZE;
}
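
/*
 * Fast-path walk-through (illustrative, not from the original source):
 * finup on a fresh desc with exactly 128 bytes gives blocks == 2 and a
 * single sha2_ce_transform() call with bytes == 128, so - per the
 * call-site contract sketched above - the core processes both blocks
 * and appends the padding itself, with no intermediate buffering.
 */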
static int sha224_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	int i;

	sha2_finup(desc, data, len);

	for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], dst++);

	*sctx = (struct sha256_state){};
	return 0;
}
static int sha256_finup(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	int i;

	sha2_finup(desc, data, len);

	for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], dst++);

	*sctx = (struct sha256_state){};
	return 0;
}
static int sha2_export(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	struct sha256_state *dst = out;

	*dst = *sctx;
	return 0;
}

static int sha2_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	struct sha256_state const *src = in;

	*sctx = *src;
	return 0;
}
static struct shash_alg algs[] = { {
	.init			= sha224_init,
	.update			= sha2_update,
	.final			= sha224_final,
	.finup			= sha224_finup,
	.export			= sha2_export,
	.import			= sha2_import,
	.descsize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.statesize		= sizeof(struct sha256_state),
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_init,
	.update			= sha2_update,
	.final			= sha256_final,
	.finup			= sha256_finup,
	.export			= sha2_export,
	.import			= sha2_import,
	.descsize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.statesize		= sizeof(struct sha256_state),
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };
static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);