drm/panthor: Don't add write fences to the shared BOs
[drm/drm-misc.git] / arch / x86 / crypto / sm3_avx_glue.c
blob661b6f22ffcd80cf20fe824f8524c03e1691a032
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * SM3 Secure Hash Algorithm, AVX assembler accelerated.
 * specified in: https://datatracker.ietf.org/doc/html/draft-sca-cfrg-sm3-02
 *
 * Copyright (C) 2021 Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
 */
9 #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
11 #include <crypto/internal/hash.h>
12 #include <crypto/internal/simd.h>
13 #include <linux/init.h>
14 #include <linux/module.h>
15 #include <linux/types.h>
16 #include <crypto/sm3.h>
17 #include <crypto/sm3_base.h>
18 #include <asm/simd.h>
/*
 * Assembler implementation: absorb @nblocks whole SM3 blocks from @data
 * into @state. All callers in this file invoke it between
 * kernel_fpu_begin()/kernel_fpu_end(), so the FPU must be owned when it
 * runs.
 */
asmlinkage void sm3_transform_avx(struct sm3_state *state,
				  const u8 *data, int nblocks);
23 static int sm3_avx_update(struct shash_desc *desc, const u8 *data,
24 unsigned int len)
26 struct sm3_state *sctx = shash_desc_ctx(desc);
28 if (!crypto_simd_usable() ||
29 (sctx->count % SM3_BLOCK_SIZE) + len < SM3_BLOCK_SIZE) {
30 sm3_update(sctx, data, len);
31 return 0;
35 * Make sure struct sm3_state begins directly with the SM3
36 * 256-bit internal state, as this is what the asm functions expect.
38 BUILD_BUG_ON(offsetof(struct sm3_state, state) != 0);
40 kernel_fpu_begin();
41 sm3_base_do_update(desc, data, len, sm3_transform_avx);
42 kernel_fpu_end();
44 return 0;
47 static int sm3_avx_finup(struct shash_desc *desc, const u8 *data,
48 unsigned int len, u8 *out)
50 if (!crypto_simd_usable()) {
51 struct sm3_state *sctx = shash_desc_ctx(desc);
53 if (len)
54 sm3_update(sctx, data, len);
56 sm3_final(sctx, out);
57 return 0;
60 kernel_fpu_begin();
61 if (len)
62 sm3_base_do_update(desc, data, len, sm3_transform_avx);
63 sm3_base_do_finalize(desc, sm3_transform_avx);
64 kernel_fpu_end();
66 return sm3_base_finish(desc, out);
69 static int sm3_avx_final(struct shash_desc *desc, u8 *out)
71 if (!crypto_simd_usable()) {
72 sm3_final(shash_desc_ctx(desc), out);
73 return 0;
76 kernel_fpu_begin();
77 sm3_base_do_finalize(desc, sm3_transform_avx);
78 kernel_fpu_end();
80 return sm3_base_finish(desc, out);
/* shash descriptor for the AVX/BMI2-accelerated SM3 implementation. */
static struct shash_alg sm3_avx_alg = {
	.digestsize	=	SM3_DIGEST_SIZE,
	.init		=	sm3_base_init,
	.update		=	sm3_avx_update,
	.final		=	sm3_avx_final,
	.finup		=	sm3_avx_finup,
	.descsize	=	sizeof(struct sm3_state),
	.base		=	{
		.cra_name	=	"sm3",
		.cra_driver_name =	"sm3-avx",
		/* Priority 300: preferred over the generic sm3 driver. */
		.cra_priority	=	300,
		.cra_blocksize	=	SM3_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
};
99 static int __init sm3_avx_mod_init(void)
101 const char *feature_name;
103 if (!boot_cpu_has(X86_FEATURE_AVX)) {
104 pr_info("AVX instruction are not detected.\n");
105 return -ENODEV;
108 if (!boot_cpu_has(X86_FEATURE_BMI2)) {
109 pr_info("BMI2 instruction are not detected.\n");
110 return -ENODEV;
113 if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
114 &feature_name)) {
115 pr_info("CPU feature '%s' is not supported.\n", feature_name);
116 return -ENODEV;
119 return crypto_register_shash(&sm3_avx_alg);
/* Unregister the algorithm on module removal. */
static void __exit sm3_avx_mod_exit(void)
{
	crypto_unregister_shash(&sm3_avx_alg);
}
/* Module glue: entry/exit points and metadata for the crypto subsystem. */
module_init(sm3_avx_mod_init);
module_exit(sm3_avx_mod_exit);

MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Tianjia Zhang <tianjia.zhang@linux.alibaba.com>");
MODULE_DESCRIPTION("SM3 Secure Hash Algorithm, AVX assembler accelerated");
MODULE_ALIAS_CRYPTO("sm3");
MODULE_ALIAS_CRYPTO("sm3-avx");