// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CRC32C optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/crc32c-intel.c
 *
 * Copyright (C) 2008 Intel Corporation
 * Authors: Austin Zhang <austin_zhang@linux.intel.com>
 *          Kent Liu <kent.liu@intel.com>
 */
#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <linux/init.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/crc32.h>
#include <asm/elf.h>
#include <asm/pstate.h>
/* Flipped on at boot by crc32_sparc_init() when the CPU has the crc32c opcode. */
static DEFINE_STATIC_KEY_FALSE(have_crc32c_opcode);
22 u32
crc32_le_arch(u32 crc
, const u8
*data
, size_t len
)
24 return crc32_le_base(crc
, data
, len
);
26 EXPORT_SYMBOL(crc32_le_arch
);
28 void crc32c_sparc64(u32
*crcp
, const u64
*data
, size_t len
);
30 u32
crc32c_le_arch(u32 crc
, const u8
*data
, size_t len
)
32 size_t n
= -(uintptr_t)data
& 7;
34 if (!static_branch_likely(&have_crc32c_opcode
))
35 return crc32c_le_base(crc
, data
, len
);
38 /* Data isn't 8-byte aligned. Align it. */
40 crc
= crc32c_le_base(crc
, data
, n
);
46 crc32c_sparc64(&crc
, (const u64
*)data
, n
);
51 crc
= crc32c_le_base(crc
, data
, len
);
54 EXPORT_SYMBOL(crc32c_le_arch
);
56 u32
crc32_be_arch(u32 crc
, const u8
*data
, size_t len
)
58 return crc32_be_base(crc
, data
, len
);
60 EXPORT_SYMBOL(crc32_be_arch
);
62 static int __init
crc32_sparc_init(void)
66 if (!(sparc64_elf_hwcap
& HWCAP_SPARC_CRYPTO
))
69 __asm__
__volatile__("rd %%asr26, %0" : "=r" (cfr
));
70 if (!(cfr
& CFR_CRC32C
))
73 static_branch_enable(&have_crc32c_opcode
);
74 pr_info("Using sparc64 crc32c opcode optimized CRC32C implementation\n");
77 arch_initcall(crc32_sparc_init
);
79 static void __exit
crc32_sparc_exit(void)
82 module_exit(crc32_sparc_exit
);
84 u32
crc32_optimizations(void)
86 if (static_key_enabled(&have_crc32c_opcode
))
87 return CRC32C_OPTIMIZATION
;
90 EXPORT_SYMBOL(crc32_optimizations
);
92 MODULE_LICENSE("GPL");
93 MODULE_DESCRIPTION("CRC32c (Castagnoli), sparc64 crc32c opcode accelerated");