//===-- Implementation of bcmp --------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIBC_SRC_STRING_MEMORY_UTILS_BCMP_IMPLEMENTATIONS_H
#define LLVM_LIBC_SRC_STRING_MEMORY_UTILS_BCMP_IMPLEMENTATIONS_H

#include "src/__support/common.h"
#include "src/__support/macros/optimization.h" // LIBC_UNLIKELY LIBC_LOOP_NOUNROLL
#include "src/__support/macros/properties/architectures.h"
#include "src/string/memory_utils/op_aarch64.h"
#include "src/string/memory_utils/op_builtin.h"
#include "src/string/memory_utils/op_generic.h"
#include "src/string/memory_utils/op_x86.h"

#include <stddef.h> // size_t

namespace __llvm_libc {
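
// Portable fallback: compares bytes [offset, count) of p1 and p2 one at a
// time and returns NONZERO at the first mismatch, ZERO otherwise. The loop
// is deliberately kept un-unrolled so the generated code stays small.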
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_byte_per_byte(CPtr p1, CPtr p2, size_t offset, size_t count) {
  LIBC_LOOP_NOUNROLL
  for (; offset < count; ++offset)
    if (p1[offset] != p2[offset])
      return BcmpReturnType::NONZERO();
  return BcmpReturnType::ZERO();
}
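
// Word-at-a-time comparison for targets where unaligned loads are slow or
// unsupported (used below for riscv64). A scalar prologue first advances p1
// to 8-byte alignment; inside the loop each 8-byte word of p2 is assembled
// from smaller aligned loads selected by p2's residual alignment, then
// compared against an aligned 8-byte load from p1.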
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_aligned_access_64bit(CPtr p1, CPtr p2, size_t count) {
  constexpr size_t kAlign = sizeof(uint64_t);
  if (count <= 2 * kAlign)
    return inline_bcmp_byte_per_byte(p1, p2, 0, count);
  size_t bytes_to_p1_align = distance_to_align_up<kAlign>(p1);
  if (auto value = inline_bcmp_byte_per_byte(p1, p2, 0, bytes_to_p1_align))
    return value;
  size_t offset = bytes_to_p1_align;
  size_t p2_alignment = distance_to_align_down<kAlign>(p2 + offset);
  for (; offset < count - kAlign; offset += kAlign) {
    uint64_t a;
    if (p2_alignment == 0)
      a = load64_aligned<uint64_t>(p2, offset);
    else if (p2_alignment == 4)
      a = load64_aligned<uint32_t, uint32_t>(p2, offset);
    else if (p2_alignment == 2)
      a = load64_aligned<uint16_t, uint16_t, uint16_t, uint16_t>(p2, offset);
    else
      a = load64_aligned<uint8_t, uint16_t, uint16_t, uint16_t, uint8_t>(
          p2, offset);
    uint64_t b = load64_aligned<uint64_t>(p1, offset);
    if (a != b)
      return BcmpReturnType::NONZERO();
  }
  return inline_bcmp_byte_per_byte(p1, p2, offset, count);
}
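
// 4-byte variant of the scheme above (used below for riscv32): p1 is
// aligned to 4 bytes and each word of p2 is assembled from aligned 4-, 2-,
// or 1-byte pieces according to p2's residual alignment.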
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_aligned_access_32bit(CPtr p1, CPtr p2, size_t count) {
  constexpr size_t kAlign = sizeof(uint32_t);
  if (count <= 2 * kAlign)
    return inline_bcmp_byte_per_byte(p1, p2, 0, count);
  size_t bytes_to_p1_align = distance_to_align_up<kAlign>(p1);
  if (auto value = inline_bcmp_byte_per_byte(p1, p2, 0, bytes_to_p1_align))
    return value;
  size_t offset = bytes_to_p1_align;
  size_t p2_alignment = distance_to_align_down<kAlign>(p2 + offset);
  for (; offset < count - kAlign; offset += kAlign) {
    uint32_t a;
    if (p2_alignment == 0)
      a = load32_aligned<uint32_t>(p2, offset);
    else if (p2_alignment == 2)
      a = load32_aligned<uint16_t, uint16_t>(p2, offset);
    else
      a = load32_aligned<uint8_t, uint16_t, uint8_t>(p2, offset);
    uint32_t b = load32_aligned<uint32_t>(p1, offset);
    if (a != b)
      return BcmpReturnType::NONZERO();
  }
  return inline_bcmp_byte_per_byte(p1, p2, offset, count);
}

#if defined(LIBC_TARGET_ARCH_IS_X86) || defined(LIBC_TARGET_ARCH_IS_AARCH64)
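// Generic strategy for counts > 16 when no specialized SIMD variant
// applies: a 16-byte block loop below the 256-byte crossover (mirroring the
// SIMD variants below), then one unaligned 64-byte block followed by a
// 64-byte loop with p1 aligned to its next 64-byte boundary.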
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_generic_gt16(CPtr p1, CPtr p2, size_t count) {
  if (count < 256)
    return generic::Bcmp<16>::loop_and_tail(p1, p2, count);
  if (auto value = generic::Bcmp<64>::block(p1, p2))
    return value;
  align_to_next_boundary<64, Arg::P1>(p1, p2, count);
  return generic::Bcmp<64>::loop_and_tail(p1, p2, count);
}
#endif // defined(LIBC_TARGET_ARCH_IS_X86) ||
       // defined(LIBC_TARGET_ARCH_IS_AARCH64)

#if defined(LIBC_TARGET_ARCH_IS_X86)
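// SSE2 strategy for counts > 16: head_tail covers 17..32 with two
// potentially overlapping 16-byte compares; a 16-byte loop handles sizes
// below the 256-byte crossover; larger sizes compare one unaligned 16-byte
// block, align p1, then finish with a 64-byte loop.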
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_x86_sse2_gt16(CPtr p1, CPtr p2, size_t count) {
  if (count <= 32)
    return x86::sse2::Bcmp<16>::head_tail(p1, p2, count);
  if (count < 256)
    return x86::sse2::Bcmp<16>::loop_and_tail(p1, p2, count);
  if (auto value = x86::sse2::Bcmp<16>::block(p1, p2))
    return value;
  align_to_next_boundary<16, Arg::P1>(p1, p2, count);
  return x86::sse2::Bcmp<64>::loop_and_tail(p1, p2, count);
}
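
// AVX2 strategy: overlapping head_tail compares at 16-, 32-, and 64-byte
// granularity cover counts up to 128. For counts >= 256 one unaligned
// 64-byte block is compared and p1 is aligned to a 64-byte boundary before
// entering the loop; in-between sizes enter the 64-byte loop unaligned.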
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_x86_avx2_gt16(CPtr p1, CPtr p2, size_t count) {
  if (count <= 32)
    return x86::sse2::Bcmp<16>::head_tail(p1, p2, count);
  if (count <= 64)
    return x86::avx2::Bcmp<32>::head_tail(p1, p2, count);
  if (count <= 128)
    return x86::avx2::Bcmp<64>::head_tail(p1, p2, count);
  if (LIBC_UNLIKELY(count >= 256)) {
    if (auto value = x86::avx2::Bcmp<64>::block(p1, p2))
      return value;
    align_to_next_boundary<64, Arg::P1>(p1, p2, count);
  }
  return x86::avx2::Bcmp<64>::loop_and_tail(p1, p2, count);
}
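
// AVX512BW strategy: same shape as the AVX2 version, but the 64-byte
// block, head_tail, and loop operations come from the avx512bw namespace
// (presumably full 512-bit registers; the details live in op_x86.h).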
[[maybe_unused]] LIBC_INLINE BcmpReturnType
inline_bcmp_x86_avx512bw_gt16(CPtr p1, CPtr p2, size_t count) {
  if (count <= 32)
    return x86::sse2::Bcmp<16>::head_tail(p1, p2, count);
  if (count <= 64)
    return x86::avx2::Bcmp<32>::head_tail(p1, p2, count);
  if (count <= 128)
    return x86::avx512bw::Bcmp<64>::head_tail(p1, p2, count);
  if (LIBC_UNLIKELY(count >= 256)) {
    if (auto value = x86::avx512bw::Bcmp<64>::block(p1, p2))
      return value;
    align_to_next_boundary<64, Arg::P1>(p1, p2, count);
  }
  return x86::avx512bw::Bcmp<64>::loop_and_tail(p1, p2, count);
}
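
// x86 dispatch: sizes up to 16 are handled with exact scalar blocks and
// overlapping head_tail compares; larger sizes go to the best variant the
// target supports, selected at compile time via the x86::k* constants.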
[[maybe_unused]] LIBC_INLINE BcmpReturnType inline_bcmp_x86(CPtr p1, CPtr p2,
                                                            size_t count) {
  if (count == 0)
    return BcmpReturnType::ZERO();
  if (count == 1)
    return generic::Bcmp<1>::block(p1, p2);
  if (count == 2)
    return generic::Bcmp<2>::block(p1, p2);
  if (count <= 4)
    return generic::Bcmp<2>::head_tail(p1, p2, count);
  if (count <= 8)
    return generic::Bcmp<4>::head_tail(p1, p2, count);
  if (count <= 16)
    return generic::Bcmp<8>::head_tail(p1, p2, count);
  if constexpr (x86::kAvx512BW)
    return inline_bcmp_x86_avx512bw_gt16(p1, p2, count);
  else if constexpr (x86::kAvx2)
    return inline_bcmp_x86_avx2_gt16(p1, p2, count);
  else if constexpr (x86::kSse2)
    return inline_bcmp_x86_sse2_gt16(p1, p2, count);
  else
    return inline_bcmp_generic_gt16(p1, p2, count);
}
#endif // defined(LIBC_TARGET_ARCH_IS_X86)

#if defined(LIBC_TARGET_ARCH_IS_AARCH64)
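// AArch64 dispatch: a switch handles every size up to 15 with exact-width
// scalar compares, 16..32 uses a 16-byte head_tail, 33..64 a 32-byte
// head_tail, and larger sizes a 32-byte loop (aligned first when
// count > 256).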
[[maybe_unused]] LIBC_INLINE BcmpReturnType inline_bcmp_aarch64(CPtr p1,
                                                                CPtr p2,
                                                                size_t count) {
  if (LIBC_LIKELY(count <= 32)) {
    if (LIBC_UNLIKELY(count >= 16)) {
      return aarch64::Bcmp<16>::head_tail(p1, p2, count);
    }
    switch (count) {
    case 0:
      return BcmpReturnType::ZERO();
    case 1:
      return generic::Bcmp<1>::block(p1, p2);
    case 2:
      return generic::Bcmp<2>::block(p1, p2);
    case 3:
      return generic::Bcmp<2>::head_tail(p1, p2, count);
    case 4:
      return generic::Bcmp<4>::block(p1, p2);
    case 5:
    case 6:
    case 7:
      return generic::Bcmp<4>::head_tail(p1, p2, count);
    case 8:
      return generic::Bcmp<8>::block(p1, p2);
    case 9:
    case 10:
    case 11:
    case 12:
    case 13:
    case 14:
    case 15:
      return generic::Bcmp<8>::head_tail(p1, p2, count);
    }
  }

  if (count <= 64)
    return aarch64::Bcmp<32>::head_tail(p1, p2, count);

  // Aligned loop if > 256, otherwise normal loop
  if (LIBC_UNLIKELY(count > 256)) {
    if (auto value = aarch64::Bcmp<32>::block(p1, p2))
      return value;
    align_to_next_boundary<16, Arg::P1>(p1, p2, count);
  }
  return aarch64::Bcmp<32>::loop_and_tail(p1, p2, count);
}
#endif // defined(LIBC_TARGET_ARCH_IS_AARCH64)
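
// Top-level dispatcher: picks an implementation by target architecture at
// preprocessing time; the byte-per-byte loop is the portable default.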
LIBC_INLINE BcmpReturnType inline_bcmp(CPtr p1, CPtr p2, size_t count) {
#if defined(LIBC_TARGET_ARCH_IS_X86)
  return inline_bcmp_x86(p1, p2, count);
#elif defined(LIBC_TARGET_ARCH_IS_AARCH64)
  return inline_bcmp_aarch64(p1, p2, count);
#elif defined(LIBC_TARGET_ARCH_IS_RISCV64)
  return inline_bcmp_aligned_access_64bit(p1, p2, count);
#elif defined(LIBC_TARGET_ARCH_IS_RISCV32)
  return inline_bcmp_aligned_access_32bit(p1, p2, count);
#else
  return inline_bcmp_byte_per_byte(p1, p2, 0, count);
#endif
}

LIBC_INLINE int inline_bcmp(const void *p1, const void *p2, size_t count) {
  return static_cast<int>(inline_bcmp(reinterpret_cast<CPtr>(p1),
                                      reinterpret_cast<CPtr>(p2), count));
}
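
// Usage sketch (hypothetical caller; bcmp semantics, i.e. the result is 0
// iff the first `count` bytes of both buffers are equal):
//   char a[8] = "abcdefg", b[8] = "abcdefg";
//   int r = __llvm_libc::inline_bcmp(a, b, sizeof(a)); // r == 0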

} // namespace __llvm_libc

#endif // LLVM_LIBC_SRC_STRING_MEMORY_UTILS_BCMP_IMPLEMENTATIONS_H