; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -enable-var-scope -check-prefixes=GCN,SI %s
; RUN: llc -march=amdgcn -mcpu=fiji -verify-machineinstrs < %s | FileCheck -enable-var-scope -check-prefixes=GCN,VI %s

declare i64 @llvm.amdgcn.icmp.i32(i32, i32, i32) #0
declare i64 @llvm.amdgcn.icmp.i64(i64, i64, i32) #0
declare i64 @llvm.amdgcn.icmp.i16(i16, i16, i32) #0
declare i64 @llvm.amdgcn.icmp.i1(i1, i1, i32) #0
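
; The trailing i32 operand is the comparison predicate, using the
; llvm::CmpInst::Predicate encoding (ICMP_EQ = 32 through ICMP_SLE = 41).
; Values outside that range are not valid integer predicates; the tests
; below also check that such inputs do not crash the compiler.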

; No crash on invalid input
; GCN-LABEL: {{^}}v_icmp_i32_dynamic_cc:
define amdgpu_kernel void @v_icmp_i32_dynamic_cc(i64 addrspace(1)* %out, i32 %src, i32 %cc) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 %cc)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_eq:
; GCN: v_cmp_eq_u32_e64
define amdgpu_kernel void @v_icmp_i32_eq(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 32)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}
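
; cc = 30 is not a valid integer predicate (ICMP_EQ..ICMP_SLE are 32..41),
; so no compare should be selected.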
; GCN-LABEL: {{^}}v_icmp_i32:
; GCN-NOT: v_cmp_eq_u32_e64
define amdgpu_kernel void @v_icmp_i32(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 30)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_ne:
; GCN: v_cmp_ne_u32_e64
define amdgpu_kernel void @v_icmp_i32_ne(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 33)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_ugt:
; GCN: v_cmp_gt_u32_e64
define amdgpu_kernel void @v_icmp_i32_ugt(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 34)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_uge:
; GCN: v_cmp_ge_u32_e64
define amdgpu_kernel void @v_icmp_i32_uge(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 35)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_ult:
; GCN: v_cmp_lt_u32_e64
define amdgpu_kernel void @v_icmp_i32_ult(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 36)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_ule:
; GCN: v_cmp_le_u32_e64
define amdgpu_kernel void @v_icmp_i32_ule(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 37)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_sgt:
; GCN: v_cmp_gt_i32_e64
define amdgpu_kernel void @v_icmp_i32_sgt(i64 addrspace(1)* %out, i32 %src) #1 {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 38)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_sge:
; GCN: v_cmp_ge_i32_e64
define amdgpu_kernel void @v_icmp_i32_sge(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 39)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_slt:
; GCN: v_cmp_lt_i32_e64
define amdgpu_kernel void @v_icmp_i32_slt(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 40)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i32_sle:
; GCN: v_cmp_le_i32_e64
define amdgpu_kernel void @v_icmp_i32_sle(i64 addrspace(1)* %out, i32 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i32(i32 %src, i32 100, i32 41)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i64_eq:
; GCN: v_cmp_eq_u64_e64
define amdgpu_kernel void @v_icmp_i64_eq(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 32)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i64_ne:
; GCN: v_cmp_ne_u64_e64
define amdgpu_kernel void @v_icmp_i64_ne(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 33)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_u64_ugt:
; GCN: v_cmp_gt_u64_e64
define amdgpu_kernel void @v_icmp_u64_ugt(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 34)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_u64_uge:
; GCN: v_cmp_ge_u64_e64
define amdgpu_kernel void @v_icmp_u64_uge(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 35)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_u64_ult:
; GCN: v_cmp_lt_u64_e64
define amdgpu_kernel void @v_icmp_u64_ult(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 36)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_u64_ule:
; GCN: v_cmp_le_u64_e64
define amdgpu_kernel void @v_icmp_u64_ule(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 37)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i64_sgt:
; GCN: v_cmp_gt_i64_e64
define amdgpu_kernel void @v_icmp_i64_sgt(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 38)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i64_sge:
; GCN: v_cmp_ge_i64_e64
define amdgpu_kernel void @v_icmp_i64_sge(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 39)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i64_slt:
; GCN: v_cmp_lt_i64_e64
define amdgpu_kernel void @v_icmp_i64_slt(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 40)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i64_sle:
; GCN: v_cmp_le_i64_e64
define amdgpu_kernel void @v_icmp_i64_sle(i64 addrspace(1)* %out, i64 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i64(i64 %src, i64 100, i32 41)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_dynamic_cc:
define amdgpu_kernel void @v_icmp_i16_dynamic_cc(i64 addrspace(1)* %out, i16 %src, i32 %cc) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 %cc)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_eq:
; VI: v_cmp_eq_u16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_and_b32 [[CVT:s[0-9]+]], s{{[0-9]+}}, 0xffff{{$}}
; SI: v_cmp_eq_u32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_eq(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 32)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}
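
; As above, cc = 30 is not a valid integer predicate, so no compare should
; be selected for the i16 variant either.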
; GCN-LABEL: {{^}}v_icmp_i16:
define amdgpu_kernel void @v_icmp_i16(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 30)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_ne:
; VI: v_cmp_ne_u16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_and_b32 [[CVT:s[0-9]+]], s{{[0-9]+}}, 0xffff{{$}}
; SI: v_cmp_ne_u32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_ne(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 33)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_ugt:
; VI: v_cmp_gt_u16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_and_b32 [[CVT:s[0-9]+]], s{{[0-9]+}}, 0xffff{{$}}
; SI: v_cmp_gt_u32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_ugt(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 34)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_uge:
; VI: v_cmp_ge_u16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_and_b32 [[CVT:s[0-9]+]], s{{[0-9]+}}, 0xffff{{$}}
; SI: v_cmp_ge_u32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_uge(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 35)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_ult:
; VI: v_cmp_lt_u16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_and_b32 [[CVT:s[0-9]+]], s{{[0-9]+}}, 0xffff{{$}}
; SI: v_cmp_lt_u32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_ult(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 36)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_ule:
; VI: v_cmp_le_u16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_and_b32 [[CVT:s[0-9]+]], s{{[0-9]+}}, 0xffff{{$}}
; SI: v_cmp_le_u32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_ule(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 37)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_sgt:
; VI: v_cmp_gt_i16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_sext_i32_i16 [[CVT:s[0-9]+]], s{{[0-9]+}}
; SI: v_cmp_gt_i32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_sgt(i64 addrspace(1)* %out, i16 %src) #1 {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 38)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_sge:
; VI: v_cmp_ge_i16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_sext_i32_i16 [[CVT:s[0-9]+]], s{{[0-9]+}}
; SI: v_cmp_ge_i32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_sge(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 39)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_slt:
; VI: v_cmp_lt_i16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_sext_i32_i16 [[CVT:s[0-9]+]], s{{[0-9]+}}
; SI: v_cmp_lt_i32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_slt(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 40)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

; GCN-LABEL: {{^}}v_icmp_i16_sle:
; VI: v_cmp_le_i16_e64

; SI-DAG: v_mov_b32_e32 [[K:v[0-9]+]], 0x64
; SI-DAG: s_sext_i32_i16 [[CVT:s[0-9]+]], s{{[0-9]+}}
; SI: v_cmp_le_i32_e64 s{{\[[0-9]+:[0-9]+\]}}, [[CVT]], [[K]]
define amdgpu_kernel void @v_icmp_i16_sle(i64 addrspace(1)* %out, i16 %src) {
  %result = call i64 @llvm.amdgcn.icmp.i16(i16 %src, i16 100, i32 41)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}
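
; The i1 source is already a lane mask (the AND of two VALU compares), so the
; ne-false compare folds away and the s_and_b64 result is stored directly.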
; GCN-LABEL: {{^}}v_icmp_i1_ne0:
; GCN: v_cmp_gt_u32_e64 s[[C0:\[[0-9]+:[0-9]+\]]],
; GCN: v_cmp_gt_u32_e64 s[[C1:\[[0-9]+:[0-9]+\]]],
; GCN: s_and_b64 s[[SRC:\[[0-9]+:[0-9]+\]]], s[[C0]], s[[C1]]
; SI-NEXT: s_mov_b32 s{{[0-9]+}}, -1
; GCN-NEXT: v_mov_b32_e32
; GCN-NEXT: v_mov_b32_e32
; GCN-NEXT: {{global|flat|buffer}}_store_dwordx2
define amdgpu_kernel void @v_icmp_i1_ne0(i64 addrspace(1)* %out, i32 %a, i32 %b) {
  %c0 = icmp ugt i32 %a, 1
  %c1 = icmp ugt i32 %b, 2
  %src = and i1 %c0, %c1
  %result = call i64 @llvm.amdgcn.icmp.i1(i1 %src, i1 false, i32 33)
  store i64 %result, i64 addrspace(1)* %out
  ret void
}

attributes #0 = { nounwind readnone convergent }