; RUN: llc -mtriple=amdgcn--amdpal -mcpu=tahiti -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=SI %s
; RUN: llc -mtriple=amdgcn--amdpal -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefix=GCN -check-prefix=VI %s
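
; Check that scc-setting compares against constants select the s_cmpk_*
; forms when the immediate fits in 16 bits, and fall back to s_cmp_* (or
; v_cmp_* for 64-bit compares on SI) otherwise.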

; Since this intrinsic is exposed as a constant after isel, use it to
; defeat the DAG's compare-with-constant canonicalizations.
declare i32 @llvm.amdgcn.groupstaticsize() #1

@lds = addrspace(3) global [512 x i32] undef, align 4

; GCN-LABEL: {{^}}br_scc_eq_i32_inline_imm:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 4{{$}}
define amdgpu_kernel void @br_scc_eq_i32_inline_imm(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x7fff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_max(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 32767
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_max_p1:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_max_p1(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i32_simm16_max_p1:
; GCN: s_cmpk_lg_u32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_ne_i32_simm16_max_p1(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ne i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_min(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, -32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_simm16_min_m1:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_simm16_min_m1(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, -32769
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_uimm15_max:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm15_max(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65535
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max:
; GCN: s_cmpk_eq_u32 s{{[0-9]+}}, 0xffff{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm16_max(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65535
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32_uimm16_max_p1:
; GCN: s_cmp_eq_u32 s{{[0-9]+}}, 0x10000{{$}}
define amdgpu_kernel void @br_scc_eq_i32_uimm16_max_p1(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65536
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i32:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_eq_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i32:
; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_ne_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ne i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sgt_i32:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_sgt_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x7fff{{$}}
define amdgpu_kernel void @br_scc_sgt_i32_simm16_max(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 32767
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sgt_i32_simm16_max_p1:
; GCN: s_cmp_gt_i32 s{{[0-9]+}}, 0x8000{{$}}
define amdgpu_kernel void @br_scc_sgt_i32_simm16_max_p1(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp sgt i32 %cond, 32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sge_i32:
; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_sge_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sge i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_slt_i32:
; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_slt_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp slt i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_sle_i32:
; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_sle_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sle i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ugt_i32:
; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_ugt_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ugt i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_uge_i32:
; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_uge_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp uge i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32:
; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x41{{$}}
define amdgpu_kernel void @br_scc_ult_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, 65
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16:
; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xffff8000
define amdgpu_kernel void @br_scc_ult_i32_min_simm16(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, -32768
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32_min_simm16_m1:
; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xffff7fff{{$}}
define amdgpu_kernel void @br_scc_ult_i32_min_simm16_m1(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ult i32 %cond, -32769
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ule_i32:
; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @br_scc_ule_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ule i32 %cond, %size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_eq_i32:
; GCN: s_cmpk_eq_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_eq_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp eq i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ne_i32:
; GCN: s_cmpk_lg_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ne_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ne i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_sgt_i32:
; GCN: s_cmpk_lt_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sgt_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sgt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_sge_i32:
; GCN: s_cmpk_le_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sge_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sge i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_slt_i32:
; GCN: s_cmpk_gt_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_slt_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp slt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_sle_i32:
; GCN: s_cmpk_ge_i32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_sle_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp sle i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ugt_i32:
; GCN: s_cmpk_lt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ugt_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ugt i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_uge_i32:
; GCN: s_cmpk_le_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_uge_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp uge i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ult_i32:
; GCN: s_cmpk_gt_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ult_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ult i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}commute_br_scc_ule_i32:
; GCN: s_cmpk_ge_u32 s{{[0-9]+}}, 0x800{{$}}
define amdgpu_kernel void @commute_br_scc_ule_i32(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %cmp0 = icmp ule i32 %size, %cond
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ult_i32_non_u16:
; GCN: s_cmp_lt_u32 s{{[0-9]+}}, 0xfffff7ff
define amdgpu_kernel void @br_scc_ult_i32_non_u16(i32 %cond, ptr addrspace(1) %out) #0 {
entry:
  %size = call i32 @llvm.amdgcn.groupstaticsize()
  %not.size = xor i32 %size, -1
  %cmp0 = icmp ult i32 %cond, %not.size
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "; $0", "v"(ptr addrspace(3) @lds)
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i64_inline_imm:
; VI: s_cmp_eq_u64 s{{\[[0-9]+:[0-9]+\]}}, 4

; SI: v_cmp_eq_u64_e64
define amdgpu_kernel void @br_scc_eq_i64_inline_imm(i64 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i64 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_eq_i64_simm16:
; VI-DAG: s_movk_i32 s[[K_LO:[0-9]+]], 0x4d2
; VI-DAG: s_mov_b32 s[[K_HI:[0-9]+]], 1
; VI: s_cmp_eq_u64 s{{\[[0-9]+:[0-9]+\]}}, s[[[K_LO]]:[[K_HI]]]

; SI: v_cmp_eq_u64_e32
define amdgpu_kernel void @br_scc_eq_i64_simm16(i64 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp eq i64 %cond, 4294968530
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i64_inline_imm:
; VI: s_cmp_lg_u64 s{{\[[0-9]+:[0-9]+\]}}, 4

; SI: v_cmp_ne_u64_e64
define amdgpu_kernel void @br_scc_ne_i64_inline_imm(i64 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ne i64 %cond, 4
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

; GCN-LABEL: {{^}}br_scc_ne_i64_simm16:
; VI-DAG: s_movk_i32 s[[K_LO:[0-9]+]], 0x4d2
; VI-DAG: s_mov_b32 s[[K_HI:[0-9]+]], 1
; VI: s_cmp_lg_u64 s{{\[[0-9]+:[0-9]+\]}}, s[[[K_LO]]:[[K_HI]]]

; SI: v_cmp_ne_u64_e32
define amdgpu_kernel void @br_scc_ne_i64_simm16(i64 %cond, ptr addrspace(1) %out) #0 {
entry:
  %cmp0 = icmp ne i64 %cond, 4294968530
  br i1 %cmp0, label %endif, label %if

if:
  call void asm sideeffect "", ""()
  br label %endif

endif:
  store volatile i32 1, ptr addrspace(1) %out
  ret void
}

attributes #0 = { nounwind }
attributes #1 = { nounwind readnone }