# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn -mcpu=gfx900 -run-pass=instruction-select -verify-machineinstrs -global-isel %s -o - | FileCheck %s -check-prefixes=WAVE64
# RUN: llc -mtriple=amdgcn -mcpu=gfx1010 -run-pass=instruction-select -verify-machineinstrs -global-isel %s -o - | FileCheck %s -check-prefixes=WAVE32
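
# i1_vcc_to_vcc_copy: an s1 condition produced directly on the vcc bank (G_ICMP)
# feeds G_SELECT with no cross-bank copy; the selected compare result uses
# sreg_64_xexec in wave64 and sreg_32_xm0_xexec in wave32.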
---
name: i1_vcc_to_vcc_copy
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4

    ; WAVE64-LABEL: name: i1_vcc_to_vcc_copy
    ; WAVE64: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4
    ; WAVE64-NEXT: {{ $}}
    ; WAVE64-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; WAVE64-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
    ; WAVE64-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr2
    ; WAVE64-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr3
    ; WAVE64-NEXT: [[COPY4:%[0-9]+]]:vgpr_32 = COPY $vgpr4
    ; WAVE64-NEXT: [[DEF:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
    ; WAVE64-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2
    ; WAVE64-NEXT: [[COPY5:%[0-9]+]]:vgpr_32 = COPY [[S_MOV_B32_]]
    ; WAVE64-NEXT: [[V_CMP_EQ_U32_e64_:%[0-9]+]]:sreg_64_xexec = V_CMP_EQ_U32_e64 [[COPY]], [[COPY5]], implicit $exec
    ; WAVE64-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY2]], 0, [[COPY1]], [[V_CMP_EQ_U32_e64_]], implicit $exec
    ; WAVE64-NEXT: [[V_CNDMASK_B32_e64_1:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY4]], 0, [[COPY3]], [[V_CMP_EQ_U32_e64_]], implicit $exec
    ; WAVE64-NEXT: [[COPY6:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE64-NEXT: [[COPY7:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE64-NEXT: EXP_DONE 0, [[V_CNDMASK_B32_e64_]], [[V_CNDMASK_B32_e64_1]], [[COPY6]], [[COPY7]], -1, 0, 15, implicit $exec
    ; WAVE64-NEXT: S_ENDPGM 0
    ;
    ; WAVE32-LABEL: name: i1_vcc_to_vcc_copy
    ; WAVE32: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4
    ; WAVE32-NEXT: {{ $}}
    ; WAVE32-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; WAVE32-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
    ; WAVE32-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr2
    ; WAVE32-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr3
    ; WAVE32-NEXT: [[COPY4:%[0-9]+]]:vgpr_32 = COPY $vgpr4
    ; WAVE32-NEXT: [[DEF:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
    ; WAVE32-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2
    ; WAVE32-NEXT: [[COPY5:%[0-9]+]]:vgpr_32 = COPY [[S_MOV_B32_]]
    ; WAVE32-NEXT: [[V_CMP_EQ_U32_e64_:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_EQ_U32_e64 [[COPY]], [[COPY5]], implicit $exec
    ; WAVE32-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY2]], 0, [[COPY1]], [[V_CMP_EQ_U32_e64_]], implicit $exec
    ; WAVE32-NEXT: [[V_CNDMASK_B32_e64_1:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY4]], 0, [[COPY3]], [[V_CMP_EQ_U32_e64_]], implicit $exec
    ; WAVE32-NEXT: [[COPY6:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE32-NEXT: [[COPY7:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE32-NEXT: EXP_DONE 0, [[V_CNDMASK_B32_e64_]], [[V_CNDMASK_B32_e64_1]], [[COPY6]], [[COPY7]], -1, 0, 15, implicit $exec
    ; WAVE32-NEXT: S_ENDPGM 0
    %0:vgpr(s32) = COPY $vgpr0
    %1:vgpr(s32) = COPY $vgpr1
    %2:vgpr(s32) = COPY $vgpr2
    %3:vgpr(s32) = COPY $vgpr3
    %4:vgpr(s32) = COPY $vgpr4
    %5:sgpr(s32) = G_IMPLICIT_DEF
    %6:sgpr(s32) = G_CONSTANT i32 2
    %7:vgpr(s32) = COPY %6(s32)
    %8:vcc(s1) = G_ICMP intpred(eq), %0(s32), %7
    %9:vgpr(s32) = G_SELECT %8(s1), %1, %2
    %10:vgpr(s32) = G_SELECT %8(s1), %3, %4
    %11:vgpr(s32) = COPY %5(s32)
    %12:vgpr(s32) = COPY %5(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.amdgcn.exp), 0, 15, %9(s32), %10(s32), %11(s32), %12(s32), -1, -1
    S_ENDPGM 0
...
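
# i1_sgpr_to_vcc_copy: an SCC-defined s1 (sgpr G_ICMP + G_TRUNC) is copied to the
# vcc bank; each such copy is selected as S_AND_B32 1 followed by V_CMP_NE_U32_e64
# to rematerialize the condition in a VCC-class register.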
---
name: i1_sgpr_to_vcc_copy
legalized: true
regBankSelected: true
tracksRegLiveness: true
body: |
  bb.0:
    liveins: $sgpr0, $vgpr0, $vgpr1, $vgpr2, $vgpr3

    ; WAVE64-LABEL: name: i1_sgpr_to_vcc_copy
    ; WAVE64: liveins: $sgpr0, $vgpr0, $vgpr1, $vgpr2, $vgpr3
    ; WAVE64-NEXT: {{ $}}
    ; WAVE64-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $sgpr0
    ; WAVE64-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; WAVE64-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr1
    ; WAVE64-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr2
    ; WAVE64-NEXT: [[COPY4:%[0-9]+]]:vgpr_32 = COPY $vgpr3
    ; WAVE64-NEXT: [[DEF:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
    ; WAVE64-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2
    ; WAVE64-NEXT: S_CMP_EQ_U32 [[COPY]], [[S_MOV_B32_]], implicit-def $scc
    ; WAVE64-NEXT: [[COPY5:%[0-9]+]]:sreg_32 = COPY $scc
    ; WAVE64-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32 = S_AND_B32 1, [[COPY5]], implicit-def dead $scc
    ; WAVE64-NEXT: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64_xexec = V_CMP_NE_U32_e64 0, [[S_AND_B32_]], implicit $exec
    ; WAVE64-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY2]], 0, [[COPY1]], [[V_CMP_NE_U32_e64_]], implicit $exec
    ; WAVE64-NEXT: [[S_AND_B32_1:%[0-9]+]]:sreg_32 = S_AND_B32 1, [[COPY5]], implicit-def dead $scc
    ; WAVE64-NEXT: [[V_CMP_NE_U32_e64_1:%[0-9]+]]:sreg_64_xexec = V_CMP_NE_U32_e64 0, [[S_AND_B32_1]], implicit $exec
    ; WAVE64-NEXT: [[V_CNDMASK_B32_e64_1:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY4]], 0, [[COPY3]], [[V_CMP_NE_U32_e64_1]], implicit $exec
    ; WAVE64-NEXT: [[COPY6:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE64-NEXT: [[COPY7:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE64-NEXT: EXP_DONE 0, [[V_CNDMASK_B32_e64_]], [[V_CNDMASK_B32_e64_1]], [[COPY6]], [[COPY7]], -1, 0, 15, implicit $exec
    ; WAVE64-NEXT: S_ENDPGM 0
    ;
    ; WAVE32-LABEL: name: i1_sgpr_to_vcc_copy
    ; WAVE32: liveins: $sgpr0, $vgpr0, $vgpr1, $vgpr2, $vgpr3
    ; WAVE32-NEXT: {{ $}}
    ; WAVE32-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $sgpr0
    ; WAVE32-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
    ; WAVE32-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr1
    ; WAVE32-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr2
    ; WAVE32-NEXT: [[COPY4:%[0-9]+]]:vgpr_32 = COPY $vgpr3
    ; WAVE32-NEXT: [[DEF:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
    ; WAVE32-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 2
    ; WAVE32-NEXT: S_CMP_EQ_U32 [[COPY]], [[S_MOV_B32_]], implicit-def $scc
    ; WAVE32-NEXT: [[COPY5:%[0-9]+]]:sreg_32 = COPY $scc
    ; WAVE32-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32 = S_AND_B32 1, [[COPY5]], implicit-def dead $scc
    ; WAVE32-NEXT: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_NE_U32_e64 0, [[S_AND_B32_]], implicit $exec
    ; WAVE32-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY2]], 0, [[COPY1]], [[V_CMP_NE_U32_e64_]], implicit $exec
    ; WAVE32-NEXT: [[S_AND_B32_1:%[0-9]+]]:sreg_32 = S_AND_B32 1, [[COPY5]], implicit-def dead $scc
    ; WAVE32-NEXT: [[V_CMP_NE_U32_e64_1:%[0-9]+]]:sreg_32_xm0_xexec = V_CMP_NE_U32_e64 0, [[S_AND_B32_1]], implicit $exec
    ; WAVE32-NEXT: [[V_CNDMASK_B32_e64_1:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, [[COPY4]], 0, [[COPY3]], [[V_CMP_NE_U32_e64_1]], implicit $exec
    ; WAVE32-NEXT: [[COPY6:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE32-NEXT: [[COPY7:%[0-9]+]]:vgpr_32 = COPY [[DEF]]
    ; WAVE32-NEXT: EXP_DONE 0, [[V_CNDMASK_B32_e64_]], [[V_CNDMASK_B32_e64_1]], [[COPY6]], [[COPY7]], -1, 0, 15, implicit $exec
    ; WAVE32-NEXT: S_ENDPGM 0
    %0:sgpr(s32) = COPY $sgpr0
    %1:vgpr(s32) = COPY $vgpr0
    %2:vgpr(s32) = COPY $vgpr1
    %3:vgpr(s32) = COPY $vgpr2
    %4:vgpr(s32) = COPY $vgpr3
    %5:sgpr(s32) = G_IMPLICIT_DEF
    %6:sgpr(s32) = G_CONSTANT i32 2
    %7:sgpr(s32) = G_ICMP intpred(eq), %0(s32), %6
    %8:sgpr(s1) = G_TRUNC %7(s32)
    %9:vcc(s1) = COPY %8(s1)
    %10:vgpr(s32) = G_SELECT %9(s1), %1, %2
    %11:vcc(s1) = COPY %8(s1)
    %12:vgpr(s32) = G_SELECT %11(s1), %3, %4
    %13:vgpr(s32) = COPY %5(s32)
    %14:vgpr(s32) = COPY %5(s32)
    G_INTRINSIC_W_SIDE_EFFECTS intrinsic(@llvm.amdgcn.exp), 0, 15, %10(s32), %12(s32), %13(s32), %14(s32), -1, -1
    S_ENDPGM 0
...