# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -march=amdgcn -verify-machineinstrs -run-pass=si-optimize-exec-masking-pre-ra %s -o - | FileCheck -check-prefix=GCN %s
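
# The tests below exercise the end_cf collapsing done by
# si-optimize-exec-masking-pre-ra. Roughly, the rewrite looks like this
# (register names here are illustrative, not taken from the tests):
#
#   $exec = S_OR_B64 $exec, %inner_saved_exec, implicit-def $scc
#   ...                                ; nothing that reads or writes $exec
#   $exec = S_OR_B64 $exec, %outer_saved_exec, implicit-def $scc
#
# The inner restore is redundant because the outer saved mask is a superset
# of the inner one, so the pass deletes the first s_or_b64 when it can prove
# the instructions in between are safe to skip.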

# Make sure dbg_value doesn't change codegen when collapsing end_cf
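# The checks below end up with a single exec restore (the s_or_b64 of
# [[COPY2]], the outer saved mask); the inner restore of %12 is folded away
# even though DBG_VALUE instructions sit between the two.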
---
name: simple_nested_if_dbg_value
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: simple_nested_if_dbg_value
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.4(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.4, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: DBG_VALUE
  ; GCN: bb.4:
  ; GCN: DBG_VALUE
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc
    DBG_VALUE

  bb.4:
    DBG_VALUE
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %15:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %16:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %16, %15, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...

# Empty block separates the collapsible s_or_b64
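# The empty block between the two restores is harmless: per the checks below,
# only the outer restore (the s_or_b64 of [[COPY2]]) survives, the same result
# as with no block in between.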
---
name: simple_nested_if_empty_block_between
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: simple_nested_if_empty_block_between
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.5(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.5, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: bb.4:
  ; GCN: successors: %bb.5(0x80000000)
  ; GCN: bb.5:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc

  bb.5:

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %15:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %16:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %16, %15, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...

# Effectively empty block separates the collapsible s_or_b64
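# The only instruction in the intervening block is a DBG_VALUE, which the
# pass should treat like an empty block: the checks still show a single exec
# restore.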
---
name: simple_nested_if_empty_block_dbg_between
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: simple_nested_if_empty_block_dbg_between
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.5(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.5, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: bb.4:
  ; GCN: successors: %bb.5(0x80000000)
  ; GCN: DBG_VALUE
  ; GCN: bb.5:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc

  bb.5:
    DBG_VALUE

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %15:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %16:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %16, %15, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...
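
# SALU and meta instructions sit ahead of the inner s_or_b64 in this test;
# per the checks below, both exec restores survive, i.e. the collapse does not
# fire when the inner restore is not the first instruction found in its block.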
---
name: skip_salu_and_meta_insts_find_first
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: skip_salu_and_meta_insts_find_first
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.4(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.4, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[COPY4:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY4]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
  ; GCN: dead %16:sgpr_32 = S_BREV_B32 [[DEF]]
  ; GCN: KILL [[DEF]]
  ; GCN: $exec = S_OR_B64 $exec, [[COPY4]], implicit-def $scc
  ; GCN: bb.4:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    %15:sgpr_32 = IMPLICIT_DEF
    %16:sgpr_32 = S_BREV_B32 %15
    KILL %15
    $exec = S_OR_B64 $exec, %12, implicit-def $scc

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %17:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %18:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %18, %17, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...

# Make sure SALU instructions, meta instructions, and SGPR->SGPR
# copies are skipped.
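# Here those instructions follow the inner restore, and the checks show they
# do not block the collapse: only the outer s_or_b64 of [[COPY2]] remains,
# with the results of S_BREV_B32 and the SGPR->SGPR copy left dead.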
---
name: skip_salu_and_meta_insts_after
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: skip_salu_and_meta_insts_after
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.4(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.4, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
  ; GCN: [[S_BREV_B32_:%[0-9]+]]:sgpr_32 = S_BREV_B32 [[DEF]]
  ; GCN: KILL [[DEF]]
  ; GCN: dead %17:sgpr_32 = COPY [[S_BREV_B32_]]
  ; GCN: bb.4:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc
    %15:sgpr_32 = IMPLICIT_DEF
    %16:sgpr_32 = S_BREV_B32 %15
    KILL %15
    %19:sgpr_32 = COPY %16

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %17:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %18:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %18, %17, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...

# A SALU instruction depends on exec through a normal operand.
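# S_BREV_B64 reads $exec as an explicit use, so removing the inner restore
# would change its input; the checks below keep both s_or_b64s.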
---
name: salu_exec_dependency
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: salu_exec_dependency
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.4(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.4, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[COPY4:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY4]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: $exec = S_OR_B64 $exec, [[COPY4]], implicit-def $scc
  ; GCN: dead %15:sreg_64 = S_BREV_B64 $exec
  ; GCN: bb.4:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc
    %15:sreg_64 = S_BREV_B64 $exec

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %17:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %18:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %18, %17, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...

# Copies to / from a VGPR should be assumed to read exec
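# A VALU copy has no explicit $exec operand, but it only writes the active
# lanes, so the pass must treat it as an exec reader; the checks below keep
# both restores.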
---
name: copy_no_explicit_exec_dependency
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: copy_no_explicit_exec_dependency
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.4(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.4, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[COPY4:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY4]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: $exec = S_OR_B64 $exec, [[COPY4]], implicit-def $scc
  ; GCN: dead %15:vgpr_32 = COPY %5.sub2
  ; GCN: bb.4:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc
    %15:vgpr_32 = COPY %5.sub2

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %17:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %18:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %18, %17, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

...

# There's no real reason this can't be handled, but it isn't currently.
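# The outer restore block is reached through a branch rather than by falling
# through from the inner restore block, and the pass gives up: both s_or_b64s
# remain in the checks below.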
---
name: simple_nested_if_not_layout_successor
tracksRegLiveness: true
liveins:
  - { reg: '$vgpr0', virtual-reg: '%0' }
  - { reg: '$sgpr0_sgpr1', virtual-reg: '%1' }
machineFunctionInfo:
  isEntryFunction: true
body:             |
  ; GCN-LABEL: name: simple_nested_if_not_layout_successor
  ; GCN: bb.0:
  ; GCN: successors: %bb.1(0x40000000), %bb.4(0x40000000)
  ; GCN: liveins: $vgpr0, $sgpr0_sgpr1
  ; GCN: [[COPY:%[0-9]+]]:sgpr_64 = COPY $sgpr0_sgpr1
  ; GCN: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
  ; GCN: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_LT_U32_e64 1, [[COPY1]], implicit $exec
  ; GCN: [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN: SI_MASK_BRANCH %bb.4, implicit $exec
  ; GCN: S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN: successors: %bb.2(0x40000000), %bb.3(0x40000000)
  ; GCN: undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM [[COPY]], 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
  ; GCN: undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, [[COPY1]], implicit $exec
  ; GCN: %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: [[COPY3:%[0-9]+]]:vgpr_32 = COPY %5.sub1
  ; GCN: undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
  ; GCN: %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, [[COPY3]], %9, 0, implicit $exec
  ; GCN: %5.sub3:sgpr_128 = S_MOV_B32 61440
  ; GCN: %5.sub2:sgpr_128 = S_MOV_B32 0
  ; GCN: BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 2, [[COPY1]], implicit $exec
  ; GCN: [[COPY4:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN: [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY4]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN: $exec = S_MOV_B64_term [[S_AND_B64_1]]
  ; GCN: SI_MASK_BRANCH %bb.3, implicit $exec
  ; GCN: S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN: successors: %bb.3(0x80000000)
  ; GCN: %5.sub0:sgpr_128 = COPY %5.sub2
  ; GCN: %5.sub1:sgpr_128 = COPY %5.sub2
  ; GCN: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
  ; GCN: BUFFER_STORE_DWORD_ADDR64 [[V_MOV_B32_e32_]], %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
  ; GCN: bb.3:
  ; GCN: successors: %bb.5(0x80000000)
  ; GCN: $exec = S_OR_B64 $exec, [[COPY4]], implicit-def $scc
  ; GCN: S_BRANCH %bb.5
  ; GCN: bb.4:
  ; GCN: $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: [[V_MOV_B32_e32_1:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
  ; GCN: [[V_MOV_B32_e32_2:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: $m0 = S_MOV_B32 -1
  ; GCN: DS_WRITE_B32 [[V_MOV_B32_e32_2]], [[V_MOV_B32_e32_1]], 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
  ; GCN: S_ENDPGM 0
  ; GCN: bb.5:
  ; GCN: successors: %bb.4(0x80000000)
  ; GCN: S_BRANCH %bb.4
  bb.0:
    successors: %bb.1, %bb.4
    liveins: $vgpr0, $sgpr0_sgpr1

    %1:sgpr_64 = COPY $sgpr0_sgpr1
    %0:vgpr_32 = COPY $vgpr0
    %2:sreg_64 = V_CMP_LT_U32_e64 1, %0, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.4, implicit $exec
    S_BRANCH %bb.1

  bb.1:
    successors: %bb.2, %bb.3

    undef %5.sub0_sub1:sgpr_128 = S_LOAD_DWORDX2_IMM %1, 9, 0, 0 :: (dereferenceable invariant load 8, align 4, addrspace 4)
    undef %6.sub0:vreg_64 = V_LSHLREV_B32_e32 2, %0, implicit $exec
    %6.sub1:vreg_64 = V_MOV_B32_e32 0, implicit $exec
    %7:vgpr_32 = COPY %5.sub1
    undef %8.sub0:vreg_64, %9:sreg_64_xexec = V_ADD_I32_e64 %5.sub0, %6.sub0, 0, implicit $exec
    %8.sub1:vreg_64, dead %10:sreg_64_xexec = V_ADDC_U32_e64 0, %7, %9, 0, implicit $exec
    %5.sub3:sgpr_128 = S_MOV_B32 61440
    %5.sub2:sgpr_128 = S_MOV_B32 0
    BUFFER_STORE_DWORD_ADDR64 %6.sub1, %6, %5, 0, 0, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)
    %11:sreg_64 = V_CMP_NE_U32_e64 2, %0, implicit $exec
    %12:sreg_64 = COPY $exec, implicit-def $exec
    %13:sreg_64 = S_AND_B64 %12, %11, implicit-def dead $scc
    $exec = S_MOV_B64_term %13
    SI_MASK_BRANCH %bb.3, implicit $exec
    S_BRANCH %bb.2

  bb.2:
    %5.sub0:sgpr_128 = COPY %5.sub2
    %5.sub1:sgpr_128 = COPY %5.sub2
    %14:vgpr_32 = V_MOV_B32_e32 1, implicit $exec
    BUFFER_STORE_DWORD_ADDR64 %14, %8, %5, 0, 4, 0, 0, 0, 0, implicit $exec :: (store 4, addrspace 1)

  bb.3:
    $exec = S_OR_B64 $exec, %12, implicit-def $scc
    S_BRANCH %bb.5

  bb.4:
    $exec = S_OR_B64 $exec, %3, implicit-def $scc
    %15:vgpr_32 = V_MOV_B32_e32 3, implicit $exec
    %16:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
    $m0 = S_MOV_B32 -1
    DS_WRITE_B32 %16, %15, 0, 0, implicit $m0, implicit $exec :: (store 4, addrspace 3)
    S_ENDPGM 0

  bb.5:
    S_BRANCH %bb.4

...