# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 4
# RUN: llc -mtriple=amdgcn-amd-amdpal -mcpu=gfx1030 -run-pass=greedy --stress-regalloc=6 --verify-machineinstrs -o - %s | FileCheck -check-prefix=GCN %s

# During RA, the spills and copies for scalar-register block live-ins should be inserted at the beginning of the block.
# The COPY inserted into bb.9 during the live-range split should precede the SPILL that was inserted earlier in the flow.
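#
# Illustrative sketch only (paraphrasing the autogenerated GCN checks for bb.9
# below; %a/%b/%c are placeholders, not literal FileCheck captures):
#
#   bb.9:
#     %a:sgpr_32 = COPY %b                              ; live-range-split COPY at block begin
#     %c:sgpr_64 = SI_SPILL_S64_RESTORE %stack.2, ...   ; spill-slot access follows it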
---
name: test_kernel
tracksRegLiveness: true
registers:
  - { id: 0, class: vgpr_32 }
  - { id: 1, class: vreg_64 }
  - { id: 2, class: sgpr_32 }
  - { id: 3, class: sreg_32 }
  - { id: 4, class: sreg_32 }
  - { id: 5, class: sreg_32 }
  - { id: 6, class: sgpr_256 }
  - { id: 7, class: sgpr_256 }
  - { id: 8, class: sgpr_256 }
  - { id: 9, class: sgpr_256 }
  - { id: 10, class: sgpr_256 }
  - { id: 11, class: sreg_32_xm0_xexec }
  - { id: 12, class: sreg_32_xm0_xexec }
  - { id: 13, class: sgpr_64 }
  - { id: 14, class: sreg_32_xm0_xexec }
  - { id: 15, class: sreg_32 }
  - { id: 16, class: sreg_32 }
  - { id: 17, class: sreg_32 }
  - { id: 18, class: sreg_32 }
  - { id: 19, class: sreg_32 }
  - { id: 20, class: sreg_32 }
  - { id: 21, class: sreg_32 }
  - { id: 22, class: sreg_32 }
  - { id: 23, class: sreg_32 }
  - { id: 24, class: sreg_32 }
  - { id: 25, class: sreg_32 }
  - { id: 26, class: sreg_32 }
  - { id: 27, class: sreg_32 }
  - { id: 28, class: sreg_32 }
  - { id: 29, class: sreg_32 }
  - { id: 30, class: sreg_32 }
  - { id: 31, class: sreg_32 }
  - { id: 32, class: sreg_32 }
  - { id: 33, class: sreg_32 }
  - { id: 34, class: sreg_32 }
  - { id: 35, class: sreg_32 }
  - { id: 36, class: sreg_32 }
  - { id: 37, class: sreg_32 }
  - { id: 38, class: sreg_32 }
  - { id: 39, class: sreg_32 }
  - { id: 40, class: sreg_32 }
  - { id: 41, class: sreg_32 }
  - { id: 42, class: sreg_32 }
  - { id: 43, class: sreg_32 }
  - { id: 44, class: sreg_32 }
  - { id: 45, class: sreg_32 }
  - { id: 46, class: sreg_32 }
  - { id: 47, class: sreg_32 }
  - { id: 48, class: sreg_32 }
  - { id: 49, class: sreg_32 }
  - { id: 50, class: sreg_32 }
  - { id: 51, class: sreg_32 }
  - { id: 52, class: sreg_32 }
  - { id: 53, class: sreg_32 }
  - { id: 54, class: sreg_32 }
  - { id: 55, class: sreg_32 }
  - { id: 56, class: sreg_32 }
  - { id: 57, class: sreg_32 }
  - { id: 58, class: sreg_32 }
  - { id: 59, class: sreg_32 }
  - { id: 60, class: sreg_32 }
  - { id: 61, class: sreg_32 }
  - { id: 62, class: sreg_32 }
  - { id: 63, class: sreg_32 }
  - { id: 64, class: sreg_32 }
  - { id: 65, class: sreg_32 }
  - { id: 66, class: sreg_32 }
  - { id: 67, class: sreg_32 }
  - { id: 68, class: sreg_32 }
  - { id: 69, class: sreg_32 }
  - { id: 70, class: sreg_32 }
  - { id: 71, class: sreg_32 }
  - { id: 72, class: sreg_32 }
  - { id: 73, class: sreg_32 }
  - { id: 74, class: sreg_32 }
  - { id: 75, class: sreg_32 }
  - { id: 76, class: sreg_32 }
  - { id: 77, class: sreg_32 }
  - { id: 78, class: sreg_32 }
  - { id: 79, class: sreg_32 }
  - { id: 80, class: sreg_32 }
  - { id: 81, class: sreg_32 }
  - { id: 82, class: sreg_32 }
  - { id: 83, class: sreg_32 }
  - { id: 84, class: sreg_32 }
  - { id: 85, class: sreg_32 }
  - { id: 86, class: sreg_32 }
  - { id: 87, class: sreg_32 }
  - { id: 88, class: sreg_32 }
  - { id: 89, class: sreg_32 }
  - { id: 90, class: sreg_32 }
  - { id: 91, class: sreg_32 }
  - { id: 92, class: sreg_32 }
  - { id: 93, class: sgpr_64 }
  - { id: 94, class: sreg_32_xm0_xexec }
  - { id: 95, class: sgpr_32 }
  - { id: 96, class: sreg_32_xm0_xexec }
  - { id: 97, class: sreg_64 }
  - { id: 98, class: sreg_32_xm0_xexec }
  - { id: 99, class: sreg_32_xm0_xexec }
  - { id: 100, class: sreg_64 }
  - { id: 101, class: sgpr_128 }
  - { id: 102, class: sreg_64_xexec }
  - { id: 103, class: sgpr_32 }
  - { id: 104, class: sgpr_64 }
  - { id: 105, class: sgpr_64 }
  - { id: 106, class: sgpr_64 }
  - { id: 107, class: sreg_32, preferred-register: '$vcc' }
  - { id: 108, class: sreg_32, preferred-register: '$vcc' }
  - { id: 109, class: sgpr_32 }
  - { id: 110, class: sgpr_256 }
  - { id: 111, class: sgpr_512 }
  - { id: 112, class: sgpr_512 }
  - { id: 113, class: sgpr_256 }
  - { id: 114, class: sgpr_256 }
  - { id: 115, class: sgpr_256 }
  - { id: 116, class: sreg_32_xm0_xexec }
machineFunctionInfo:
  isEntryFunction: true
  stackPtrOffsetReg: '$sgpr32'
  sgprForEXECCopy: '$sgpr105'
body: |
  ; GCN-LABEL: name: test_kernel
  ; GCN: bb.0:
  ; GCN-NEXT: successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; GCN-NEXT: liveins: $sgpr0, $sgpr1, $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: dead [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead undef [[DEF1:%[0-9]+]].sub1:vreg_64 = IMPLICIT_DEF
  ; GCN-NEXT: SI_SPILL_S32_SAVE $sgpr1, %stack.15, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.15, addrspace 5)
  ; GCN-NEXT: undef [[COPY:%[0-9]+]].sub1:sgpr_64 = COPY $sgpr0
  ; GCN-NEXT: SI_SPILL_S64_SAVE [[COPY]], %stack.2, implicit $exec, implicit $sgpr32 :: (store (s64) into %stack.2, align 4, addrspace 5)
  ; GCN-NEXT: undef [[V_READFIRSTLANE_B32_:%[0-9]+]].sub0:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF]], implicit $exec
  ; GCN-NEXT: [[V_READFIRSTLANE_B32_:%[0-9]+]].sub1:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF]], implicit $exec
  ; GCN-NEXT: undef [[V_READFIRSTLANE_B32_1:%[0-9]+]].sub0:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF]], implicit $exec
  ; GCN-NEXT: [[V_READFIRSTLANE_B32_1:%[0-9]+]].sub1:sgpr_64 = IMPLICIT_DEF
  ; GCN-NEXT: SI_SPILL_S64_SAVE [[V_READFIRSTLANE_B32_1]], %stack.19, implicit $exec, implicit $sgpr32 :: (store (s64) into %stack.19, align 4, addrspace 5)
  ; GCN-NEXT: undef [[V_READFIRSTLANE_B32_2:%[0-9]+]].sub0:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF]], implicit $exec
  ; GCN-NEXT: [[V_READFIRSTLANE_B32_2:%[0-9]+]].sub1:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF]], implicit $exec
  ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 0
  ; GCN-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 0
  ; GCN-NEXT: SI_SPILL_S32_SAVE [[S_MOV_B32_1]], %stack.17, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.17, addrspace 5)
  ; GCN-NEXT: S_CBRANCH_SCC1 %bb.2, implicit undef $scc
  ; GCN-NEXT: S_BRANCH %bb.1
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.1:
  ; GCN-NEXT: successors: %bb.2(0x80000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: [[DEF2:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: KILL [[DEF2]]
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.2:
  ; GCN-NEXT: successors: %bb.3(0x40000000), %bb.4(0x40000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM undef [[V_READFIRSTLANE_B32_2]], 132, 0 :: ("amdgpu-noclobber" load (s128), align 8, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S128_SAVE [[S_LOAD_DWORDX4_IMM]], %stack.14, implicit $exec, implicit $sgpr32 :: (store (s128) into %stack.14, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM undef [[V_READFIRSTLANE_B32_2]], 188, 0 :: ("amdgpu-noclobber" load (s256), align 8, addrspace 1)
  ; GCN-NEXT: S_CBRANCH_SCC1 %bb.4, implicit undef $scc
  ; GCN-NEXT: S_BRANCH %bb.3
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.3:
  ; GCN-NEXT: successors: %bb.4(0x80000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 -1
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.4:
  ; GCN-NEXT: successors: %bb.5(0x40000000), %bb.6(0x40000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: SI_SPILL_S32_SAVE [[S_MOV_B32_]], %stack.9, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.9, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM undef [[V_READFIRSTLANE_B32_2]], 120, 0 :: ("amdgpu-noclobber" load (s64), align 16, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S64_SAVE [[S_LOAD_DWORDX2_IMM]], %stack.18, implicit $exec, implicit $sgpr32 :: (store (s64) into %stack.18, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM1:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM undef [[V_READFIRSTLANE_B32_2]], 352, 0 :: ("amdgpu-noclobber" load (s256), align 16, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM1]], %stack.10, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.10, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORD_IMM:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM undef %97:sreg_64, 0, 0
  ; GCN-NEXT: SI_SPILL_S32_SAVE [[S_LOAD_DWORD_IMM]], %stack.11, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.11, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM2:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM undef [[V_READFIRSTLANE_B32_2]], 652, 0 :: ("amdgpu-noclobber" load (s256), align 8, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM2]], %stack.6, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.6, align 4, addrspace 5)
  ; GCN-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0
  ; GCN-NEXT: [[S_LOAD_DWORD_IMM1:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM [[S_MOV_B64_]], 0, 0 :: ("amdgpu-noclobber" load (s32), align 8, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S32_SAVE [[S_LOAD_DWORD_IMM1]], %stack.3, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.3, addrspace 5)
  ; GCN-NEXT: SI_SPILL_S64_SAVE [[V_READFIRSTLANE_B32_2]], %stack.1, implicit $exec, implicit $sgpr32 :: (store (s64) into %stack.1, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM3:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM [[V_READFIRSTLANE_B32_2]], 688, 0 :: ("amdgpu-noclobber" load (s256), align 16, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM3]], %stack.4, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.4, align 4, addrspace 5)
  ; GCN-NEXT: [[S_MOV_B32_2:%[0-9]+]]:sgpr_32 = S_MOV_B32 0
  ; GCN-NEXT: [[S_MOV_B32_3:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 0
  ; GCN-NEXT: S_CBRANCH_SCC1 %bb.6, implicit undef $scc
  ; GCN-NEXT: S_BRANCH %bb.5
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.5:
  ; GCN-NEXT: successors: %bb.6(0x80000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: [[S_MOV_B32_3:%[0-9]+]]:sreg_32_xm0_xexec = S_MOV_B32 -1
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.6:
  ; GCN-NEXT: successors: %bb.7(0x40000000), %bb.10(0x40000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: SI_SPILL_S32_SAVE [[S_MOV_B32_3]], %stack.5, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.5, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORD_IMM2:%[0-9]+]]:sgpr_32 = S_LOAD_DWORD_IMM undef %123:sgpr_64, 0, 0 :: ("amdgpu-noclobber" load (s32), align 16, addrspace 1)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM4:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM undef %124:sgpr_64, 152, 0 :: ("amdgpu-noclobber" load (s256), align 4, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM4]], %stack.20, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.20, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM5:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM undef %125:sgpr_64, 220, 0 :: ("amdgpu-noclobber" load (s256), align 4, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM5]], %stack.16, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.16, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM6:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM undef %126:sgpr_64, 384, 0 :: ("amdgpu-noclobber" load (s256), align 4, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM6]], %stack.13, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.13, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX16_IMM:%[0-9]+]]:sgpr_512 = S_LOAD_DWORDX16_IMM undef %127:sgpr_64, 440, 0 :: ("amdgpu-noclobber" load (s512), align 8, addrspace 1)
  ; GCN-NEXT: [[S_LOAD_DWORDX16_IMM1:%[0-9]+]]:sgpr_512 = S_LOAD_DWORDX16_IMM undef %128:sgpr_64, 584, 0 :: ("amdgpu-noclobber" load (s512), align 16, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S512_SAVE [[S_LOAD_DWORDX16_IMM1]], %stack.12, implicit $exec, implicit $sgpr32 :: (store (s512) into %stack.12, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORDX8_IMM7:%[0-9]+]]:sgpr_256 = S_LOAD_DWORDX8_IMM [[V_READFIRSTLANE_B32_]], 156, 0 :: ("amdgpu-noclobber" load (s256), align 8, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S256_SAVE [[S_LOAD_DWORDX8_IMM7]], %stack.8, implicit $exec, implicit $sgpr32 :: (store (s256) into %stack.8, align 4, addrspace 5)
  ; GCN-NEXT: [[SI_SPILL_S64_RESTORE:%[0-9]+]]:sgpr_64 = SI_SPILL_S64_RESTORE %stack.19, implicit $exec, implicit $sgpr32 :: (load (s64) from %stack.19, align 4, addrspace 5)
  ; GCN-NEXT: [[S_LOAD_DWORD_IMM3:%[0-9]+]]:sgpr_32 = S_LOAD_DWORD_IMM [[SI_SPILL_S64_RESTORE]], 0, 0 :: ("amdgpu-noclobber" load (s32), align 8, addrspace 1)
  ; GCN-NEXT: SI_SPILL_S32_SAVE [[S_LOAD_DWORD_IMM3]], %stack.7, implicit $exec, implicit $sgpr32 :: (store (s32) into %stack.7, addrspace 5)
  ; GCN-NEXT: SI_SPILL_S64_SAVE [[V_READFIRSTLANE_B32_]], %stack.0, implicit $exec, implicit $sgpr32 :: (store (s64) into %stack.0, align 4, addrspace 5)
  ; GCN-NEXT: [[COPY1:%[0-9]+]]:sgpr_64 = COPY [[V_READFIRSTLANE_B32_]]
  ; GCN-NEXT: dead [[S_LOAD_DWORD_IMM4:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM [[COPY1]], 0, 0 :: ("amdgpu-noclobber" load (s32), addrspace 1)
  ; GCN-NEXT: [[S_MOV_B64_1:%[0-9]+]]:sreg_64 = S_MOV_B64 0
  ; GCN-NEXT: [[S_LOAD_DWORD_IMM5:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM [[S_MOV_B64_1]], 0, 0 :: ("amdgpu-noclobber" load (s32), addrspace 1)
  ; GCN-NEXT: [[SI_SPILL_S64_RESTORE1:%[0-9]+]]:sgpr_64 = SI_SPILL_S64_RESTORE %stack.2, implicit $exec, implicit $sgpr32 :: (load (s64) from %stack.2, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY2:%[0-9]+]].sub1:sgpr_64 = COPY [[SI_SPILL_S64_RESTORE1]].sub1
  ; GCN-NEXT: [[COPY2:%[0-9]+]].sub0:sgpr_64 = S_MOV_B32 1
  ; GCN-NEXT: S_CBRANCH_SCC1 %bb.10, implicit undef $scc
  ; GCN-NEXT: S_BRANCH %bb.7
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.7:
  ; GCN-NEXT: successors: %bb.8(0x40000000), %bb.9(0x40000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: SI_SPILL_S64_SAVE [[COPY2]], %stack.2, implicit $exec, implicit $sgpr32 :: (store (s64) into %stack.2, align 4, addrspace 5)
  ; GCN-NEXT: undef [[V_READFIRSTLANE_B32_3:%[0-9]+]].sub0:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF1]].sub0, implicit $exec
  ; GCN-NEXT: dead [[V_READFIRSTLANE_B32_3:%[0-9]+]].sub1:sgpr_64 = V_READFIRSTLANE_B32 undef [[DEF1]].sub1, implicit $exec
  ; GCN-NEXT: [[DEF3:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: [[S_MOV_B32_4:%[0-9]+]]:sgpr_32 = S_MOV_B32 0
  ; GCN-NEXT: $vcc = COPY [[DEF3]]
  ; GCN-NEXT: S_CBRANCH_VCCNZ %bb.9, implicit $vcc
  ; GCN-NEXT: S_BRANCH %bb.8
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.8:
  ; GCN-NEXT: successors: %bb.9(0x80000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: [[S_MOV_B32_4:%[0-9]+]]:sgpr_32 = S_MOV_B32 -1
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.9:
  ; GCN-NEXT: successors: %bb.10(0x80000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: [[S_MOV_B32_2:%[0-9]+]]:sgpr_32 = COPY [[S_MOV_B32_4]]
  ; GCN-NEXT: [[COPY2:%[0-9]+]]:sgpr_64 = SI_SPILL_S64_RESTORE %stack.2, implicit $exec, implicit $sgpr32 :: (load (s64) from %stack.2, align 4, addrspace 5)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.10:
  ; GCN-NEXT: successors: %bb.11(0x40000000), %bb.12(0x40000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORD_IMM2]], 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE:%[0-9]+]]:sreg_32_xm0_xexec = SI_SPILL_S32_RESTORE %stack.17, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.17, addrspace 5)
  ; GCN-NEXT: dead [[S_AND_B32_:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[V_CMP_GT_F32_e64_]], [[SI_SPILL_S32_RESTORE]], implicit-def dead $scc
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE1:%[0-9]+]]:sgpr_32 = SI_SPILL_S32_RESTORE %stack.15, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.15, addrspace 5)
  ; GCN-NEXT: S_CMP_EQ_U32 [[SI_SPILL_S32_RESTORE1]], 0, implicit-def $scc
  ; GCN-NEXT: dead [[DEF4:%[0-9]+]]:sreg_32_xm0_xexec = IMPLICIT_DEF
  ; GCN-NEXT: [[SI_SPILL_S64_RESTORE2:%[0-9]+]]:sreg_64_xexec = SI_SPILL_S64_RESTORE %stack.18, implicit $exec, implicit $sgpr32 :: (load (s64) from %stack.18, align 4, addrspace 5)
  ; GCN-NEXT: S_CMP_EQ_U32 [[SI_SPILL_S64_RESTORE2]].sub1, 0, implicit-def $scc
  ; GCN-NEXT: dead [[DEF5:%[0-9]+]]:sreg_32_xm0_xexec = IMPLICIT_DEF
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.20, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.20, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY3:%[0-9]+]].sub0:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE]].sub0 {
  ; GCN-NEXT: internal [[COPY3]].sub2:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE]].sub2
  ; GCN-NEXT: internal [[COPY3]].sub4:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE]].sub4
  ; GCN-NEXT: internal [[COPY3]].sub7:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE]].sub7
  ; GCN-NEXT: }
  ; GCN-NEXT: dead [[S_OR_B32_:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY3]].sub7, [[S_LOAD_DWORD_IMM5]], implicit-def dead $scc
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY3]].sub0, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_2:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY3]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_3:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY3]].sub4, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_OR_B32_1:%[0-9]+]]:sreg_32 = S_OR_B32 [[S_LOAD_DWORDX8_IMM]].sub0, undef [[S_OR_B32_]], implicit-def dead $scc
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_4:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORDX8_IMM]].sub1, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_5:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORDX8_IMM]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_6:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORDX8_IMM]].sub3, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_7:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORDX8_IMM]].sub4, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_8:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORDX8_IMM]].sub5, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_9:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[S_LOAD_DWORDX8_IMM]].sub6, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S128_RESTORE:%[0-9]+]]:sgpr_128 = SI_SPILL_S128_RESTORE %stack.14, implicit $exec, implicit $sgpr32 :: (load (s128) from %stack.14, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY4:%[0-9]+]].sub0_sub1_sub2:sgpr_128 = COPY [[SI_SPILL_S128_RESTORE]].sub0_sub1_sub2
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_10:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY4]].sub0, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_11:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY4]].sub1, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_12:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY4]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[DEF6:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[S_AND_B32_1:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[DEF5]], [[DEF6]], implicit-def dead $scc
  ; GCN-NEXT: dead [[DEF7:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[DEF8:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[DEF9:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[DEF10:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[DEF11:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[S_AND_B32_2:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[DEF11]], undef [[DEF11]], implicit-def dead $scc
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE1:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.16, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.16, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY5:%[0-9]+]].sub0:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE1]].sub0 {
  ; GCN-NEXT: internal [[COPY5]].sub2:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE1]].sub2
  ; GCN-NEXT: internal [[COPY5]].sub5:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE1]].sub5
  ; GCN-NEXT: internal [[COPY5]].sub7:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE1]].sub7
  ; GCN-NEXT: }
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_13:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY5]].sub0, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_AND_B32_3:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[V_CMP_GT_F32_e64_8]], undef [[V_CMP_GT_F32_e64_9]], implicit-def dead $scc
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_14:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY5]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_OR_B32_2:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY5]].sub5, [[COPY5]].sub7, implicit-def dead $scc
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE2:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.10, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.10, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY6:%[0-9]+]].lo16_hi16_sub1_lo16_sub1_hi16_sub2_lo16_sub2_hi16_sub3_lo16_sub3_hi16_sub4_lo16_sub4_hi16_sub5_lo16_sub5_hi16_sub6_lo16_sub6_hi16:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE2]].lo16_hi16_sub1_lo16_sub1_hi16_sub2_lo16_sub2_hi16_sub3_lo16_sub3_hi16_sub4_lo16_sub4_hi16_sub5_lo16_sub5_hi16_sub6_lo16_sub6_hi16
  ; GCN-NEXT: dead [[S_OR_B32_3:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY6]].sub0, [[COPY6]].sub1, implicit-def dead $scc
  ; GCN-NEXT: dead [[S_OR_B32_4:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY6]].sub2, undef [[S_OR_B32_3]], implicit-def dead $scc
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE2:%[0-9]+]]:sreg_32_xm0_xexec = SI_SPILL_S32_RESTORE %stack.9, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.9, addrspace 5)
  ; GCN-NEXT: dead [[S_AND_B32_4:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[S_OR_B32_3]], [[SI_SPILL_S32_RESTORE2]], implicit-def dead $scc
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_15:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY6]].sub3, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_16:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY6]].sub4, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_17:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY6]].sub5, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_18:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY6]].sub6, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE3:%[0-9]+]]:sreg_32_xm0_xexec = SI_SPILL_S32_RESTORE %stack.11, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.11, addrspace 5)
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_19:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[SI_SPILL_S32_RESTORE3]], 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE3:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.13, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.13, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY7:%[0-9]+]].sub0:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE3]].sub0 {
  ; GCN-NEXT: internal [[COPY7]].sub2:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE3]].sub2
  ; GCN-NEXT: internal [[COPY7]].sub4:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE3]].sub4
  ; GCN-NEXT: internal [[COPY7]].sub7:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE3]].sub7
  ; GCN-NEXT: }
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_20:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY7]].sub0, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_21:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY7]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[DEF12:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_22:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY7]].sub4, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_AND_B32_5:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[DEF12]], undef [[V_CMP_GT_F32_e64_20]], implicit-def dead $scc
  ; GCN-NEXT: S_CMP_EQ_U32 [[COPY7]].sub7, 0, implicit-def $scc
  ; GCN-NEXT: undef [[COPY8:%[0-9]+]].sub0:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub0 {
  ; GCN-NEXT: internal [[COPY8]].sub2:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub2
  ; GCN-NEXT: internal [[COPY8]].sub4:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub4
  ; GCN-NEXT: internal [[COPY8]].sub6:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub6
  ; GCN-NEXT: internal [[COPY8]].sub9:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub9
  ; GCN-NEXT: internal [[COPY8]].sub10:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub10
  ; GCN-NEXT: internal [[COPY8]].sub13:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub13
  ; GCN-NEXT: internal [[COPY8]].sub14:sgpr_512 = COPY [[S_LOAD_DWORDX16_IMM]].sub14
  ; GCN-NEXT: }
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_23:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY8]].sub0, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_24:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY8]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_25:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY8]].sub4, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_26:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY8]].sub6, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_AND_B32_6:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[V_CMP_GT_F32_e64_23]], undef [[V_CMP_GT_F32_e64_23]], implicit-def dead $scc
  ; GCN-NEXT: dead [[S_OR_B32_5:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY8]].sub10, [[COPY8]].sub9, implicit-def dead $scc
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_27:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY8]].sub13, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_28:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY8]].sub14, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S512_RESTORE:%[0-9]+]]:sgpr_512 = SI_SPILL_S512_RESTORE %stack.12, implicit $exec, implicit $sgpr32 :: (load (s512) from %stack.12, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY9:%[0-9]+]].sub1:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub1 {
  ; GCN-NEXT: internal [[COPY9]].sub5:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub5
  ; GCN-NEXT: internal [[COPY9]].sub6:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub6
  ; GCN-NEXT: internal [[COPY9]].sub9:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub9
  ; GCN-NEXT: internal [[COPY9]].sub10:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub10
  ; GCN-NEXT: internal [[COPY9]].sub12:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub12
  ; GCN-NEXT: internal [[COPY9]].sub15:sgpr_512 = COPY [[SI_SPILL_S512_RESTORE]].sub15
  ; GCN-NEXT: }
  ; GCN-NEXT: S_CMP_EQ_U32 [[COPY9]].sub1, 0, implicit-def $scc
  ; GCN-NEXT: dead [[DEF13:%[0-9]+]]:sreg_32_xm0_xexec = IMPLICIT_DEF
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_29:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY9]].sub5, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_30:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY9]].sub6, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[DEF14:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_31:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY9]].sub9, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_32:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY9]].sub10, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[DEF15:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[S_AND_B32_7:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[DEF15]], undef [[DEF14]], implicit-def dead $scc
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_33:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY9]].sub12, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE4:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.6, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.6, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY10:%[0-9]+]].lo16_hi16_sub1_lo16_sub1_hi16_sub2_lo16_sub2_hi16_sub3_lo16_sub3_hi16_sub4_lo16_sub4_hi16_sub5_lo16_sub5_hi16_sub6_lo16_sub6_hi16:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE4]].lo16_hi16_sub1_lo16_sub1_hi16_sub2_lo16_sub2_hi16_sub3_lo16_sub3_hi16_sub4_lo16_sub4_hi16_sub5_lo16_sub5_hi16_sub6_lo16_sub6_hi16
  ; GCN-NEXT: dead [[S_OR_B32_6:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY10]].sub0, [[COPY9]].sub15, implicit-def dead $scc
  ; GCN-NEXT: dead [[DEF16:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_34:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY10]].sub1, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_35:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY10]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[DEF17:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_36:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY10]].sub3, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_37:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY10]].sub4, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[DEF18:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_38:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY10]].sub5, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_39:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY10]].sub6, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_AND_B32_8:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[DEF18]], undef [[DEF17]], implicit-def dead $scc
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE5:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.4, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.4, align 4, addrspace 5)
  ; GCN-NEXT: undef [[COPY11:%[0-9]+]].sub0_sub1_sub2_sub3_sub4_sub5:sgpr_256 = COPY [[SI_SPILL_S256_RESTORE5]].sub0_sub1_sub2_sub3_sub4_sub5
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_40:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY11]].sub0, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_41:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY11]].sub1, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE4:%[0-9]+]]:sreg_32_xm0_xexec = SI_SPILL_S32_RESTORE %stack.3, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.3, addrspace 5)
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_42:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[SI_SPILL_S32_RESTORE4]], 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_43:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY11]].sub2, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_44:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[COPY11]].sub3, 0, implicit $mode, implicit $exec
  ; GCN-NEXT: dead [[S_OR_B32_7:%[0-9]+]]:sreg_32 = S_OR_B32 [[COPY11]].sub4, [[COPY11]].sub5, implicit-def dead $scc
  ; GCN-NEXT: S_CMP_EQ_U32 [[SI_SPILL_S32_RESTORE4]], 0, implicit-def $scc
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE5:%[0-9]+]]:sreg_32_xm0_xexec = SI_SPILL_S32_RESTORE %stack.5, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.5, addrspace 5)
  ; GCN-NEXT: dead [[S_AND_B32_9:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[S_OR_B32_7]], [[SI_SPILL_S32_RESTORE5]], implicit-def dead $scc
  ; GCN-NEXT: dead [[S_LOAD_DWORD_IMM6:%[0-9]+]]:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM [[COPY2]], 0, 0 :: ("amdgpu-noclobber" load (s32), align 8, addrspace 1)
  ; GCN-NEXT: [[SI_SPILL_S256_RESTORE6:%[0-9]+]]:sgpr_256 = SI_SPILL_S256_RESTORE %stack.8, implicit $exec, implicit $sgpr32 :: (load (s256) from %stack.8, align 4, addrspace 5)
  ; GCN-NEXT: S_CMP_EQ_U32 [[SI_SPILL_S256_RESTORE6]].sub7, 0, implicit-def $scc
  ; GCN-NEXT: [[SI_SPILL_S32_RESTORE6:%[0-9]+]]:sgpr_32 = SI_SPILL_S32_RESTORE %stack.7, implicit $exec, implicit $sgpr32 :: (load (s32) from %stack.7, addrspace 5)
  ; GCN-NEXT: dead [[V_CMP_GT_F32_e64_45:%[0-9]+]]:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, [[SI_SPILL_S32_RESTORE6]], 0, implicit $mode, implicit $exec
  ; GCN-NEXT: [[DEF19:%[0-9]+]]:sreg_32 = IMPLICIT_DEF
  ; GCN-NEXT: dead [[S_AND_B32_10:%[0-9]+]]:sreg_32 = S_AND_B32 [[DEF19]], undef [[S_LOAD_DWORD_IMM6]], implicit-def dead $scc
  ; GCN-NEXT: dead [[S_AND_B32_11:%[0-9]+]]:sreg_32 = S_AND_B32 undef [[S_AND_B32_10]], [[S_MOV_B32_2]], implicit-def dead $scc
  ; GCN-NEXT: $vcc = COPY undef [[S_AND_B32_11]]
  ; GCN-NEXT: S_CBRANCH_VCCNZ %bb.12, implicit $vcc
  ; GCN-NEXT: S_BRANCH %bb.11
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.11:
  ; GCN-NEXT: successors: %bb.12(0x80000000)
  ; GCN-NEXT: {{  $}}
  ; GCN-NEXT: bb.12:
  ; GCN-NEXT: [[SI_SPILL_S64_RESTORE3:%[0-9]+]]:sgpr_64 = SI_SPILL_S64_RESTORE %stack.1, implicit $exec, implicit $sgpr32 :: (load (s64) from %stack.1, align 4, addrspace 5)
  ; GCN-NEXT: GLOBAL_STORE_DWORD_SADDR undef [[DEF]], undef [[DEF]], [[SI_SPILL_S64_RESTORE3]], 0, 0, implicit $exec :: (store (s32), addrspace 1)
  ; GCN-NEXT: [[SI_SPILL_S64_RESTORE4:%[0-9]+]]:sgpr_64 = SI_SPILL_S64_RESTORE %stack.0, implicit $exec, implicit $sgpr32 :: (load (s64) from %stack.0, align 4, addrspace 5)
  ; GCN-NEXT: GLOBAL_STORE_DWORD_SADDR undef [[DEF]], undef [[DEF]], [[SI_SPILL_S64_RESTORE4]], 0, 0, implicit $exec :: (store (s32), addrspace 1)
  ; GCN-NEXT: S_ENDPGM 0
  bb.0:
    successors: %bb.1, %bb.2
    liveins: $sgpr0, $sgpr1, $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13

    %0:vgpr_32 = IMPLICIT_DEF
    undef %1.sub1:vreg_64 = IMPLICIT_DEF
    %109:sgpr_32 = COPY $sgpr1
    undef %93.sub1:sgpr_64 = COPY $sgpr0
    undef %106.sub0:sgpr_64 = V_READFIRSTLANE_B32 undef %0, implicit $exec
    %106.sub1:sgpr_64 = V_READFIRSTLANE_B32 undef %0, implicit $exec
    undef %105.sub0:sgpr_64 = V_READFIRSTLANE_B32 undef %0, implicit $exec
    %105.sub1:sgpr_64 = IMPLICIT_DEF
    undef %104.sub0:sgpr_64 = V_READFIRSTLANE_B32 undef %0, implicit $exec
    %104.sub1:sgpr_64 = V_READFIRSTLANE_B32 undef %0, implicit $exec
    %4:sreg_32 = S_MOV_B32 0
    %5:sreg_32 = S_MOV_B32 0
    S_CBRANCH_SCC1 %bb.2, implicit undef $scc
    S_BRANCH %bb.1

  bb.1:
    %5:sreg_32 = IMPLICIT_DEF
  bb.2:
    successors: %bb.3, %bb.4

    %101:sgpr_128 = S_LOAD_DWORDX4_IMM undef %104, 132, 0 :: ("amdgpu-noclobber" load (s128), align 8, addrspace 1)
    %10:sgpr_256 = S_LOAD_DWORDX8_IMM undef %104, 188, 0 :: ("amdgpu-noclobber" load (s256), align 8, addrspace 1)
    %100:sreg_64 = S_MOV_B64 0
    S_CBRANCH_SCC1 %bb.4, implicit undef $scc
    S_BRANCH %bb.3

  bb.3:
    %4:sreg_32 = S_MOV_B32 -1
  bb.4:
    successors: %bb.5, %bb.6

    %102:sreg_64_xexec = S_LOAD_DWORDX2_IMM undef %104, 120, 0 :: ("amdgpu-noclobber" load (s64), align 16, addrspace 1)
    %8:sgpr_256 = S_LOAD_DWORDX8_IMM undef %104, 352, 0 :: ("amdgpu-noclobber" load (s256), align 16, addrspace 1)
    %98:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM undef %97:sreg_64, 0, 0
    %7:sgpr_256 = S_LOAD_DWORDX8_IMM undef %104, 652, 0 :: ("amdgpu-noclobber" load (s256), align 8, addrspace 1)
    %96:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %100, 0, 0 :: ("amdgpu-noclobber" load (s32), align 8, addrspace 1)
    %6:sgpr_256 = S_LOAD_DWORDX8_IMM %104, 688, 0 :: ("amdgpu-noclobber" load (s256), align 16, addrspace 1)
    %2:sgpr_32 = S_MOV_B32 0
    %3:sreg_32 = S_MOV_B32 0
    S_CBRANCH_SCC1 %bb.6, implicit undef $scc
    S_BRANCH %bb.5

  bb.5:
    %3:sreg_32 = S_MOV_B32 -1
  bb.6:
    successors: %bb.7, %bb.10

    %103:sgpr_32 = S_LOAD_DWORD_IMM undef %104, 0, 0 :: ("amdgpu-noclobber" load (s32), align 16, addrspace 1)
    %115:sgpr_256 = S_LOAD_DWORDX8_IMM undef %104, 152, 0 :: ("amdgpu-noclobber" load (s256), align 4, addrspace 1)
    %114:sgpr_256 = S_LOAD_DWORDX8_IMM undef %104, 220, 0 :: ("amdgpu-noclobber" load (s256), align 4, addrspace 1)
    %113:sgpr_256 = S_LOAD_DWORDX8_IMM undef %104, 384, 0 :: ("amdgpu-noclobber" load (s256), align 4, addrspace 1)
    %112:sgpr_512 = S_LOAD_DWORDX16_IMM undef %104, 440, 0 :: ("amdgpu-noclobber" load (s512), align 8, addrspace 1)
    %111:sgpr_512 = S_LOAD_DWORDX16_IMM undef %104, 584, 0 :: ("amdgpu-noclobber" load (s512), align 16, addrspace 1)
    %110:sgpr_256 = S_LOAD_DWORDX8_IMM %106, 156, 0 :: ("amdgpu-noclobber" load (s256), align 8, addrspace 1)
    %95:sgpr_32 = S_LOAD_DWORD_IMM %105, 0, 0 :: ("amdgpu-noclobber" load (s32), align 8, addrspace 1)
    %94:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %106, 0, 0 :: ("amdgpu-noclobber" load (s32), addrspace 1)
    %99:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %100, 0, 0 :: ("amdgpu-noclobber" load (s32), addrspace 1)
    %107:sreg_32 = IMPLICIT_DEF
    %108:sreg_32 = IMPLICIT_DEF
    %93.sub0:sgpr_64 = S_MOV_B32 1
    S_CBRANCH_SCC1 %bb.10, implicit undef $scc
    S_BRANCH %bb.7

  bb.7:
    successors: %bb.8, %bb.9

    undef %13.sub0:sgpr_64 = V_READFIRSTLANE_B32 undef %1.sub0, implicit $exec
    %13.sub1:sgpr_64 = V_READFIRSTLANE_B32 undef %1.sub1, implicit $exec
    %92:sreg_32 = IMPLICIT_DEF
    %2:sgpr_32 = S_MOV_B32 0
    $vcc = COPY %92
    S_CBRANCH_VCCNZ %bb.9, implicit $vcc
    S_BRANCH %bb.8

  bb.8:
    %2:sgpr_32 = S_MOV_B32 -1
  bb.9:

  bb.10:
    successors: %bb.11, %bb.12

    %91:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %103, 0, implicit $mode, implicit $exec
    %90:sreg_32 = S_AND_B32 undef %91, %5, implicit-def dead $scc
    S_CMP_EQ_U32 %109, 0, implicit-def $scc
    %12:sreg_32_xm0_xexec = IMPLICIT_DEF
    S_CMP_EQ_U32 %102.sub1, 0, implicit-def $scc
    %11:sreg_32_xm0_xexec = IMPLICIT_DEF
    %77:sreg_32 = S_OR_B32 %115.sub7, %99, implicit-def dead $scc
    %82:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %115.sub0, 0, implicit $mode, implicit $exec
    %79:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %115.sub2, 0, implicit $mode, implicit $exec
    %78:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %115.sub4, 0, implicit $mode, implicit $exec
    %76:sreg_32 = S_OR_B32 %10.sub0, undef %77, implicit-def dead $scc
    %75:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %10.sub1, 0, implicit $mode, implicit $exec
    %74:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %10.sub2, 0, implicit $mode, implicit $exec
    %73:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %10.sub3, 0, implicit $mode, implicit $exec
    %72:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %10.sub4, 0, implicit $mode, implicit $exec
    %70:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %10.sub5, 0, implicit $mode, implicit $exec
    %69:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %10.sub6, 0, implicit $mode, implicit $exec
    %87:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %101.sub0, 0, implicit $mode, implicit $exec
    %86:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %101.sub1, 0, implicit $mode, implicit $exec
    %83:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %101.sub2, 0, implicit $mode, implicit $exec
    %89:sreg_32 = S_AND_B32 undef %11, %108, implicit-def dead $scc
    %88:sreg_32 = IMPLICIT_DEF
    %85:sreg_32 = IMPLICIT_DEF
    %84:sreg_32 = IMPLICIT_DEF
    %81:sreg_32 = IMPLICIT_DEF
    %80:sreg_32 = IMPLICIT_DEF
    %71:sreg_32 = S_AND_B32 undef %80, undef %80, implicit-def dead $scc
    %67:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %114.sub0, 0, implicit $mode, implicit $exec
    %68:sreg_32 = S_AND_B32 undef %70, undef %69, implicit-def dead $scc
    %66:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %114.sub2, 0, implicit $mode, implicit $exec
    %65:sreg_32 = S_OR_B32 %114.sub5, %114.sub7, implicit-def dead $scc
    %63:sreg_32 = S_OR_B32 %8.sub0, %8.sub1, implicit-def dead $scc
    %62:sreg_32 = S_OR_B32 %8.sub2, undef %63, implicit-def dead $scc
    %64:sreg_32 = S_AND_B32 undef %63, %4, implicit-def dead $scc
    %61:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %8.sub3, 0, implicit $mode, implicit $exec
    %60:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %8.sub4, 0, implicit $mode, implicit $exec
    %59:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %8.sub5, 0, implicit $mode, implicit $exec
    %58:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %8.sub6, 0, implicit $mode, implicit $exec
    %57:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %98, 0, implicit $mode, implicit $exec
    %56:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %113.sub0, 0, implicit $mode, implicit $exec
    %53:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %113.sub2, 0, implicit $mode, implicit $exec
    %55:sreg_32 = IMPLICIT_DEF
    %52:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %113.sub4, 0, implicit $mode, implicit $exec
    %54:sreg_32 = S_AND_B32 undef %55, undef %56, implicit-def dead $scc
    S_CMP_EQ_U32 %113.sub7, 0, implicit-def $scc
    %51:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %112.sub0, 0, implicit $mode, implicit $exec
    %49:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %112.sub2, 0, implicit $mode, implicit $exec
    %48:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %112.sub4, 0, implicit $mode, implicit $exec
    %47:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %112.sub6, 0, implicit $mode, implicit $exec
    %50:sreg_32 = S_AND_B32 undef %51, undef %51, implicit-def dead $scc
    %46:sreg_32 = S_OR_B32 %112.sub10, %112.sub9, implicit-def dead $scc
    %45:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %112.sub13, 0, implicit $mode, implicit $exec
    %44:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %112.sub14, 0, implicit $mode, implicit $exec
    S_CMP_EQ_U32 %111.sub1, 0, implicit-def $scc
    %116:sreg_32_xm0_xexec = IMPLICIT_DEF
    %42:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %111.sub5, 0, implicit $mode, implicit $exec
    %41:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %111.sub6, 0, implicit $mode, implicit $exec
    %43:sreg_32 = IMPLICIT_DEF
    %38:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %111.sub9, 0, implicit $mode, implicit $exec
    %37:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %111.sub10, 0, implicit $mode, implicit $exec
    %40:sreg_32 = IMPLICIT_DEF
    %39:sreg_32 = S_AND_B32 undef %40, undef %43, implicit-def dead $scc
    %36:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %111.sub12, 0, implicit $mode, implicit $exec
    %34:sreg_32 = S_OR_B32 %7.sub0, %111.sub15, implicit-def dead $scc
    %35:sreg_32 = IMPLICIT_DEF
    %32:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %7.sub1, 0, implicit $mode, implicit $exec
    %31:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %7.sub2, 0, implicit $mode, implicit $exec
    %33:sreg_32 = IMPLICIT_DEF
    %28:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %7.sub3, 0, implicit $mode, implicit $exec
    %27:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %7.sub4, 0, implicit $mode, implicit $exec
    %30:sreg_32 = IMPLICIT_DEF
    %26:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %7.sub5, 0, implicit $mode, implicit $exec
    %25:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %7.sub6, 0, implicit $mode, implicit $exec
    %29:sreg_32 = S_AND_B32 undef %30, undef %33, implicit-def dead $scc
    %23:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %6.sub0, 0, implicit $mode, implicit $exec
    %22:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %6.sub1, 0, implicit $mode, implicit $exec
    %24:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %96, 0, implicit $mode, implicit $exec
    %21:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %6.sub2, 0, implicit $mode, implicit $exec
    %20:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %6.sub3, 0, implicit $mode, implicit $exec
    %19:sreg_32 = S_OR_B32 %6.sub4, %6.sub5, implicit-def dead $scc
    S_CMP_EQ_U32 %96, 0, implicit-def $scc
    %18:sreg_32 = S_AND_B32 undef %19, %3, implicit-def dead $scc
    %14:sreg_32_xm0_xexec = S_LOAD_DWORD_IMM %93, 0, 0 :: ("amdgpu-noclobber" load (s32), align 8, addrspace 1)
    S_CMP_EQ_U32 %110.sub7, 0, implicit-def $scc
    %16:sreg_32 = V_CMP_GT_F32_e64 0, 0, 0, %95, 0, implicit $mode, implicit $exec
    %17:sreg_32 = S_AND_B32 %107, undef %14, implicit-def dead $scc
    %15:sreg_32 = S_AND_B32 undef %17, %2, implicit-def dead $scc
    $vcc = COPY undef %15
    S_CBRANCH_VCCNZ %bb.12, implicit $vcc
    S_BRANCH %bb.11

  bb.11:

  bb.12:
    GLOBAL_STORE_DWORD_SADDR undef %0, undef %0, %104, 0, 0, implicit $exec :: (store (s32), addrspace 1)
    GLOBAL_STORE_DWORD_SADDR undef %0, undef %0, %106, 0, 0, implicit $exec :: (store (s32), addrspace 1)