1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx900 -run-pass=amdgpu-prelegalizer-combiner %s -o - | FileCheck -check-prefix=GFX9 %s
3 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx900 -run-pass=amdgpu-prelegalizer-combiner -fp-contract=fast %s -o - | FileCheck -check-prefix=GFX9-CONTRACT %s
4 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx900 -run-pass=amdgpu-prelegalizer-combiner --denormal-fp-math=preserve-sign %s -o - | FileCheck -check-prefix=GFX9-DENORM %s
5 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx900 -run-pass=amdgpu-prelegalizer-combiner -enable-unsafe-fp-math %s -o - | FileCheck -check-prefix=GFX9-UNSAFE %s
6 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx1010 -run-pass=amdgpu-prelegalizer-combiner %s -o - | FileCheck -check-prefix=GFX10 %s
7 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx1010 -run-pass=amdgpu-prelegalizer-combiner -fp-contract=fast %s -o - | FileCheck -check-prefix=GFX10-CONTRACT %s
8 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx1010 -run-pass=amdgpu-prelegalizer-combiner --denormal-fp-math=preserve-sign %s -o - | FileCheck -check-prefix=GFX10-DENORM %s
9 # RUN: llc -global-isel -march=amdgcn -mcpu=gfx1010 -run-pass=amdgpu-prelegalizer-combiner -enable-unsafe-fp-math %s -o - | FileCheck -check-prefix=GFX10-UNSAFE %s
# fma_combine, f32, mul feeding the LHS of the add: reassoc G_FMUL + G_FADD is
# folded to G_FMA only for the -fp-contract=fast (CONTRACT) and
# -enable-unsafe-fp-math (UNSAFE) runs; the default and preserve-sign-denormal
# (DENORM) runs keep the separate mul/add pair.
# NOTE(review): restored the `$vgpr0 = COPY %5(s32)` result copy that was
# missing from the MIR body — every check prefix asserts it (e.g.
# `; GFX9: $vgpr0 = COPY [[FADD]](s32)`) and the sibling
# test_f32_add_mul_rhs body carries the matching line.
12 name: test_f32_add_mul
15 liveins: $vgpr0, $vgpr1, $vgpr2, $sgpr30_sgpr31
17 ; GFX9-LABEL: name: test_f32_add_mul
18 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
19 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
20 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
21 ; GFX9: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
22 ; GFX9: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
23 ; GFX9: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[FMUL]], [[COPY2]]
24 ; GFX9: $vgpr0 = COPY [[FADD]](s32)
25 ; GFX9: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
26 ; GFX9: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
27 ; GFX9-CONTRACT-LABEL: name: test_f32_add_mul
28 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
29 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
30 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
31 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
32 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
33 ; GFX9-CONTRACT: $vgpr0 = COPY [[FMA]](s32)
34 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
35 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
36 ; GFX9-DENORM-LABEL: name: test_f32_add_mul
37 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
38 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
39 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
40 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
41 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
42 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[FMUL]], [[COPY2]]
43 ; GFX9-DENORM: $vgpr0 = COPY [[FADD]](s32)
44 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
45 ; GFX9-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
46 ; GFX9-UNSAFE-LABEL: name: test_f32_add_mul
47 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
48 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
49 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
50 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
51 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
52 ; GFX9-UNSAFE: $vgpr0 = COPY [[FMA]](s32)
53 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
54 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
55 ; GFX10-LABEL: name: test_f32_add_mul
56 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
57 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
58 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
59 ; GFX10: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
60 ; GFX10: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
61 ; GFX10: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[FMUL]], [[COPY2]]
62 ; GFX10: $vgpr0 = COPY [[FADD]](s32)
63 ; GFX10: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
64 ; GFX10: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
65 ; GFX10-CONTRACT-LABEL: name: test_f32_add_mul
66 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
67 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
68 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
69 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
70 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
71 ; GFX10-CONTRACT: $vgpr0 = COPY [[FMA]](s32)
72 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
73 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
74 ; GFX10-DENORM-LABEL: name: test_f32_add_mul
75 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
76 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
77 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
78 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
79 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
80 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[FMUL]], [[COPY2]]
81 ; GFX10-DENORM: $vgpr0 = COPY [[FADD]](s32)
82 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
83 ; GFX10-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
84 ; GFX10-UNSAFE-LABEL: name: test_f32_add_mul
85 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
86 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
87 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
88 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
89 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
90 ; GFX10-UNSAFE: $vgpr0 = COPY [[FMA]](s32)
91 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
92 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
93 %0:_(s32) = COPY $vgpr0
94 %1:_(s32) = COPY $vgpr1
95 %2:_(s32) = COPY $vgpr2
96 %3:sgpr_64 = COPY $sgpr30_sgpr31
97 %4:_(s32) = reassoc G_FMUL %0, %1
98 %5:_(s32) = reassoc G_FADD %4, %2
99 $vgpr0 = COPY %5(s32)
100 %6:ccr_sgpr_64 = COPY %3
101 S_SETPC_B64_return %6, implicit $vgpr0
# fma_combine, f32, mul feeding the RHS of the add (%5 = G_FADD %2, %4):
# folded to G_FMA only for the -fp-contract=fast (CONTRACT) and
# -enable-unsafe-fp-math (UNSAFE) runs; default and preserve-sign-denormal
# (DENORM) runs keep the separate reassoc mul/add pair.
105 name: test_f32_add_mul_rhs
108 liveins: $vgpr0, $vgpr1, $vgpr2, $sgpr30_sgpr31
110 ; GFX9-LABEL: name: test_f32_add_mul_rhs
111 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
112 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
113 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
114 ; GFX9: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
115 ; GFX9: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
116 ; GFX9: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[COPY2]], [[FMUL]]
117 ; GFX9: $vgpr0 = COPY [[FADD]](s32)
118 ; GFX9: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
119 ; GFX9: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
120 ; GFX9-CONTRACT-LABEL: name: test_f32_add_mul_rhs
121 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
122 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
123 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
124 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
125 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
126 ; GFX9-CONTRACT: $vgpr0 = COPY [[FMA]](s32)
127 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
128 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
129 ; GFX9-DENORM-LABEL: name: test_f32_add_mul_rhs
130 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
131 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
132 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
133 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
134 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
135 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[COPY2]], [[FMUL]]
136 ; GFX9-DENORM: $vgpr0 = COPY [[FADD]](s32)
137 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
138 ; GFX9-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
139 ; GFX9-UNSAFE-LABEL: name: test_f32_add_mul_rhs
140 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
141 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
142 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
143 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
144 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
145 ; GFX9-UNSAFE: $vgpr0 = COPY [[FMA]](s32)
146 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
147 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
148 ; GFX10-LABEL: name: test_f32_add_mul_rhs
149 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
150 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
151 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
152 ; GFX10: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
153 ; GFX10: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
154 ; GFX10: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[COPY2]], [[FMUL]]
155 ; GFX10: $vgpr0 = COPY [[FADD]](s32)
156 ; GFX10: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
157 ; GFX10: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
158 ; GFX10-CONTRACT-LABEL: name: test_f32_add_mul_rhs
159 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
160 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
161 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
162 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
163 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
164 ; GFX10-CONTRACT: $vgpr0 = COPY [[FMA]](s32)
165 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
166 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
167 ; GFX10-DENORM-LABEL: name: test_f32_add_mul_rhs
168 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
169 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
170 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
171 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
172 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(s32) = reassoc G_FMUL [[COPY]], [[COPY1]]
173 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(s32) = reassoc G_FADD [[COPY2]], [[FMUL]]
174 ; GFX10-DENORM: $vgpr0 = COPY [[FADD]](s32)
175 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
176 ; GFX10-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
177 ; GFX10-UNSAFE-LABEL: name: test_f32_add_mul_rhs
178 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
179 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
180 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
181 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
182 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(s32) = G_FMA [[COPY]], [[COPY1]], [[COPY2]]
183 ; GFX10-UNSAFE: $vgpr0 = COPY [[FMA]](s32)
184 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
185 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
186 %0:_(s32) = COPY $vgpr0
187 %1:_(s32) = COPY $vgpr1
188 %2:_(s32) = COPY $vgpr2
189 %3:sgpr_64 = COPY $sgpr30_sgpr31
190 %4:_(s32) = reassoc G_FMUL %0, %1
191 %5:_(s32) = reassoc G_FADD %2, %4
192 $vgpr0 = COPY %5(s32)
193 %6:ccr_sgpr_64 = COPY %3
194 S_SETPC_B64_return %6, implicit $vgpr0
# fma_combine, f16 (half): s16 operands come from G_TRUNC of the s32 VGPR
# copies and the s16 result is G_ANYEXT'ed back to s32 before the return copy;
# G_FMA is formed only for the CONTRACT/UNSAFE runs, the default and DENORM
# runs keep the reassoc G_FMUL + G_FADD pair.
198 name: test_half_add_mul
201 liveins: $vgpr0, $vgpr1, $vgpr2, $sgpr30_sgpr31
203 ; GFX9-LABEL: name: test_half_add_mul
204 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
205 ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
206 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
207 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
208 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
209 ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
210 ; GFX9: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
211 ; GFX9: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
212 ; GFX9: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[FMUL]], [[TRUNC2]]
213 ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
214 ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
215 ; GFX9: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
216 ; GFX9: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
217 ; GFX9-CONTRACT-LABEL: name: test_half_add_mul
218 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
219 ; GFX9-CONTRACT: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
220 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
221 ; GFX9-CONTRACT: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
222 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
223 ; GFX9-CONTRACT: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
224 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
225 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
226 ; GFX9-CONTRACT: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
227 ; GFX9-CONTRACT: $vgpr0 = COPY [[ANYEXT]](s32)
228 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
229 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
230 ; GFX9-DENORM-LABEL: name: test_half_add_mul
231 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
232 ; GFX9-DENORM: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
233 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
234 ; GFX9-DENORM: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
235 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
236 ; GFX9-DENORM: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
237 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
238 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
239 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[FMUL]], [[TRUNC2]]
240 ; GFX9-DENORM: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
241 ; GFX9-DENORM: $vgpr0 = COPY [[ANYEXT]](s32)
242 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
243 ; GFX9-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
244 ; GFX9-UNSAFE-LABEL: name: test_half_add_mul
245 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
246 ; GFX9-UNSAFE: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
247 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
248 ; GFX9-UNSAFE: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
249 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
250 ; GFX9-UNSAFE: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
251 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
252 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
253 ; GFX9-UNSAFE: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
254 ; GFX9-UNSAFE: $vgpr0 = COPY [[ANYEXT]](s32)
255 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
256 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
257 ; GFX10-LABEL: name: test_half_add_mul
258 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
259 ; GFX10: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
260 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
261 ; GFX10: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
262 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
263 ; GFX10: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
264 ; GFX10: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
265 ; GFX10: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
266 ; GFX10: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[FMUL]], [[TRUNC2]]
267 ; GFX10: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
268 ; GFX10: $vgpr0 = COPY [[ANYEXT]](s32)
269 ; GFX10: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
270 ; GFX10: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
271 ; GFX10-CONTRACT-LABEL: name: test_half_add_mul
272 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
273 ; GFX10-CONTRACT: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
274 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
275 ; GFX10-CONTRACT: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
276 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
277 ; GFX10-CONTRACT: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
278 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
279 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
280 ; GFX10-CONTRACT: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
281 ; GFX10-CONTRACT: $vgpr0 = COPY [[ANYEXT]](s32)
282 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
283 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
284 ; GFX10-DENORM-LABEL: name: test_half_add_mul
285 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
286 ; GFX10-DENORM: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
287 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
288 ; GFX10-DENORM: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
289 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
290 ; GFX10-DENORM: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
291 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
292 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
293 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[FMUL]], [[TRUNC2]]
294 ; GFX10-DENORM: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
295 ; GFX10-DENORM: $vgpr0 = COPY [[ANYEXT]](s32)
296 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
297 ; GFX10-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
298 ; GFX10-UNSAFE-LABEL: name: test_half_add_mul
299 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
300 ; GFX10-UNSAFE: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
301 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
302 ; GFX10-UNSAFE: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
303 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
304 ; GFX10-UNSAFE: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
305 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
306 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
307 ; GFX10-UNSAFE: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
308 ; GFX10-UNSAFE: $vgpr0 = COPY [[ANYEXT]](s32)
309 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
310 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
311 %4:_(s32) = COPY $vgpr0
312 %0:_(s16) = G_TRUNC %4(s32)
313 %5:_(s32) = COPY $vgpr1
314 %1:_(s16) = G_TRUNC %5(s32)
315 %6:_(s32) = COPY $vgpr2
316 %2:_(s16) = G_TRUNC %6(s32)
317 %3:sgpr_64 = COPY $sgpr30_sgpr31
318 %7:_(s16) = reassoc G_FMUL %0, %1
319 %8:_(s16) = reassoc G_FADD %7, %2
320 %10:_(s32) = G_ANYEXT %8(s16)
321 $vgpr0 = COPY %10(s32)
322 %9:ccr_sgpr_64 = COPY %3
323 S_SETPC_B64_return %9, implicit $vgpr0
# fma_combine, f16 (half), mul feeding the RHS of the add (%8 = G_FADD %2, %7):
# s16 operands are truncated from s32 VGPR copies and the result any-extended
# back to s32; G_FMA is formed only for the CONTRACT/UNSAFE runs, the default
# and DENORM runs keep the reassoc G_FMUL + G_FADD pair.
327 name: test_half_add_mul_rhs
330 liveins: $vgpr0, $vgpr1, $vgpr2, $sgpr30_sgpr31
332 ; GFX9-LABEL: name: test_half_add_mul_rhs
333 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
334 ; GFX9: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
335 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
336 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
337 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
338 ; GFX9: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
339 ; GFX9: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
340 ; GFX9: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
341 ; GFX9: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[TRUNC2]], [[FMUL]]
342 ; GFX9: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
343 ; GFX9: $vgpr0 = COPY [[ANYEXT]](s32)
344 ; GFX9: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
345 ; GFX9: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
346 ; GFX9-CONTRACT-LABEL: name: test_half_add_mul_rhs
347 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
348 ; GFX9-CONTRACT: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
349 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
350 ; GFX9-CONTRACT: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
351 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
352 ; GFX9-CONTRACT: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
353 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
354 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
355 ; GFX9-CONTRACT: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
356 ; GFX9-CONTRACT: $vgpr0 = COPY [[ANYEXT]](s32)
357 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
358 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
359 ; GFX9-DENORM-LABEL: name: test_half_add_mul_rhs
360 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
361 ; GFX9-DENORM: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
362 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
363 ; GFX9-DENORM: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
364 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
365 ; GFX9-DENORM: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
366 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
367 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
368 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[TRUNC2]], [[FMUL]]
369 ; GFX9-DENORM: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
370 ; GFX9-DENORM: $vgpr0 = COPY [[ANYEXT]](s32)
371 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
372 ; GFX9-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
373 ; GFX9-UNSAFE-LABEL: name: test_half_add_mul_rhs
374 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
375 ; GFX9-UNSAFE: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
376 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
377 ; GFX9-UNSAFE: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
378 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
379 ; GFX9-UNSAFE: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
380 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
381 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
382 ; GFX9-UNSAFE: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
383 ; GFX9-UNSAFE: $vgpr0 = COPY [[ANYEXT]](s32)
384 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
385 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
386 ; GFX10-LABEL: name: test_half_add_mul_rhs
387 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
388 ; GFX10: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
389 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
390 ; GFX10: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
391 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
392 ; GFX10: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
393 ; GFX10: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
394 ; GFX10: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
395 ; GFX10: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[TRUNC2]], [[FMUL]]
396 ; GFX10: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
397 ; GFX10: $vgpr0 = COPY [[ANYEXT]](s32)
398 ; GFX10: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
399 ; GFX10: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
400 ; GFX10-CONTRACT-LABEL: name: test_half_add_mul_rhs
401 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
402 ; GFX10-CONTRACT: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
403 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
404 ; GFX10-CONTRACT: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
405 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
406 ; GFX10-CONTRACT: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
407 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
408 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
409 ; GFX10-CONTRACT: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
410 ; GFX10-CONTRACT: $vgpr0 = COPY [[ANYEXT]](s32)
411 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
412 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
413 ; GFX10-DENORM-LABEL: name: test_half_add_mul_rhs
414 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
415 ; GFX10-DENORM: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
416 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
417 ; GFX10-DENORM: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
418 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
419 ; GFX10-DENORM: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
420 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
421 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(s16) = reassoc G_FMUL [[TRUNC]], [[TRUNC1]]
422 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(s16) = reassoc G_FADD [[TRUNC2]], [[FMUL]]
423 ; GFX10-DENORM: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FADD]](s16)
424 ; GFX10-DENORM: $vgpr0 = COPY [[ANYEXT]](s32)
425 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
426 ; GFX10-DENORM: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
427 ; GFX10-UNSAFE-LABEL: name: test_half_add_mul_rhs
428 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
429 ; GFX10-UNSAFE: [[TRUNC:%[0-9]+]]:_(s16) = G_TRUNC [[COPY]](s32)
430 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
431 ; GFX10-UNSAFE: [[TRUNC1:%[0-9]+]]:_(s16) = G_TRUNC [[COPY1]](s32)
432 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
433 ; GFX10-UNSAFE: [[TRUNC2:%[0-9]+]]:_(s16) = G_TRUNC [[COPY2]](s32)
434 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
435 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(s16) = G_FMA [[TRUNC]], [[TRUNC1]], [[TRUNC2]]
436 ; GFX10-UNSAFE: [[ANYEXT:%[0-9]+]]:_(s32) = G_ANYEXT [[FMA]](s16)
437 ; GFX10-UNSAFE: $vgpr0 = COPY [[ANYEXT]](s32)
438 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY3]]
439 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY4]], implicit $vgpr0
440 %4:_(s32) = COPY $vgpr0
441 %0:_(s16) = G_TRUNC %4(s32)
442 %5:_(s32) = COPY $vgpr1
443 %1:_(s16) = G_TRUNC %5(s32)
444 %6:_(s32) = COPY $vgpr2
445 %2:_(s16) = G_TRUNC %6(s32)
446 %3:sgpr_64 = COPY $sgpr30_sgpr31
447 %7:_(s16) = reassoc G_FMUL %0, %1
448 %8:_(s16) = reassoc G_FADD %2, %7
449 %10:_(s32) = G_ANYEXT %8(s16)
450 $vgpr0 = COPY %10(s32)
451 %9:ccr_sgpr_64 = COPY %3
452 S_SETPC_B64_return %9, implicit $vgpr0
456 name: test_double_add_mul
459 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $sgpr30_sgpr31
461 ; GFX9-LABEL: name: test_double_add_mul
462 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
463 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
464 ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
465 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
466 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
467 ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
468 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
469 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
470 ; GFX9: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
471 ; GFX9: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
472 ; GFX9: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
473 ; GFX9: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[FMUL]], [[MV2]]
474 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
475 ; GFX9: $vgpr0 = COPY [[UV]](s32)
476 ; GFX9: $vgpr1 = COPY [[UV1]](s32)
477 ; GFX9: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
478 ; GFX9: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
479 ; GFX9-CONTRACT-LABEL: name: test_double_add_mul
480 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
481 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
482 ; GFX9-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
483 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
484 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
485 ; GFX9-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
486 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
487 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
488 ; GFX9-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
489 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
490 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
491 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
492 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](s32)
493 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
494 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
495 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
496 ; GFX9-DENORM-LABEL: name: test_double_add_mul
497 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
498 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
499 ; GFX9-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
500 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
501 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
502 ; GFX9-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
503 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
504 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
505 ; GFX9-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
506 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
507 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
508 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[FMUL]], [[MV2]]
509 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
510 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](s32)
511 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](s32)
512 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
513 ; GFX9-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
514 ; GFX9-UNSAFE-LABEL: name: test_double_add_mul
515 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
516 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
517 ; GFX9-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
518 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
519 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
520 ; GFX9-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
521 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
522 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
523 ; GFX9-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
524 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
525 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
526 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
527 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](s32)
528 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
529 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
530 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
531 ; GFX10-LABEL: name: test_double_add_mul
532 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
533 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
534 ; GFX10: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
535 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
536 ; GFX10: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
537 ; GFX10: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
538 ; GFX10: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
539 ; GFX10: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
540 ; GFX10: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
541 ; GFX10: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
542 ; GFX10: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
543 ; GFX10: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[FMUL]], [[MV2]]
544 ; GFX10: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
545 ; GFX10: $vgpr0 = COPY [[UV]](s32)
546 ; GFX10: $vgpr1 = COPY [[UV1]](s32)
547 ; GFX10: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
548 ; GFX10: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
549 ; GFX10-CONTRACT-LABEL: name: test_double_add_mul
550 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
551 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
552 ; GFX10-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
553 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
554 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
555 ; GFX10-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
556 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
557 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
558 ; GFX10-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
559 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
560 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
561 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
562 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](s32)
563 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
564 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
565 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
566 ; GFX10-DENORM-LABEL: name: test_double_add_mul
567 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
568 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
569 ; GFX10-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
570 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
571 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
572 ; GFX10-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
573 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
574 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
575 ; GFX10-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
576 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
577 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
578 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[FMUL]], [[MV2]]
579 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
580 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](s32)
581 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](s32)
582 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
583 ; GFX10-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
584 ; GFX10-UNSAFE-LABEL: name: test_double_add_mul
585 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
586 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
587 ; GFX10-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
588 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
589 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
590 ; GFX10-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
591 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
592 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
593 ; GFX10-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
594 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
595 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
596 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
597 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](s32)
598 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
599 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
600 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
601 %4:_(s32) = COPY $vgpr0
602 %5:_(s32) = COPY $vgpr1
603 %0:_(s64) = G_MERGE_VALUES %4(s32), %5(s32)
604 %6:_(s32) = COPY $vgpr2
605 %7:_(s32) = COPY $vgpr3
606 %1:_(s64) = G_MERGE_VALUES %6(s32), %7(s32)
607 %8:_(s32) = COPY $vgpr4
608 %9:_(s32) = COPY $vgpr5
609 %2:_(s64) = G_MERGE_VALUES %8(s32), %9(s32)
610 %3:sgpr_64 = COPY $sgpr30_sgpr31
611 %10:_(s64) = reassoc G_FMUL %0, %1
612 %11:_(s64) = reassoc G_FADD %10, %2
613 %13:_(s32), %14:_(s32) = G_UNMERGE_VALUES %11(s64)
614 $vgpr0 = COPY %13(s32)
615 $vgpr1 = COPY %14(s32)
616 %12:ccr_sgpr_64 = COPY %3
617 S_SETPC_B64_return %12, implicit $vgpr0, implicit $vgpr1
# test_double_add_mul_rhs: fadd(z, fmul(x, y)) on s64, with the multiply on the
# RHS of the add. The baseline and --denormal-fp-math=preserve-sign runs must
# keep the separate reassoc G_FMUL / G_FADD pair (no contraction), while the
# -fp-contract=fast and -enable-unsafe-fp-math runs must fold the pair into a
# single G_FMA. Check lines are autogenerated by update_mir_test_checks.py —
# regenerate rather than hand-edit them.
621 name: test_double_add_mul_rhs
624 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $sgpr30_sgpr31
626 ; GFX9-LABEL: name: test_double_add_mul_rhs
627 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
628 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
629 ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
630 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
631 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
632 ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
633 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
634 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
635 ; GFX9: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
636 ; GFX9: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
637 ; GFX9: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
638 ; GFX9: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[MV2]], [[FMUL]]
639 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
640 ; GFX9: $vgpr0 = COPY [[UV]](s32)
641 ; GFX9: $vgpr1 = COPY [[UV1]](s32)
642 ; GFX9: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
643 ; GFX9: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
644 ; GFX9-CONTRACT-LABEL: name: test_double_add_mul_rhs
645 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
646 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
647 ; GFX9-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
648 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
649 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
650 ; GFX9-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
651 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
652 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
653 ; GFX9-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
654 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
655 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
656 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
657 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](s32)
658 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
659 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
660 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
661 ; GFX9-DENORM-LABEL: name: test_double_add_mul_rhs
662 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
663 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
664 ; GFX9-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
665 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
666 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
667 ; GFX9-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
668 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
669 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
670 ; GFX9-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
671 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
672 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
673 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[MV2]], [[FMUL]]
674 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
675 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](s32)
676 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](s32)
677 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
678 ; GFX9-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
679 ; GFX9-UNSAFE-LABEL: name: test_double_add_mul_rhs
680 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
681 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
682 ; GFX9-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
683 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
684 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
685 ; GFX9-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
686 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
687 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
688 ; GFX9-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
689 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
690 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
691 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
692 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](s32)
693 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
694 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
695 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
696 ; GFX10-LABEL: name: test_double_add_mul_rhs
697 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
698 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
699 ; GFX10: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
700 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
701 ; GFX10: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
702 ; GFX10: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
703 ; GFX10: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
704 ; GFX10: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
705 ; GFX10: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
706 ; GFX10: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
707 ; GFX10: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
708 ; GFX10: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[MV2]], [[FMUL]]
709 ; GFX10: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
710 ; GFX10: $vgpr0 = COPY [[UV]](s32)
711 ; GFX10: $vgpr1 = COPY [[UV1]](s32)
712 ; GFX10: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
713 ; GFX10: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
714 ; GFX10-CONTRACT-LABEL: name: test_double_add_mul_rhs
715 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
716 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
717 ; GFX10-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
718 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
719 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
720 ; GFX10-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
721 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
722 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
723 ; GFX10-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
724 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
725 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
726 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
727 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](s32)
728 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
729 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
730 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
731 ; GFX10-DENORM-LABEL: name: test_double_add_mul_rhs
732 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
733 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
734 ; GFX10-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
735 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
736 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
737 ; GFX10-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
738 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
739 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
740 ; GFX10-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
741 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
742 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(s64) = reassoc G_FMUL [[MV]], [[MV1]]
743 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(s64) = reassoc G_FADD [[MV2]], [[FMUL]]
744 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](s64)
745 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](s32)
746 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](s32)
747 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
748 ; GFX10-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
749 ; GFX10-UNSAFE-LABEL: name: test_double_add_mul_rhs
750 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
751 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
752 ; GFX10-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
753 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
754 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
755 ; GFX10-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
756 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
757 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
758 ; GFX10-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
759 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
760 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(s64) = G_FMA [[MV]], [[MV1]], [[MV2]]
761 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](s64)
762 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](s32)
763 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
764 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
765 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
; Input MIR. Two s64 values are assembled from vgpr0-3 (%0 = x, %1 = y) and a
; third from vgpr4-5 (%2 = z); the combine target is %11 = fadd(%2, fmul(%0, %1)),
; i.e. the mul feeds the RHS of the add. sgpr30_sgpr31 is the return address.
766 %4:_(s32) = COPY $vgpr0
767 %5:_(s32) = COPY $vgpr1
768 %0:_(s64) = G_MERGE_VALUES %4(s32), %5(s32)
769 %6:_(s32) = COPY $vgpr2
770 %7:_(s32) = COPY $vgpr3
771 %1:_(s64) = G_MERGE_VALUES %6(s32), %7(s32)
772 %8:_(s32) = COPY $vgpr4
773 %9:_(s32) = COPY $vgpr5
774 %2:_(s64) = G_MERGE_VALUES %8(s32), %9(s32)
775 %3:sgpr_64 = COPY $sgpr30_sgpr31
776 %10:_(s64) = reassoc G_FMUL %0, %1
777 %11:_(s64) = reassoc G_FADD %2, %10
778 %13:_(s32), %14:_(s32) = G_UNMERGE_VALUES %11(s64)
779 $vgpr0 = COPY %13(s32)
780 $vgpr1 = COPY %14(s32)
781 %12:ccr_sgpr_64 = COPY %3
782 S_SETPC_B64_return %12, implicit $vgpr0, implicit $vgpr1
# test_4xfloat_add_mul: fadd(fmul(x, y), z) on <4 x s32>, exercising the
# mul+add combine on a vector type. The baseline and
# --denormal-fp-math=preserve-sign runs must keep the separate reassoc
# G_FMUL / G_FADD pair, while the -fp-contract=fast and -enable-unsafe-fp-math
# runs must fold the pair into a single vector G_FMA. Check lines are
# autogenerated by update_mir_test_checks.py — regenerate rather than
# hand-edit them.
786 name: test_4xfloat_add_mul
789 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $sgpr30_sgpr31
791 ; GFX9-LABEL: name: test_4xfloat_add_mul
792 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
793 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
794 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
795 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
796 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
797 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
798 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
799 ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
800 ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
801 ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
802 ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
803 ; GFX9: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
804 ; GFX9: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
805 ; GFX9: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
806 ; GFX9: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
807 ; GFX9: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
808 ; GFX9: [[FMUL:%[0-9]+]]:_(<4 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
809 ; GFX9: [[FADD:%[0-9]+]]:_(<4 x s32>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
810 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s32>)
811 ; GFX9: $vgpr0 = COPY [[UV]](s32)
812 ; GFX9: $vgpr1 = COPY [[UV1]](s32)
813 ; GFX9: $vgpr2 = COPY [[UV2]](s32)
814 ; GFX9: $vgpr3 = COPY [[UV3]](s32)
815 ; GFX9: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
816 ; GFX9: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
817 ; GFX9-CONTRACT-LABEL: name: test_4xfloat_add_mul
818 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
819 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
820 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
821 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
822 ; GFX9-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
823 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
824 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
825 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
826 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
827 ; GFX9-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
828 ; GFX9-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
829 ; GFX9-CONTRACT: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
830 ; GFX9-CONTRACT: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
831 ; GFX9-CONTRACT: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
832 ; GFX9-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
833 ; GFX9-CONTRACT: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
834 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(<4 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
835 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s32>)
836 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](s32)
837 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
838 ; GFX9-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
839 ; GFX9-CONTRACT: $vgpr3 = COPY [[UV3]](s32)
840 ; GFX9-CONTRACT: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
841 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
842 ; GFX9-DENORM-LABEL: name: test_4xfloat_add_mul
843 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
844 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
845 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
846 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
847 ; GFX9-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
848 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
849 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
850 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
851 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
852 ; GFX9-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
853 ; GFX9-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
854 ; GFX9-DENORM: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
855 ; GFX9-DENORM: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
856 ; GFX9-DENORM: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
857 ; GFX9-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
858 ; GFX9-DENORM: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
859 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(<4 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
860 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(<4 x s32>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
861 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s32>)
862 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](s32)
863 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](s32)
864 ; GFX9-DENORM: $vgpr2 = COPY [[UV2]](s32)
865 ; GFX9-DENORM: $vgpr3 = COPY [[UV3]](s32)
866 ; GFX9-DENORM: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
867 ; GFX9-DENORM: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
868 ; GFX9-UNSAFE-LABEL: name: test_4xfloat_add_mul
869 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
870 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
871 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
872 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
873 ; GFX9-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
874 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
875 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
876 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
877 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
878 ; GFX9-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
879 ; GFX9-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
880 ; GFX9-UNSAFE: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
881 ; GFX9-UNSAFE: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
882 ; GFX9-UNSAFE: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
883 ; GFX9-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
884 ; GFX9-UNSAFE: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
885 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(<4 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
886 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s32>)
887 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](s32)
888 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
889 ; GFX9-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
890 ; GFX9-UNSAFE: $vgpr3 = COPY [[UV3]](s32)
891 ; GFX9-UNSAFE: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
892 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
893 ; GFX10-LABEL: name: test_4xfloat_add_mul
894 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
895 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
896 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
897 ; GFX10: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
898 ; GFX10: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
899 ; GFX10: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
900 ; GFX10: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
901 ; GFX10: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
902 ; GFX10: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
903 ; GFX10: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
904 ; GFX10: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
905 ; GFX10: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
906 ; GFX10: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
907 ; GFX10: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
908 ; GFX10: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
909 ; GFX10: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
910 ; GFX10: [[FMUL:%[0-9]+]]:_(<4 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
911 ; GFX10: [[FADD:%[0-9]+]]:_(<4 x s32>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
912 ; GFX10: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s32>)
913 ; GFX10: $vgpr0 = COPY [[UV]](s32)
914 ; GFX10: $vgpr1 = COPY [[UV1]](s32)
915 ; GFX10: $vgpr2 = COPY [[UV2]](s32)
916 ; GFX10: $vgpr3 = COPY [[UV3]](s32)
917 ; GFX10: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
918 ; GFX10: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
919 ; GFX10-CONTRACT-LABEL: name: test_4xfloat_add_mul
920 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
921 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
922 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
923 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
924 ; GFX10-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
925 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
926 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
927 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
928 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
929 ; GFX10-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
930 ; GFX10-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
931 ; GFX10-CONTRACT: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
932 ; GFX10-CONTRACT: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
933 ; GFX10-CONTRACT: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
934 ; GFX10-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
935 ; GFX10-CONTRACT: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
936 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(<4 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
937 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s32>)
938 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](s32)
939 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
940 ; GFX10-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
941 ; GFX10-CONTRACT: $vgpr3 = COPY [[UV3]](s32)
942 ; GFX10-CONTRACT: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
943 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
944 ; GFX10-DENORM-LABEL: name: test_4xfloat_add_mul
945 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
946 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
947 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
948 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
949 ; GFX10-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
950 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
951 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
952 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
953 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
954 ; GFX10-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
955 ; GFX10-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
956 ; GFX10-DENORM: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
957 ; GFX10-DENORM: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
958 ; GFX10-DENORM: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
959 ; GFX10-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
960 ; GFX10-DENORM: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
961 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(<4 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
962 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(<4 x s32>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
963 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s32>)
964 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](s32)
965 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](s32)
966 ; GFX10-DENORM: $vgpr2 = COPY [[UV2]](s32)
967 ; GFX10-DENORM: $vgpr3 = COPY [[UV3]](s32)
968 ; GFX10-DENORM: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
969 ; GFX10-DENORM: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
970 ; GFX10-UNSAFE-LABEL: name: test_4xfloat_add_mul
971 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
972 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
973 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
974 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
975 ; GFX10-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32), [[COPY3]](s32)
976 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
977 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
978 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
979 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
980 ; GFX10-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY4]](s32), [[COPY5]](s32), [[COPY6]](s32), [[COPY7]](s32)
981 ; GFX10-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
982 ; GFX10-UNSAFE: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
983 ; GFX10-UNSAFE: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
984 ; GFX10-UNSAFE: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
985 ; GFX10-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[COPY8]](s32), [[COPY9]](s32), [[COPY10]](s32), [[COPY11]](s32)
986 ; GFX10-UNSAFE: [[COPY12:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
987 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(<4 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
988 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s32>)
989 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](s32)
990 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
991 ; GFX10-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
992 ; GFX10-UNSAFE: $vgpr3 = COPY [[UV3]](s32)
993 ; GFX10-UNSAFE: [[COPY13:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY12]]
994 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY13]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
; Input MIR. Three <4 x s32> vectors are assembled from vgpr0-3 (%0 = x),
; vgpr4-7 (%1 = y) and vgpr8-11 (%2 = z); the combine target is
; %17 = fadd(fmul(%0, %1), %2), with the mul on the LHS of the add.
; sgpr30_sgpr31 is the return address.
995 %4:_(s32) = COPY $vgpr0
996 %5:_(s32) = COPY $vgpr1
997 %6:_(s32) = COPY $vgpr2
998 %7:_(s32) = COPY $vgpr3
999 %0:_(<4 x s32>) = G_BUILD_VECTOR %4(s32), %5(s32), %6(s32), %7(s32)
1000 %8:_(s32) = COPY $vgpr4
1001 %9:_(s32) = COPY $vgpr5
1002 %10:_(s32) = COPY $vgpr6
1003 %11:_(s32) = COPY $vgpr7
1004 %1:_(<4 x s32>) = G_BUILD_VECTOR %8(s32), %9(s32), %10(s32), %11(s32)
1005 %12:_(s32) = COPY $vgpr8
1006 %13:_(s32) = COPY $vgpr9
1007 %14:_(s32) = COPY $vgpr10
1008 %15:_(s32) = COPY $vgpr11
1009 %2:_(<4 x s32>) = G_BUILD_VECTOR %12(s32), %13(s32), %14(s32), %15(s32)
1010 %3:sgpr_64 = COPY $sgpr30_sgpr31
1011 %16:_(<4 x s32>) = reassoc G_FMUL %0, %1
1012 %17:_(<4 x s32>) = reassoc G_FADD %16, %2
1013 %19:_(s32), %20:_(s32), %21:_(s32), %22:_(s32) = G_UNMERGE_VALUES %17(<4 x s32>)
1014 $vgpr0 = COPY %19(s32)
1015 $vgpr1 = COPY %20(s32)
1016 $vgpr2 = COPY %21(s32)
1017 $vgpr3 = COPY %22(s32)
1018 %18:ccr_sgpr_64 = COPY %3
1019 S_SETPC_B64_return %18, implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3
# test_3xfloat_add_mul_rhs: computes fadd(c, fmul(a, b)) on <3 x s32> operands,
# i.e. the multiply appears on the right-hand side of the add.
# Expected amdgpu-prelegalizer-combiner behavior per RUN configuration:
#   - GFX9/GFX10 default and *-DENORM (--denormal-fp-math=preserve-sign):
#     the reassoc G_FMUL + G_FADD pair is left unfused.
#   - *-CONTRACT (-fp-contract=fast) and *-UNSAFE (-enable-unsafe-fp-math):
#     the pair is fused into a single G_FMA.
# NOTE: CHECK lines below are autogenerated by utils/update_mir_test_checks.py;
# regenerate rather than hand-editing them.
1023 name: test_3xfloat_add_mul_rhs
1026 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $sgpr30_sgpr31
1028 ; GFX9-LABEL: name: test_3xfloat_add_mul_rhs
1029 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1030 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1031 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1032 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1033 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1034 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1035 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1036 ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1037 ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1038 ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1039 ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1040 ; GFX9: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1041 ; GFX9: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1042 ; GFX9: [[FMUL:%[0-9]+]]:_(<3 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1043 ; GFX9: [[FADD:%[0-9]+]]:_(<3 x s32>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
1044 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s32>)
1045 ; GFX9: $vgpr0 = COPY [[UV]](s32)
1046 ; GFX9: $vgpr1 = COPY [[UV1]](s32)
1047 ; GFX9: $vgpr2 = COPY [[UV2]](s32)
1048 ; GFX9: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1049 ; GFX9: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1050 ; GFX9-CONTRACT-LABEL: name: test_3xfloat_add_mul_rhs
1051 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1052 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1053 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1054 ; GFX9-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1055 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1056 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1057 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1058 ; GFX9-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1059 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1060 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1061 ; GFX9-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1062 ; GFX9-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1063 ; GFX9-CONTRACT: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1064 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(<3 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1065 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s32>)
1066 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](s32)
1067 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
1068 ; GFX9-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
1069 ; GFX9-CONTRACT: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1070 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1071 ; GFX9-DENORM-LABEL: name: test_3xfloat_add_mul_rhs
1072 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1073 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1074 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1075 ; GFX9-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1076 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1077 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1078 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1079 ; GFX9-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1080 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1081 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1082 ; GFX9-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1083 ; GFX9-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1084 ; GFX9-DENORM: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1085 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(<3 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1086 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(<3 x s32>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
1087 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s32>)
1088 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](s32)
1089 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](s32)
1090 ; GFX9-DENORM: $vgpr2 = COPY [[UV2]](s32)
1091 ; GFX9-DENORM: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1092 ; GFX9-DENORM: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1093 ; GFX9-UNSAFE-LABEL: name: test_3xfloat_add_mul_rhs
1094 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1095 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1096 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1097 ; GFX9-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1098 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1099 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1100 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1101 ; GFX9-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1102 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1103 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1104 ; GFX9-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1105 ; GFX9-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1106 ; GFX9-UNSAFE: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1107 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(<3 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1108 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s32>)
1109 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](s32)
1110 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
1111 ; GFX9-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
1112 ; GFX9-UNSAFE: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1113 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1114 ; GFX10-LABEL: name: test_3xfloat_add_mul_rhs
1115 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1116 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1117 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1118 ; GFX10: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1119 ; GFX10: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1120 ; GFX10: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1121 ; GFX10: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1122 ; GFX10: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1123 ; GFX10: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1124 ; GFX10: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1125 ; GFX10: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1126 ; GFX10: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1127 ; GFX10: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1128 ; GFX10: [[FMUL:%[0-9]+]]:_(<3 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1129 ; GFX10: [[FADD:%[0-9]+]]:_(<3 x s32>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
1130 ; GFX10: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s32>)
1131 ; GFX10: $vgpr0 = COPY [[UV]](s32)
1132 ; GFX10: $vgpr1 = COPY [[UV1]](s32)
1133 ; GFX10: $vgpr2 = COPY [[UV2]](s32)
1134 ; GFX10: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1135 ; GFX10: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1136 ; GFX10-CONTRACT-LABEL: name: test_3xfloat_add_mul_rhs
1137 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1138 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1139 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1140 ; GFX10-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1141 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1142 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1143 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1144 ; GFX10-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1145 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1146 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1147 ; GFX10-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1148 ; GFX10-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1149 ; GFX10-CONTRACT: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1150 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(<3 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1151 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s32>)
1152 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](s32)
1153 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
1154 ; GFX10-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
1155 ; GFX10-CONTRACT: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1156 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1157 ; GFX10-DENORM-LABEL: name: test_3xfloat_add_mul_rhs
1158 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1159 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1160 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1161 ; GFX10-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1162 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1163 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1164 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1165 ; GFX10-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1166 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1167 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1168 ; GFX10-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1169 ; GFX10-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1170 ; GFX10-DENORM: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1171 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(<3 x s32>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1172 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(<3 x s32>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
1173 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s32>)
1174 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](s32)
1175 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](s32)
1176 ; GFX10-DENORM: $vgpr2 = COPY [[UV2]](s32)
1177 ; GFX10-DENORM: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1178 ; GFX10-DENORM: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1179 ; GFX10-UNSAFE-LABEL: name: test_3xfloat_add_mul_rhs
1180 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1181 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1182 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1183 ; GFX10-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY]](s32), [[COPY1]](s32), [[COPY2]](s32)
1184 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1185 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1186 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1187 ; GFX10-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY3]](s32), [[COPY4]](s32), [[COPY5]](s32)
1188 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1189 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1190 ; GFX10-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1191 ; GFX10-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s32>) = G_BUILD_VECTOR [[COPY6]](s32), [[COPY7]](s32), [[COPY8]](s32)
1192 ; GFX10-UNSAFE: [[COPY9:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1193 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(<3 x s32>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1194 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s32>)
1195 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](s32)
1196 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
1197 ; GFX10-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
1198 ; GFX10-UNSAFE: [[COPY10:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY9]]
1199 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY10]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
1200 %4:_(s32) = COPY $vgpr0
1201 %5:_(s32) = COPY $vgpr1
1202 %6:_(s32) = COPY $vgpr2
1203 %0:_(<3 x s32>) = G_BUILD_VECTOR %4(s32), %5(s32), %6(s32)
1204 %7:_(s32) = COPY $vgpr3
1205 %8:_(s32) = COPY $vgpr4
1206 %9:_(s32) = COPY $vgpr5
1207 %1:_(<3 x s32>) = G_BUILD_VECTOR %7(s32), %8(s32), %9(s32)
1208 %10:_(s32) = COPY $vgpr6
1209 %11:_(s32) = COPY $vgpr7
1210 %12:_(s32) = COPY $vgpr8
1211 %2:_(<3 x s32>) = G_BUILD_VECTOR %10(s32), %11(s32), %12(s32)
1212 %3:sgpr_64 = COPY $sgpr30_sgpr31
1213 %13:_(<3 x s32>) = reassoc G_FMUL %0, %1
1214 %14:_(<3 x s32>) = reassoc G_FADD %2, %13
1215 %16:_(s32), %17:_(s32), %18:_(s32) = G_UNMERGE_VALUES %14(<3 x s32>)
1216 $vgpr0 = COPY %16(s32)
1217 $vgpr1 = COPY %17(s32)
1218 $vgpr2 = COPY %18(s32)
1219 %15:ccr_sgpr_64 = COPY %3
1220 S_SETPC_B64_return %15, implicit $vgpr0, implicit $vgpr1, implicit $vgpr2
# test_4xhalf_add_mul: computes fadd(fmul(a, b), c) -- multiply on the
# left-hand side of the add -- on <4 x s16> operands that are assembled from
# pairs of <2 x s16> VGPR copies via G_CONCAT_VECTORS.
# Expected amdgpu-prelegalizer-combiner behavior per RUN configuration:
#   - GFX9/GFX10 default and *-DENORM (--denormal-fp-math=preserve-sign):
#     the reassoc G_FMUL + G_FADD pair is left unfused.
#   - *-CONTRACT (-fp-contract=fast) and *-UNSAFE (-enable-unsafe-fp-math):
#     the pair is fused into a single G_FMA.
# NOTE: CHECK lines below are autogenerated by utils/update_mir_test_checks.py;
# regenerate rather than hand-editing them.
1224 name: test_4xhalf_add_mul
1227 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $sgpr30_sgpr31
1229 ; GFX9-LABEL: name: test_4xhalf_add_mul
1230 ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1231 ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1232 ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1233 ; GFX9: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1234 ; GFX9: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1235 ; GFX9: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1236 ; GFX9: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1237 ; GFX9: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1238 ; GFX9: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1239 ; GFX9: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1240 ; GFX9: [[FMUL:%[0-9]+]]:_(<4 x s16>) = reassoc G_FMUL [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]]
1241 ; GFX9: [[FADD:%[0-9]+]]:_(<4 x s16>) = reassoc G_FADD [[FMUL]], [[CONCAT_VECTORS2]]
1242 ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FADD]](<4 x s16>)
1243 ; GFX9: $vgpr0 = COPY [[UV]](<2 x s16>)
1244 ; GFX9: $vgpr1 = COPY [[UV1]](<2 x s16>)
1245 ; GFX9: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1246 ; GFX9: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1247 ; GFX9-CONTRACT-LABEL: name: test_4xhalf_add_mul
1248 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1249 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1250 ; GFX9-CONTRACT: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1251 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1252 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1253 ; GFX9-CONTRACT: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1254 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1255 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1256 ; GFX9-CONTRACT: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1257 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1258 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(<4 x s16>) = G_FMA [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]], [[CONCAT_VECTORS2]]
1259 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FMA]](<4 x s16>)
1260 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](<2 x s16>)
1261 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](<2 x s16>)
1262 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1263 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1264 ; GFX9-DENORM-LABEL: name: test_4xhalf_add_mul
1265 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1266 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1267 ; GFX9-DENORM: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1268 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1269 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1270 ; GFX9-DENORM: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1271 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1272 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1273 ; GFX9-DENORM: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1274 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1275 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(<4 x s16>) = reassoc G_FMUL [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]]
1276 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(<4 x s16>) = reassoc G_FADD [[FMUL]], [[CONCAT_VECTORS2]]
1277 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FADD]](<4 x s16>)
1278 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](<2 x s16>)
1279 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](<2 x s16>)
1280 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1281 ; GFX9-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1282 ; GFX9-UNSAFE-LABEL: name: test_4xhalf_add_mul
1283 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1284 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1285 ; GFX9-UNSAFE: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1286 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1287 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1288 ; GFX9-UNSAFE: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1289 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1290 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1291 ; GFX9-UNSAFE: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1292 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1293 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(<4 x s16>) = G_FMA [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]], [[CONCAT_VECTORS2]]
1294 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FMA]](<4 x s16>)
1295 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](<2 x s16>)
1296 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](<2 x s16>)
1297 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1298 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1299 ; GFX10-LABEL: name: test_4xhalf_add_mul
1300 ; GFX10: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1301 ; GFX10: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1302 ; GFX10: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1303 ; GFX10: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1304 ; GFX10: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1305 ; GFX10: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1306 ; GFX10: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1307 ; GFX10: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1308 ; GFX10: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1309 ; GFX10: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1310 ; GFX10: [[FMUL:%[0-9]+]]:_(<4 x s16>) = reassoc G_FMUL [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]]
1311 ; GFX10: [[FADD:%[0-9]+]]:_(<4 x s16>) = reassoc G_FADD [[FMUL]], [[CONCAT_VECTORS2]]
1312 ; GFX10: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FADD]](<4 x s16>)
1313 ; GFX10: $vgpr0 = COPY [[UV]](<2 x s16>)
1314 ; GFX10: $vgpr1 = COPY [[UV1]](<2 x s16>)
1315 ; GFX10: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1316 ; GFX10: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1317 ; GFX10-CONTRACT-LABEL: name: test_4xhalf_add_mul
1318 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1319 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1320 ; GFX10-CONTRACT: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1321 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1322 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1323 ; GFX10-CONTRACT: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1324 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1325 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1326 ; GFX10-CONTRACT: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1327 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1328 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(<4 x s16>) = G_FMA [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]], [[CONCAT_VECTORS2]]
1329 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FMA]](<4 x s16>)
1330 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](<2 x s16>)
1331 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](<2 x s16>)
1332 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1333 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1334 ; GFX10-DENORM-LABEL: name: test_4xhalf_add_mul
1335 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1336 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1337 ; GFX10-DENORM: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1338 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1339 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1340 ; GFX10-DENORM: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1341 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1342 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1343 ; GFX10-DENORM: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1344 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1345 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(<4 x s16>) = reassoc G_FMUL [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]]
1346 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(<4 x s16>) = reassoc G_FADD [[FMUL]], [[CONCAT_VECTORS2]]
1347 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FADD]](<4 x s16>)
1348 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](<2 x s16>)
1349 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](<2 x s16>)
1350 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1351 ; GFX10-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1352 ; GFX10-UNSAFE-LABEL: name: test_4xhalf_add_mul
1353 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1354 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1355 ; GFX10-UNSAFE: [[CONCAT_VECTORS:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>)
1356 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1357 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1358 ; GFX10-UNSAFE: [[CONCAT_VECTORS1:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>)
1359 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1360 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1361 ; GFX10-UNSAFE: [[CONCAT_VECTORS2:%[0-9]+]]:_(<4 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>)
1362 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1363 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(<4 x s16>) = G_FMA [[CONCAT_VECTORS]], [[CONCAT_VECTORS1]], [[CONCAT_VECTORS2]]
1364 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[FMA]](<4 x s16>)
1365 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](<2 x s16>)
1366 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](<2 x s16>)
1367 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1368 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1369 %4:_(<2 x s16>) = COPY $vgpr0
1370 %5:_(<2 x s16>) = COPY $vgpr1
1371 %0:_(<4 x s16>) = G_CONCAT_VECTORS %4(<2 x s16>), %5(<2 x s16>)
1372 %6:_(<2 x s16>) = COPY $vgpr2
1373 %7:_(<2 x s16>) = COPY $vgpr3
1374 %1:_(<4 x s16>) = G_CONCAT_VECTORS %6(<2 x s16>), %7(<2 x s16>)
1375 %8:_(<2 x s16>) = COPY $vgpr4
1376 %9:_(<2 x s16>) = COPY $vgpr5
1377 %2:_(<4 x s16>) = G_CONCAT_VECTORS %8(<2 x s16>), %9(<2 x s16>)
1378 %3:sgpr_64 = COPY $sgpr30_sgpr31
1379 %10:_(<4 x s16>) = reassoc G_FMUL %0, %1
1380 %11:_(<4 x s16>) = reassoc G_FADD %10, %2
1381 %13:_(<2 x s16>), %14:_(<2 x s16>) = G_UNMERGE_VALUES %11(<4 x s16>)
1382 $vgpr0 = COPY %13(<2 x s16>)
1383 $vgpr1 = COPY %14(<2 x s16>)
1384 %12:ccr_sgpr_64 = COPY %3
1385 S_SETPC_B64_return %12, implicit $vgpr0, implicit $vgpr1
1389 name: test_3xhalf_add_mul_rhs
1392 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $sgpr30_sgpr31
1394 ; GFX9-LABEL: name: test_3xhalf_add_mul_rhs
1395 ; GFX9: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1396 ; GFX9: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1397 ; GFX9: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1398 ; GFX9: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1399 ; GFX9: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1400 ; GFX9: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1401 ; GFX9: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1402 ; GFX9: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1403 ; GFX9: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1404 ; GFX9: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1405 ; GFX9: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1406 ; GFX9: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1407 ; GFX9: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1408 ; GFX9: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1409 ; GFX9: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1410 ; GFX9: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1411 ; GFX9: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1412 ; GFX9: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1413 ; GFX9: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1414 ; GFX9: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1415 ; GFX9: [[FMUL:%[0-9]+]]:_(<3 x s16>) = reassoc G_FMUL [[BITCAST1]], [[BITCAST3]]
1416 ; GFX9: [[FADD:%[0-9]+]]:_(<3 x s16>) = reassoc G_FADD [[BITCAST5]], [[FMUL]]
1417 ; GFX9: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1418 ; GFX9: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FADD]](<3 x s16>), [[DEF1]](<3 x s16>)
1419 ; GFX9: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1420 ; GFX9: $vgpr0 = COPY [[UV]](<2 x s16>)
1421 ; GFX9: $vgpr1 = COPY [[UV1]](<2 x s16>)
1422 ; GFX9: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1423 ; GFX9: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1424 ; GFX9-CONTRACT-LABEL: name: test_3xhalf_add_mul_rhs
1425 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1426 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1427 ; GFX9-CONTRACT: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1428 ; GFX9-CONTRACT: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1429 ; GFX9-CONTRACT: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1430 ; GFX9-CONTRACT: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1431 ; GFX9-CONTRACT: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1432 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1433 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1434 ; GFX9-CONTRACT: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1435 ; GFX9-CONTRACT: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1436 ; GFX9-CONTRACT: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1437 ; GFX9-CONTRACT: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1438 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1439 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1440 ; GFX9-CONTRACT: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1441 ; GFX9-CONTRACT: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1442 ; GFX9-CONTRACT: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1443 ; GFX9-CONTRACT: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1444 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1445 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(<3 x s16>) = G_FMA [[BITCAST1]], [[BITCAST3]], [[BITCAST5]]
1446 ; GFX9-CONTRACT: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1447 ; GFX9-CONTRACT: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FMA]](<3 x s16>), [[DEF1]](<3 x s16>)
1448 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1449 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](<2 x s16>)
1450 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](<2 x s16>)
1451 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1452 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1453 ; GFX9-DENORM-LABEL: name: test_3xhalf_add_mul_rhs
1454 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1455 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1456 ; GFX9-DENORM: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1457 ; GFX9-DENORM: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1458 ; GFX9-DENORM: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1459 ; GFX9-DENORM: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1460 ; GFX9-DENORM: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1461 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1462 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1463 ; GFX9-DENORM: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1464 ; GFX9-DENORM: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1465 ; GFX9-DENORM: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1466 ; GFX9-DENORM: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1467 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1468 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1469 ; GFX9-DENORM: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1470 ; GFX9-DENORM: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1471 ; GFX9-DENORM: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1472 ; GFX9-DENORM: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1473 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1474 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(<3 x s16>) = reassoc G_FMUL [[BITCAST1]], [[BITCAST3]]
1475 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(<3 x s16>) = reassoc G_FADD [[BITCAST5]], [[FMUL]]
1476 ; GFX9-DENORM: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1477 ; GFX9-DENORM: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FADD]](<3 x s16>), [[DEF1]](<3 x s16>)
1478 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1479 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](<2 x s16>)
1480 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](<2 x s16>)
1481 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1482 ; GFX9-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1483 ; GFX9-UNSAFE-LABEL: name: test_3xhalf_add_mul_rhs
1484 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1485 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1486 ; GFX9-UNSAFE: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1487 ; GFX9-UNSAFE: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1488 ; GFX9-UNSAFE: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1489 ; GFX9-UNSAFE: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1490 ; GFX9-UNSAFE: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1491 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1492 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1493 ; GFX9-UNSAFE: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1494 ; GFX9-UNSAFE: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1495 ; GFX9-UNSAFE: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1496 ; GFX9-UNSAFE: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1497 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1498 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1499 ; GFX9-UNSAFE: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1500 ; GFX9-UNSAFE: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1501 ; GFX9-UNSAFE: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1502 ; GFX9-UNSAFE: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1503 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1504 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(<3 x s16>) = G_FMA [[BITCAST1]], [[BITCAST3]], [[BITCAST5]]
1505 ; GFX9-UNSAFE: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1506 ; GFX9-UNSAFE: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FMA]](<3 x s16>), [[DEF1]](<3 x s16>)
1507 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1508 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](<2 x s16>)
1509 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](<2 x s16>)
1510 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1511 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1512 ; GFX10-LABEL: name: test_3xhalf_add_mul_rhs
1513 ; GFX10: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1514 ; GFX10: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1515 ; GFX10: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1516 ; GFX10: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1517 ; GFX10: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1518 ; GFX10: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1519 ; GFX10: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1520 ; GFX10: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1521 ; GFX10: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1522 ; GFX10: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1523 ; GFX10: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1524 ; GFX10: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1525 ; GFX10: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1526 ; GFX10: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1527 ; GFX10: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1528 ; GFX10: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1529 ; GFX10: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1530 ; GFX10: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1531 ; GFX10: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1532 ; GFX10: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1533 ; GFX10: [[FMUL:%[0-9]+]]:_(<3 x s16>) = reassoc G_FMUL [[BITCAST1]], [[BITCAST3]]
1534 ; GFX10: [[FADD:%[0-9]+]]:_(<3 x s16>) = reassoc G_FADD [[BITCAST5]], [[FMUL]]
1535 ; GFX10: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1536 ; GFX10: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FADD]](<3 x s16>), [[DEF1]](<3 x s16>)
1537 ; GFX10: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1538 ; GFX10: $vgpr0 = COPY [[UV]](<2 x s16>)
1539 ; GFX10: $vgpr1 = COPY [[UV1]](<2 x s16>)
1540 ; GFX10: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1541 ; GFX10: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1542 ; GFX10-CONTRACT-LABEL: name: test_3xhalf_add_mul_rhs
1543 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1544 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1545 ; GFX10-CONTRACT: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1546 ; GFX10-CONTRACT: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1547 ; GFX10-CONTRACT: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1548 ; GFX10-CONTRACT: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1549 ; GFX10-CONTRACT: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1550 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1551 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1552 ; GFX10-CONTRACT: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1553 ; GFX10-CONTRACT: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1554 ; GFX10-CONTRACT: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1555 ; GFX10-CONTRACT: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1556 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1557 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1558 ; GFX10-CONTRACT: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1559 ; GFX10-CONTRACT: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1560 ; GFX10-CONTRACT: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1561 ; GFX10-CONTRACT: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1562 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1563 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(<3 x s16>) = G_FMA [[BITCAST1]], [[BITCAST3]], [[BITCAST5]]
1564 ; GFX10-CONTRACT: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1565 ; GFX10-CONTRACT: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FMA]](<3 x s16>), [[DEF1]](<3 x s16>)
1566 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1567 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](<2 x s16>)
1568 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](<2 x s16>)
1569 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1570 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1571 ; GFX10-DENORM-LABEL: name: test_3xhalf_add_mul_rhs
1572 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1573 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1574 ; GFX10-DENORM: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1575 ; GFX10-DENORM: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1576 ; GFX10-DENORM: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1577 ; GFX10-DENORM: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1578 ; GFX10-DENORM: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1579 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1580 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1581 ; GFX10-DENORM: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1582 ; GFX10-DENORM: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1583 ; GFX10-DENORM: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1584 ; GFX10-DENORM: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1585 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1586 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1587 ; GFX10-DENORM: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1588 ; GFX10-DENORM: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1589 ; GFX10-DENORM: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1590 ; GFX10-DENORM: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1591 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1592 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(<3 x s16>) = reassoc G_FMUL [[BITCAST1]], [[BITCAST3]]
1593 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(<3 x s16>) = reassoc G_FADD [[BITCAST5]], [[FMUL]]
1594 ; GFX10-DENORM: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1595 ; GFX10-DENORM: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FADD]](<3 x s16>), [[DEF1]](<3 x s16>)
1596 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1597 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](<2 x s16>)
1598 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](<2 x s16>)
1599 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1600 ; GFX10-DENORM: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1601 ; GFX10-UNSAFE-LABEL: name: test_3xhalf_add_mul_rhs
1602 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr0
1603 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr1
1604 ; GFX10-UNSAFE: [[DEF:%[0-9]+]]:_(<2 x s16>) = G_IMPLICIT_DEF
1605 ; GFX10-UNSAFE: [[CONCAT_VECTORS:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY]](<2 x s16>), [[COPY1]](<2 x s16>), [[DEF]](<2 x s16>)
1606 ; GFX10-UNSAFE: [[BITCAST:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS]](<6 x s16>)
1607 ; GFX10-UNSAFE: [[TRUNC:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST]](s96)
1608 ; GFX10-UNSAFE: [[BITCAST1:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC]](s48)
1609 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr2
1610 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr3
1611 ; GFX10-UNSAFE: [[CONCAT_VECTORS1:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY2]](<2 x s16>), [[COPY3]](<2 x s16>), [[DEF]](<2 x s16>)
1612 ; GFX10-UNSAFE: [[BITCAST2:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS1]](<6 x s16>)
1613 ; GFX10-UNSAFE: [[TRUNC1:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST2]](s96)
1614 ; GFX10-UNSAFE: [[BITCAST3:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC1]](s48)
1615 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr4
1616 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(<2 x s16>) = COPY $vgpr5
1617 ; GFX10-UNSAFE: [[CONCAT_VECTORS2:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[COPY4]](<2 x s16>), [[COPY5]](<2 x s16>), [[DEF]](<2 x s16>)
1618 ; GFX10-UNSAFE: [[BITCAST4:%[0-9]+]]:_(s96) = G_BITCAST [[CONCAT_VECTORS2]](<6 x s16>)
1619 ; GFX10-UNSAFE: [[TRUNC2:%[0-9]+]]:_(s48) = G_TRUNC [[BITCAST4]](s96)
1620 ; GFX10-UNSAFE: [[BITCAST5:%[0-9]+]]:_(<3 x s16>) = G_BITCAST [[TRUNC2]](s48)
1621 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1622 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(<3 x s16>) = G_FMA [[BITCAST1]], [[BITCAST3]], [[BITCAST5]]
1623 ; GFX10-UNSAFE: [[DEF1:%[0-9]+]]:_(<3 x s16>) = G_IMPLICIT_DEF
1624 ; GFX10-UNSAFE: [[CONCAT_VECTORS3:%[0-9]+]]:_(<6 x s16>) = G_CONCAT_VECTORS [[FMA]](<3 x s16>), [[DEF1]](<3 x s16>)
1625 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(<2 x s16>), [[UV1:%[0-9]+]]:_(<2 x s16>), [[UV2:%[0-9]+]]:_(<2 x s16>) = G_UNMERGE_VALUES [[CONCAT_VECTORS3]](<6 x s16>)
1626 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](<2 x s16>)
1627 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](<2 x s16>)
1628 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY6]]
1629 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY7]], implicit $vgpr0, implicit $vgpr1
1630 %4:_(<2 x s16>) = COPY $vgpr0
1631 %5:_(<2 x s16>) = COPY $vgpr1
1632 %10:_(<2 x s16>) = G_IMPLICIT_DEF
1633 %11:_(<6 x s16>) = G_CONCAT_VECTORS %4(<2 x s16>), %5(<2 x s16>), %10(<2 x s16>)
1634 %0:_(<3 x s16>), %12:_(<3 x s16>) = G_UNMERGE_VALUES %11(<6 x s16>)
1635 %6:_(<2 x s16>) = COPY $vgpr2
1636 %7:_(<2 x s16>) = COPY $vgpr3
1637 %13:_(<6 x s16>) = G_CONCAT_VECTORS %6(<2 x s16>), %7(<2 x s16>), %10(<2 x s16>)
1638 %1:_(<3 x s16>), %14:_(<3 x s16>) = G_UNMERGE_VALUES %13(<6 x s16>)
1639 %8:_(<2 x s16>) = COPY $vgpr4
1640 %9:_(<2 x s16>) = COPY $vgpr5
1641 %15:_(<6 x s16>) = G_CONCAT_VECTORS %8(<2 x s16>), %9(<2 x s16>), %10(<2 x s16>)
1642 %2:_(<3 x s16>), %16:_(<3 x s16>) = G_UNMERGE_VALUES %15(<6 x s16>)
1643 %3:sgpr_64 = COPY $sgpr30_sgpr31
1644 %17:_(<3 x s16>) = reassoc G_FMUL %0, %1
1645 %18:_(<3 x s16>) = reassoc G_FADD %2, %17
1646 %22:_(<3 x s16>) = G_IMPLICIT_DEF
1647 %23:_(<6 x s16>) = G_CONCAT_VECTORS %18(<3 x s16>), %22(<3 x s16>)
1648 %20:_(<2 x s16>), %21:_(<2 x s16>), %24:_(<2 x s16>) = G_UNMERGE_VALUES %23(<6 x s16>)
1649 $vgpr0 = COPY %20(<2 x s16>)
1650 $vgpr1 = COPY %21(<2 x s16>)
1651 %19:ccr_sgpr_64 = COPY %3
1652 S_SETPC_B64_return %19, implicit $vgpr0, implicit $vgpr1
1656 name: test_4xdouble_add_mul
1659 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13, $vgpr14, $vgpr15, $vgpr16, $vgpr17, $vgpr18, $vgpr19, $vgpr20, $vgpr21, $vgpr22, $vgpr23, $sgpr30_sgpr31
1661 ; GFX9-LABEL: name: test_4xdouble_add_mul
1662 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1663 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1664 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1665 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1666 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1667 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1668 ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1669 ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1670 ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1671 ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1672 ; GFX9: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1673 ; GFX9: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1674 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1675 ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1676 ; GFX9: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1677 ; GFX9: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1678 ; GFX9: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
1679 ; GFX9: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
1680 ; GFX9: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
1681 ; GFX9: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
1682 ; GFX9: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
1683 ; GFX9: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
1684 ; GFX9: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
1685 ; GFX9: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
1686 ; GFX9: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
1687 ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
1688 ; GFX9: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
1689 ; GFX9: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
1690 ; GFX9: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
1691 ; GFX9: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
1692 ; GFX9: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
1693 ; GFX9: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
1694 ; GFX9: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
1695 ; GFX9: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
1696 ; GFX9: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
1697 ; GFX9: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
1698 ; GFX9: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
1699 ; GFX9: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
1700 ; GFX9: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
1701 ; GFX9: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1702 ; GFX9: [[FMUL:%[0-9]+]]:_(<4 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1703 ; GFX9: [[FADD:%[0-9]+]]:_(<4 x s64>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
1704 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s64>)
1705 ; GFX9: $vgpr0 = COPY [[UV]](s32)
1706 ; GFX9: $vgpr1 = COPY [[UV1]](s32)
1707 ; GFX9: $vgpr2 = COPY [[UV2]](s32)
1708 ; GFX9: $vgpr3 = COPY [[UV3]](s32)
1709 ; GFX9: $vgpr4 = COPY [[UV4]](s32)
1710 ; GFX9: $vgpr5 = COPY [[UV5]](s32)
1711 ; GFX9: $vgpr6 = COPY [[UV6]](s32)
1712 ; GFX9: $vgpr7 = COPY [[UV7]](s32)
1713 ; GFX9: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
1714 ; GFX9: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
1715 ; GFX9-CONTRACT-LABEL: name: test_4xdouble_add_mul
1716 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1717 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1718 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1719 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1720 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1721 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1722 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1723 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1724 ; GFX9-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1725 ; GFX9-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1726 ; GFX9-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1727 ; GFX9-CONTRACT: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1728 ; GFX9-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1729 ; GFX9-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1730 ; GFX9-CONTRACT: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1731 ; GFX9-CONTRACT: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1732 ; GFX9-CONTRACT: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
1733 ; GFX9-CONTRACT: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
1734 ; GFX9-CONTRACT: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
1735 ; GFX9-CONTRACT: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
1736 ; GFX9-CONTRACT: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
1737 ; GFX9-CONTRACT: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
1738 ; GFX9-CONTRACT: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
1739 ; GFX9-CONTRACT: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
1740 ; GFX9-CONTRACT: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
1741 ; GFX9-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
1742 ; GFX9-CONTRACT: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
1743 ; GFX9-CONTRACT: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
1744 ; GFX9-CONTRACT: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
1745 ; GFX9-CONTRACT: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
1746 ; GFX9-CONTRACT: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
1747 ; GFX9-CONTRACT: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
1748 ; GFX9-CONTRACT: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
1749 ; GFX9-CONTRACT: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
1750 ; GFX9-CONTRACT: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
1751 ; GFX9-CONTRACT: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
1752 ; GFX9-CONTRACT: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
1753 ; GFX9-CONTRACT: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
1754 ; GFX9-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
1755 ; GFX9-CONTRACT: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1756 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(<4 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1757 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s64>)
1758 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](s32)
1759 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
1760 ; GFX9-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
1761 ; GFX9-CONTRACT: $vgpr3 = COPY [[UV3]](s32)
1762 ; GFX9-CONTRACT: $vgpr4 = COPY [[UV4]](s32)
1763 ; GFX9-CONTRACT: $vgpr5 = COPY [[UV5]](s32)
1764 ; GFX9-CONTRACT: $vgpr6 = COPY [[UV6]](s32)
1765 ; GFX9-CONTRACT: $vgpr7 = COPY [[UV7]](s32)
1766 ; GFX9-CONTRACT: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
1767 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
1768 ; GFX9-DENORM-LABEL: name: test_4xdouble_add_mul
1769 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1770 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1771 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1772 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1773 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1774 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1775 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1776 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1777 ; GFX9-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1778 ; GFX9-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1779 ; GFX9-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1780 ; GFX9-DENORM: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1781 ; GFX9-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1782 ; GFX9-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1783 ; GFX9-DENORM: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1784 ; GFX9-DENORM: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1785 ; GFX9-DENORM: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
1786 ; GFX9-DENORM: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
1787 ; GFX9-DENORM: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
1788 ; GFX9-DENORM: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
1789 ; GFX9-DENORM: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
1790 ; GFX9-DENORM: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
1791 ; GFX9-DENORM: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
1792 ; GFX9-DENORM: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
1793 ; GFX9-DENORM: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
1794 ; GFX9-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
1795 ; GFX9-DENORM: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
1796 ; GFX9-DENORM: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
1797 ; GFX9-DENORM: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
1798 ; GFX9-DENORM: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
1799 ; GFX9-DENORM: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
1800 ; GFX9-DENORM: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
1801 ; GFX9-DENORM: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
1802 ; GFX9-DENORM: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
1803 ; GFX9-DENORM: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
1804 ; GFX9-DENORM: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
1805 ; GFX9-DENORM: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
1806 ; GFX9-DENORM: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
1807 ; GFX9-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
1808 ; GFX9-DENORM: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1809 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(<4 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1810 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(<4 x s64>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
1811 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s64>)
1812 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](s32)
1813 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](s32)
1814 ; GFX9-DENORM: $vgpr2 = COPY [[UV2]](s32)
1815 ; GFX9-DENORM: $vgpr3 = COPY [[UV3]](s32)
1816 ; GFX9-DENORM: $vgpr4 = COPY [[UV4]](s32)
1817 ; GFX9-DENORM: $vgpr5 = COPY [[UV5]](s32)
1818 ; GFX9-DENORM: $vgpr6 = COPY [[UV6]](s32)
1819 ; GFX9-DENORM: $vgpr7 = COPY [[UV7]](s32)
1820 ; GFX9-DENORM: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
1821 ; GFX9-DENORM: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
1822 ; GFX9-UNSAFE-LABEL: name: test_4xdouble_add_mul
1823 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1824 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1825 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1826 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1827 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1828 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1829 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1830 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1831 ; GFX9-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1832 ; GFX9-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1833 ; GFX9-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1834 ; GFX9-UNSAFE: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1835 ; GFX9-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1836 ; GFX9-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1837 ; GFX9-UNSAFE: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1838 ; GFX9-UNSAFE: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1839 ; GFX9-UNSAFE: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
1840 ; GFX9-UNSAFE: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
1841 ; GFX9-UNSAFE: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
1842 ; GFX9-UNSAFE: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
1843 ; GFX9-UNSAFE: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
1844 ; GFX9-UNSAFE: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
1845 ; GFX9-UNSAFE: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
1846 ; GFX9-UNSAFE: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
1847 ; GFX9-UNSAFE: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
1848 ; GFX9-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
1849 ; GFX9-UNSAFE: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
1850 ; GFX9-UNSAFE: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
1851 ; GFX9-UNSAFE: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
1852 ; GFX9-UNSAFE: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
1853 ; GFX9-UNSAFE: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
1854 ; GFX9-UNSAFE: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
1855 ; GFX9-UNSAFE: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
1856 ; GFX9-UNSAFE: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
1857 ; GFX9-UNSAFE: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
1858 ; GFX9-UNSAFE: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
1859 ; GFX9-UNSAFE: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
1860 ; GFX9-UNSAFE: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
1861 ; GFX9-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
1862 ; GFX9-UNSAFE: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1863 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(<4 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1864 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s64>)
1865 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](s32)
1866 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
1867 ; GFX9-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
1868 ; GFX9-UNSAFE: $vgpr3 = COPY [[UV3]](s32)
1869 ; GFX9-UNSAFE: $vgpr4 = COPY [[UV4]](s32)
1870 ; GFX9-UNSAFE: $vgpr5 = COPY [[UV5]](s32)
1871 ; GFX9-UNSAFE: $vgpr6 = COPY [[UV6]](s32)
1872 ; GFX9-UNSAFE: $vgpr7 = COPY [[UV7]](s32)
1873 ; GFX9-UNSAFE: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
1874 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
1875 ; GFX10-LABEL: name: test_4xdouble_add_mul
1876 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1877 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1878 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1879 ; GFX10: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1880 ; GFX10: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1881 ; GFX10: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1882 ; GFX10: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1883 ; GFX10: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1884 ; GFX10: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1885 ; GFX10: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1886 ; GFX10: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1887 ; GFX10: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1888 ; GFX10: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1889 ; GFX10: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1890 ; GFX10: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1891 ; GFX10: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1892 ; GFX10: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
1893 ; GFX10: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
1894 ; GFX10: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
1895 ; GFX10: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
1896 ; GFX10: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
1897 ; GFX10: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
1898 ; GFX10: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
1899 ; GFX10: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
1900 ; GFX10: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
1901 ; GFX10: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
1902 ; GFX10: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
1903 ; GFX10: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
1904 ; GFX10: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
1905 ; GFX10: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
1906 ; GFX10: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
1907 ; GFX10: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
1908 ; GFX10: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
1909 ; GFX10: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
1910 ; GFX10: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
1911 ; GFX10: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
1912 ; GFX10: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
1913 ; GFX10: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
1914 ; GFX10: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
1915 ; GFX10: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1916 ; GFX10: [[FMUL:%[0-9]+]]:_(<4 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
1917 ; GFX10: [[FADD:%[0-9]+]]:_(<4 x s64>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
1918 ; GFX10: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s64>)
1919 ; GFX10: $vgpr0 = COPY [[UV]](s32)
1920 ; GFX10: $vgpr1 = COPY [[UV1]](s32)
1921 ; GFX10: $vgpr2 = COPY [[UV2]](s32)
1922 ; GFX10: $vgpr3 = COPY [[UV3]](s32)
1923 ; GFX10: $vgpr4 = COPY [[UV4]](s32)
1924 ; GFX10: $vgpr5 = COPY [[UV5]](s32)
1925 ; GFX10: $vgpr6 = COPY [[UV6]](s32)
1926 ; GFX10: $vgpr7 = COPY [[UV7]](s32)
1927 ; GFX10: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
1928 ; GFX10: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
1929 ; GFX10-CONTRACT-LABEL: name: test_4xdouble_add_mul
1930 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1931 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1932 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1933 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1934 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1935 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1936 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1937 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1938 ; GFX10-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1939 ; GFX10-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1940 ; GFX10-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1941 ; GFX10-CONTRACT: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1942 ; GFX10-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1943 ; GFX10-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1944 ; GFX10-CONTRACT: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1945 ; GFX10-CONTRACT: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1946 ; GFX10-CONTRACT: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
1947 ; GFX10-CONTRACT: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
1948 ; GFX10-CONTRACT: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
1949 ; GFX10-CONTRACT: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
1950 ; GFX10-CONTRACT: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
1951 ; GFX10-CONTRACT: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
1952 ; GFX10-CONTRACT: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
1953 ; GFX10-CONTRACT: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
1954 ; GFX10-CONTRACT: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
1955 ; GFX10-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
1956 ; GFX10-CONTRACT: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
1957 ; GFX10-CONTRACT: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
1958 ; GFX10-CONTRACT: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
1959 ; GFX10-CONTRACT: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
1960 ; GFX10-CONTRACT: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
1961 ; GFX10-CONTRACT: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
1962 ; GFX10-CONTRACT: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
1963 ; GFX10-CONTRACT: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
1964 ; GFX10-CONTRACT: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
1965 ; GFX10-CONTRACT: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
1966 ; GFX10-CONTRACT: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
1967 ; GFX10-CONTRACT: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
1968 ; GFX10-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
1969 ; GFX10-CONTRACT: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
1970 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(<4 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
1971 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s64>)
1972 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](s32)
1973 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
1974 ; GFX10-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
1975 ; GFX10-CONTRACT: $vgpr3 = COPY [[UV3]](s32)
1976 ; GFX10-CONTRACT: $vgpr4 = COPY [[UV4]](s32)
1977 ; GFX10-CONTRACT: $vgpr5 = COPY [[UV5]](s32)
1978 ; GFX10-CONTRACT: $vgpr6 = COPY [[UV6]](s32)
1979 ; GFX10-CONTRACT: $vgpr7 = COPY [[UV7]](s32)
1980 ; GFX10-CONTRACT: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
1981 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
1982 ; GFX10-DENORM-LABEL: name: test_4xdouble_add_mul
1983 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
1984 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
1985 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
1986 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
1987 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
1988 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
1989 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
1990 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
1991 ; GFX10-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
1992 ; GFX10-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
1993 ; GFX10-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
1994 ; GFX10-DENORM: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
1995 ; GFX10-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
1996 ; GFX10-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
1997 ; GFX10-DENORM: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
1998 ; GFX10-DENORM: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
1999 ; GFX10-DENORM: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2000 ; GFX10-DENORM: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2001 ; GFX10-DENORM: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2002 ; GFX10-DENORM: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2003 ; GFX10-DENORM: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2004 ; GFX10-DENORM: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2005 ; GFX10-DENORM: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2006 ; GFX10-DENORM: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2007 ; GFX10-DENORM: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2008 ; GFX10-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
2009 ; GFX10-DENORM: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2010 ; GFX10-DENORM: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2011 ; GFX10-DENORM: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
2012 ; GFX10-DENORM: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
2013 ; GFX10-DENORM: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
2014 ; GFX10-DENORM: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
2015 ; GFX10-DENORM: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
2016 ; GFX10-DENORM: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
2017 ; GFX10-DENORM: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2018 ; GFX10-DENORM: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
2019 ; GFX10-DENORM: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
2020 ; GFX10-DENORM: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
2021 ; GFX10-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
2022 ; GFX10-DENORM: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2023 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(<4 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
2024 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(<4 x s64>) = reassoc G_FADD [[FMUL]], [[BUILD_VECTOR2]]
2025 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<4 x s64>)
2026 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](s32)
2027 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](s32)
2028 ; GFX10-DENORM: $vgpr2 = COPY [[UV2]](s32)
2029 ; GFX10-DENORM: $vgpr3 = COPY [[UV3]](s32)
2030 ; GFX10-DENORM: $vgpr4 = COPY [[UV4]](s32)
2031 ; GFX10-DENORM: $vgpr5 = COPY [[UV5]](s32)
2032 ; GFX10-DENORM: $vgpr6 = COPY [[UV6]](s32)
2033 ; GFX10-DENORM: $vgpr7 = COPY [[UV7]](s32)
2034 ; GFX10-DENORM: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
2035 ; GFX10-DENORM: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
2036 ; GFX10-UNSAFE-LABEL: name: test_4xdouble_add_mul
2037 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2038 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2039 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2040 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2041 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2042 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2043 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2044 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2045 ; GFX10-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2046 ; GFX10-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2047 ; GFX10-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2048 ; GFX10-UNSAFE: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2049 ; GFX10-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64), [[MV3]](s64)
2050 ; GFX10-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2051 ; GFX10-UNSAFE: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2052 ; GFX10-UNSAFE: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2053 ; GFX10-UNSAFE: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2054 ; GFX10-UNSAFE: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2055 ; GFX10-UNSAFE: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2056 ; GFX10-UNSAFE: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2057 ; GFX10-UNSAFE: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2058 ; GFX10-UNSAFE: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2059 ; GFX10-UNSAFE: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2060 ; GFX10-UNSAFE: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2061 ; GFX10-UNSAFE: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2062 ; GFX10-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV4]](s64), [[MV5]](s64), [[MV6]](s64), [[MV7]](s64)
2063 ; GFX10-UNSAFE: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2064 ; GFX10-UNSAFE: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2065 ; GFX10-UNSAFE: [[COPY18:%[0-9]+]]:_(s32) = COPY $vgpr18
2066 ; GFX10-UNSAFE: [[COPY19:%[0-9]+]]:_(s32) = COPY $vgpr19
2067 ; GFX10-UNSAFE: [[COPY20:%[0-9]+]]:_(s32) = COPY $vgpr20
2068 ; GFX10-UNSAFE: [[COPY21:%[0-9]+]]:_(s32) = COPY $vgpr21
2069 ; GFX10-UNSAFE: [[COPY22:%[0-9]+]]:_(s32) = COPY $vgpr22
2070 ; GFX10-UNSAFE: [[COPY23:%[0-9]+]]:_(s32) = COPY $vgpr23
2071 ; GFX10-UNSAFE: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2072 ; GFX10-UNSAFE: [[MV9:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY18]](s32), [[COPY19]](s32)
2073 ; GFX10-UNSAFE: [[MV10:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY20]](s32), [[COPY21]](s32)
2074 ; GFX10-UNSAFE: [[MV11:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY22]](s32), [[COPY23]](s32)
2075 ; GFX10-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<4 x s64>) = G_BUILD_VECTOR [[MV8]](s64), [[MV9]](s64), [[MV10]](s64), [[MV11]](s64)
2076 ; GFX10-UNSAFE: [[COPY24:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2077 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(<4 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
2078 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32), [[UV6:%[0-9]+]]:_(s32), [[UV7:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<4 x s64>)
2079 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](s32)
2080 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
2081 ; GFX10-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
2082 ; GFX10-UNSAFE: $vgpr3 = COPY [[UV3]](s32)
2083 ; GFX10-UNSAFE: $vgpr4 = COPY [[UV4]](s32)
2084 ; GFX10-UNSAFE: $vgpr5 = COPY [[UV5]](s32)
2085 ; GFX10-UNSAFE: $vgpr6 = COPY [[UV6]](s32)
2086 ; GFX10-UNSAFE: $vgpr7 = COPY [[UV7]](s32)
2087 ; GFX10-UNSAFE: [[COPY25:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY24]]
2088 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY25]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
2089 %4:_(s32) = COPY $vgpr0
2090 %5:_(s32) = COPY $vgpr1
2091 %6:_(s32) = COPY $vgpr2
2092 %7:_(s32) = COPY $vgpr3
2093 %8:_(s32) = COPY $vgpr4
2094 %9:_(s32) = COPY $vgpr5
2095 %10:_(s32) = COPY $vgpr6
2096 %11:_(s32) = COPY $vgpr7
2097 %28:_(s64) = G_MERGE_VALUES %4(s32), %5(s32)
2098 %29:_(s64) = G_MERGE_VALUES %6(s32), %7(s32)
2099 %30:_(s64) = G_MERGE_VALUES %8(s32), %9(s32)
2100 %31:_(s64) = G_MERGE_VALUES %10(s32), %11(s32)
2101 %0:_(<4 x s64>) = G_BUILD_VECTOR %28(s64), %29(s64), %30(s64), %31(s64)
2102 %12:_(s32) = COPY $vgpr8
2103 %13:_(s32) = COPY $vgpr9
2104 %14:_(s32) = COPY $vgpr10
2105 %15:_(s32) = COPY $vgpr11
2106 %16:_(s32) = COPY $vgpr12
2107 %17:_(s32) = COPY $vgpr13
2108 %18:_(s32) = COPY $vgpr14
2109 %19:_(s32) = COPY $vgpr15
2110 %32:_(s64) = G_MERGE_VALUES %12(s32), %13(s32)
2111 %33:_(s64) = G_MERGE_VALUES %14(s32), %15(s32)
2112 %34:_(s64) = G_MERGE_VALUES %16(s32), %17(s32)
2113 %35:_(s64) = G_MERGE_VALUES %18(s32), %19(s32)
2114 %1:_(<4 x s64>) = G_BUILD_VECTOR %32(s64), %33(s64), %34(s64), %35(s64)
2115 %20:_(s32) = COPY $vgpr16
2116 %21:_(s32) = COPY $vgpr17
2117 %22:_(s32) = COPY $vgpr18
2118 %23:_(s32) = COPY $vgpr19
2119 %24:_(s32) = COPY $vgpr20
2120 %25:_(s32) = COPY $vgpr21
2121 %26:_(s32) = COPY $vgpr22
2122 %27:_(s32) = COPY $vgpr23
2123 %36:_(s64) = G_MERGE_VALUES %20(s32), %21(s32)
2124 %37:_(s64) = G_MERGE_VALUES %22(s32), %23(s32)
2125 %38:_(s64) = G_MERGE_VALUES %24(s32), %25(s32)
2126 %39:_(s64) = G_MERGE_VALUES %26(s32), %27(s32)
2127 %2:_(<4 x s64>) = G_BUILD_VECTOR %36(s64), %37(s64), %38(s64), %39(s64)
2128 %3:sgpr_64 = COPY $sgpr30_sgpr31
2129 %40:_(<4 x s64>) = reassoc G_FMUL %0, %1
2130 %41:_(<4 x s64>) = reassoc G_FADD %40, %2
2131 %43:_(s32), %44:_(s32), %45:_(s32), %46:_(s32), %47:_(s32), %48:_(s32), %49:_(s32), %50:_(s32) = G_UNMERGE_VALUES %41(<4 x s64>)
2132 $vgpr0 = COPY %43(s32)
2133 $vgpr1 = COPY %44(s32)
2134 $vgpr2 = COPY %45(s32)
2135 $vgpr3 = COPY %46(s32)
2136 $vgpr4 = COPY %47(s32)
2137 $vgpr5 = COPY %48(s32)
2138 $vgpr6 = COPY %49(s32)
2139 $vgpr7 = COPY %50(s32)
2140 %42:ccr_sgpr_64 = COPY %3
2141 S_SETPC_B64_return %42, implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5, implicit $vgpr6, implicit $vgpr7
2145 name: test_3xdouble_add_mul_rhs
2148 liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5, $vgpr6, $vgpr7, $vgpr8, $vgpr9, $vgpr10, $vgpr11, $vgpr12, $vgpr13, $vgpr14, $vgpr15, $vgpr16, $vgpr17, $sgpr30_sgpr31
2150 ; GFX9-LABEL: name: test_3xdouble_add_mul_rhs
2151 ; GFX9: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2152 ; GFX9: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2153 ; GFX9: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2154 ; GFX9: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2155 ; GFX9: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2156 ; GFX9: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2157 ; GFX9: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2158 ; GFX9: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2159 ; GFX9: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2160 ; GFX9: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2161 ; GFX9: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2162 ; GFX9: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2163 ; GFX9: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2164 ; GFX9: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2165 ; GFX9: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2166 ; GFX9: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2167 ; GFX9: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2168 ; GFX9: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2169 ; GFX9: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2170 ; GFX9: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2171 ; GFX9: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2172 ; GFX9: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2173 ; GFX9: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2174 ; GFX9: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2175 ; GFX9: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2176 ; GFX9: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2177 ; GFX9: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2178 ; GFX9: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2179 ; GFX9: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2180 ; GFX9: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2181 ; GFX9: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2182 ; GFX9: [[FMUL:%[0-9]+]]:_(<3 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
2183 ; GFX9: [[FADD:%[0-9]+]]:_(<3 x s64>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
2184 ; GFX9: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s64>)
2185 ; GFX9: $vgpr0 = COPY [[UV]](s32)
2186 ; GFX9: $vgpr1 = COPY [[UV1]](s32)
2187 ; GFX9: $vgpr2 = COPY [[UV2]](s32)
2188 ; GFX9: $vgpr3 = COPY [[UV3]](s32)
2189 ; GFX9: $vgpr4 = COPY [[UV4]](s32)
2190 ; GFX9: $vgpr5 = COPY [[UV5]](s32)
2191 ; GFX9: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2192 ; GFX9: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2193 ; GFX9-CONTRACT-LABEL: name: test_3xdouble_add_mul_rhs
2194 ; GFX9-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2195 ; GFX9-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2196 ; GFX9-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2197 ; GFX9-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2198 ; GFX9-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2199 ; GFX9-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2200 ; GFX9-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2201 ; GFX9-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2202 ; GFX9-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2203 ; GFX9-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2204 ; GFX9-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2205 ; GFX9-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2206 ; GFX9-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2207 ; GFX9-CONTRACT: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2208 ; GFX9-CONTRACT: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2209 ; GFX9-CONTRACT: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2210 ; GFX9-CONTRACT: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2211 ; GFX9-CONTRACT: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2212 ; GFX9-CONTRACT: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2213 ; GFX9-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2214 ; GFX9-CONTRACT: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2215 ; GFX9-CONTRACT: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2216 ; GFX9-CONTRACT: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2217 ; GFX9-CONTRACT: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2218 ; GFX9-CONTRACT: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2219 ; GFX9-CONTRACT: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2220 ; GFX9-CONTRACT: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2221 ; GFX9-CONTRACT: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2222 ; GFX9-CONTRACT: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2223 ; GFX9-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2224 ; GFX9-CONTRACT: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2225 ; GFX9-CONTRACT: [[FMA:%[0-9]+]]:_(<3 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
2226 ; GFX9-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s64>)
2227 ; GFX9-CONTRACT: $vgpr0 = COPY [[UV]](s32)
2228 ; GFX9-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
2229 ; GFX9-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
2230 ; GFX9-CONTRACT: $vgpr3 = COPY [[UV3]](s32)
2231 ; GFX9-CONTRACT: $vgpr4 = COPY [[UV4]](s32)
2232 ; GFX9-CONTRACT: $vgpr5 = COPY [[UV5]](s32)
2233 ; GFX9-CONTRACT: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2234 ; GFX9-CONTRACT: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2235 ; GFX9-DENORM-LABEL: name: test_3xdouble_add_mul_rhs
2236 ; GFX9-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2237 ; GFX9-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2238 ; GFX9-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2239 ; GFX9-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2240 ; GFX9-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2241 ; GFX9-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2242 ; GFX9-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2243 ; GFX9-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2244 ; GFX9-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2245 ; GFX9-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2246 ; GFX9-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2247 ; GFX9-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2248 ; GFX9-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2249 ; GFX9-DENORM: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2250 ; GFX9-DENORM: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2251 ; GFX9-DENORM: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2252 ; GFX9-DENORM: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2253 ; GFX9-DENORM: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2254 ; GFX9-DENORM: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2255 ; GFX9-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2256 ; GFX9-DENORM: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2257 ; GFX9-DENORM: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2258 ; GFX9-DENORM: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2259 ; GFX9-DENORM: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2260 ; GFX9-DENORM: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2261 ; GFX9-DENORM: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2262 ; GFX9-DENORM: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2263 ; GFX9-DENORM: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2264 ; GFX9-DENORM: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2265 ; GFX9-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2266 ; GFX9-DENORM: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2267 ; GFX9-DENORM: [[FMUL:%[0-9]+]]:_(<3 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
2268 ; GFX9-DENORM: [[FADD:%[0-9]+]]:_(<3 x s64>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
2269 ; GFX9-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s64>)
2270 ; GFX9-DENORM: $vgpr0 = COPY [[UV]](s32)
2271 ; GFX9-DENORM: $vgpr1 = COPY [[UV1]](s32)
2272 ; GFX9-DENORM: $vgpr2 = COPY [[UV2]](s32)
2273 ; GFX9-DENORM: $vgpr3 = COPY [[UV3]](s32)
2274 ; GFX9-DENORM: $vgpr4 = COPY [[UV4]](s32)
2275 ; GFX9-DENORM: $vgpr5 = COPY [[UV5]](s32)
2276 ; GFX9-DENORM: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2277 ; GFX9-DENORM: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2278 ; GFX9-UNSAFE-LABEL: name: test_3xdouble_add_mul_rhs
2279 ; GFX9-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2280 ; GFX9-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2281 ; GFX9-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2282 ; GFX9-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2283 ; GFX9-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2284 ; GFX9-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2285 ; GFX9-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2286 ; GFX9-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2287 ; GFX9-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2288 ; GFX9-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2289 ; GFX9-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2290 ; GFX9-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2291 ; GFX9-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2292 ; GFX9-UNSAFE: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2293 ; GFX9-UNSAFE: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2294 ; GFX9-UNSAFE: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2295 ; GFX9-UNSAFE: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2296 ; GFX9-UNSAFE: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2297 ; GFX9-UNSAFE: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2298 ; GFX9-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2299 ; GFX9-UNSAFE: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2300 ; GFX9-UNSAFE: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2301 ; GFX9-UNSAFE: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2302 ; GFX9-UNSAFE: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2303 ; GFX9-UNSAFE: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2304 ; GFX9-UNSAFE: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2305 ; GFX9-UNSAFE: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2306 ; GFX9-UNSAFE: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2307 ; GFX9-UNSAFE: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2308 ; GFX9-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2309 ; GFX9-UNSAFE: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2310 ; GFX9-UNSAFE: [[FMA:%[0-9]+]]:_(<3 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
2311 ; GFX9-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s64>)
2312 ; GFX9-UNSAFE: $vgpr0 = COPY [[UV]](s32)
2313 ; GFX9-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
2314 ; GFX9-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
2315 ; GFX9-UNSAFE: $vgpr3 = COPY [[UV3]](s32)
2316 ; GFX9-UNSAFE: $vgpr4 = COPY [[UV4]](s32)
2317 ; GFX9-UNSAFE: $vgpr5 = COPY [[UV5]](s32)
2318 ; GFX9-UNSAFE: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2319 ; GFX9-UNSAFE: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2320 ; GFX10-LABEL: name: test_3xdouble_add_mul_rhs
2321 ; GFX10: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2322 ; GFX10: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2323 ; GFX10: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2324 ; GFX10: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2325 ; GFX10: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2326 ; GFX10: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2327 ; GFX10: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2328 ; GFX10: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2329 ; GFX10: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2330 ; GFX10: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2331 ; GFX10: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2332 ; GFX10: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2333 ; GFX10: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2334 ; GFX10: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2335 ; GFX10: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2336 ; GFX10: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2337 ; GFX10: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2338 ; GFX10: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2339 ; GFX10: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2340 ; GFX10: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2341 ; GFX10: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2342 ; GFX10: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2343 ; GFX10: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2344 ; GFX10: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2345 ; GFX10: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2346 ; GFX10: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2347 ; GFX10: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2348 ; GFX10: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2349 ; GFX10: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2350 ; GFX10: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2351 ; GFX10: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2352 ; GFX10: [[FMUL:%[0-9]+]]:_(<3 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
2353 ; GFX10: [[FADD:%[0-9]+]]:_(<3 x s64>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
2354 ; GFX10: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s64>)
2355 ; GFX10: $vgpr0 = COPY [[UV]](s32)
2356 ; GFX10: $vgpr1 = COPY [[UV1]](s32)
2357 ; GFX10: $vgpr2 = COPY [[UV2]](s32)
2358 ; GFX10: $vgpr3 = COPY [[UV3]](s32)
2359 ; GFX10: $vgpr4 = COPY [[UV4]](s32)
2360 ; GFX10: $vgpr5 = COPY [[UV5]](s32)
2361 ; GFX10: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2362 ; GFX10: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2363 ; GFX10-CONTRACT-LABEL: name: test_3xdouble_add_mul_rhs
2364 ; GFX10-CONTRACT: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2365 ; GFX10-CONTRACT: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2366 ; GFX10-CONTRACT: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2367 ; GFX10-CONTRACT: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2368 ; GFX10-CONTRACT: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2369 ; GFX10-CONTRACT: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2370 ; GFX10-CONTRACT: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2371 ; GFX10-CONTRACT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2372 ; GFX10-CONTRACT: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2373 ; GFX10-CONTRACT: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2374 ; GFX10-CONTRACT: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2375 ; GFX10-CONTRACT: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2376 ; GFX10-CONTRACT: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2377 ; GFX10-CONTRACT: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2378 ; GFX10-CONTRACT: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2379 ; GFX10-CONTRACT: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2380 ; GFX10-CONTRACT: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2381 ; GFX10-CONTRACT: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2382 ; GFX10-CONTRACT: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2383 ; GFX10-CONTRACT: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2384 ; GFX10-CONTRACT: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2385 ; GFX10-CONTRACT: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2386 ; GFX10-CONTRACT: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2387 ; GFX10-CONTRACT: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2388 ; GFX10-CONTRACT: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2389 ; GFX10-CONTRACT: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2390 ; GFX10-CONTRACT: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2391 ; GFX10-CONTRACT: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2392 ; GFX10-CONTRACT: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2393 ; GFX10-CONTRACT: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2394 ; GFX10-CONTRACT: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2395 ; GFX10-CONTRACT: [[FMA:%[0-9]+]]:_(<3 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
2396 ; GFX10-CONTRACT: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s64>)
2397 ; GFX10-CONTRACT: $vgpr0 = COPY [[UV]](s32)
2398 ; GFX10-CONTRACT: $vgpr1 = COPY [[UV1]](s32)
2399 ; GFX10-CONTRACT: $vgpr2 = COPY [[UV2]](s32)
2400 ; GFX10-CONTRACT: $vgpr3 = COPY [[UV3]](s32)
2401 ; GFX10-CONTRACT: $vgpr4 = COPY [[UV4]](s32)
2402 ; GFX10-CONTRACT: $vgpr5 = COPY [[UV5]](s32)
2403 ; GFX10-CONTRACT: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2404 ; GFX10-CONTRACT: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2405 ; GFX10-DENORM-LABEL: name: test_3xdouble_add_mul_rhs
2406 ; GFX10-DENORM: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2407 ; GFX10-DENORM: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2408 ; GFX10-DENORM: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2409 ; GFX10-DENORM: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2410 ; GFX10-DENORM: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2411 ; GFX10-DENORM: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2412 ; GFX10-DENORM: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2413 ; GFX10-DENORM: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2414 ; GFX10-DENORM: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2415 ; GFX10-DENORM: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2416 ; GFX10-DENORM: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2417 ; GFX10-DENORM: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2418 ; GFX10-DENORM: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2419 ; GFX10-DENORM: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2420 ; GFX10-DENORM: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2421 ; GFX10-DENORM: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2422 ; GFX10-DENORM: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2423 ; GFX10-DENORM: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2424 ; GFX10-DENORM: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2425 ; GFX10-DENORM: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2426 ; GFX10-DENORM: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2427 ; GFX10-DENORM: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2428 ; GFX10-DENORM: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2429 ; GFX10-DENORM: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2430 ; GFX10-DENORM: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2431 ; GFX10-DENORM: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2432 ; GFX10-DENORM: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2433 ; GFX10-DENORM: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2434 ; GFX10-DENORM: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2435 ; GFX10-DENORM: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2436 ; GFX10-DENORM: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2437 ; GFX10-DENORM: [[FMUL:%[0-9]+]]:_(<3 x s64>) = reassoc G_FMUL [[BUILD_VECTOR]], [[BUILD_VECTOR1]]
2438 ; GFX10-DENORM: [[FADD:%[0-9]+]]:_(<3 x s64>) = reassoc G_FADD [[BUILD_VECTOR2]], [[FMUL]]
2439 ; GFX10-DENORM: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FADD]](<3 x s64>)
2440 ; GFX10-DENORM: $vgpr0 = COPY [[UV]](s32)
2441 ; GFX10-DENORM: $vgpr1 = COPY [[UV1]](s32)
2442 ; GFX10-DENORM: $vgpr2 = COPY [[UV2]](s32)
2443 ; GFX10-DENORM: $vgpr3 = COPY [[UV3]](s32)
2444 ; GFX10-DENORM: $vgpr4 = COPY [[UV4]](s32)
2445 ; GFX10-DENORM: $vgpr5 = COPY [[UV5]](s32)
2446 ; GFX10-DENORM: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2447 ; GFX10-DENORM: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2448 ; GFX10-UNSAFE-LABEL: name: test_3xdouble_add_mul_rhs
2449 ; GFX10-UNSAFE: [[COPY:%[0-9]+]]:_(s32) = COPY $vgpr0
2450 ; GFX10-UNSAFE: [[COPY1:%[0-9]+]]:_(s32) = COPY $vgpr1
2451 ; GFX10-UNSAFE: [[COPY2:%[0-9]+]]:_(s32) = COPY $vgpr2
2452 ; GFX10-UNSAFE: [[COPY3:%[0-9]+]]:_(s32) = COPY $vgpr3
2453 ; GFX10-UNSAFE: [[COPY4:%[0-9]+]]:_(s32) = COPY $vgpr4
2454 ; GFX10-UNSAFE: [[COPY5:%[0-9]+]]:_(s32) = COPY $vgpr5
2455 ; GFX10-UNSAFE: [[MV:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY]](s32), [[COPY1]](s32)
2456 ; GFX10-UNSAFE: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY2]](s32), [[COPY3]](s32)
2457 ; GFX10-UNSAFE: [[MV2:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY4]](s32), [[COPY5]](s32)
2458 ; GFX10-UNSAFE: [[BUILD_VECTOR:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV]](s64), [[MV1]](s64), [[MV2]](s64)
2459 ; GFX10-UNSAFE: [[COPY6:%[0-9]+]]:_(s32) = COPY $vgpr6
2460 ; GFX10-UNSAFE: [[COPY7:%[0-9]+]]:_(s32) = COPY $vgpr7
2461 ; GFX10-UNSAFE: [[COPY8:%[0-9]+]]:_(s32) = COPY $vgpr8
2462 ; GFX10-UNSAFE: [[COPY9:%[0-9]+]]:_(s32) = COPY $vgpr9
2463 ; GFX10-UNSAFE: [[COPY10:%[0-9]+]]:_(s32) = COPY $vgpr10
2464 ; GFX10-UNSAFE: [[COPY11:%[0-9]+]]:_(s32) = COPY $vgpr11
2465 ; GFX10-UNSAFE: [[MV3:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY6]](s32), [[COPY7]](s32)
2466 ; GFX10-UNSAFE: [[MV4:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY8]](s32), [[COPY9]](s32)
2467 ; GFX10-UNSAFE: [[MV5:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY10]](s32), [[COPY11]](s32)
2468 ; GFX10-UNSAFE: [[BUILD_VECTOR1:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV3]](s64), [[MV4]](s64), [[MV5]](s64)
2469 ; GFX10-UNSAFE: [[COPY12:%[0-9]+]]:_(s32) = COPY $vgpr12
2470 ; GFX10-UNSAFE: [[COPY13:%[0-9]+]]:_(s32) = COPY $vgpr13
2471 ; GFX10-UNSAFE: [[COPY14:%[0-9]+]]:_(s32) = COPY $vgpr14
2472 ; GFX10-UNSAFE: [[COPY15:%[0-9]+]]:_(s32) = COPY $vgpr15
2473 ; GFX10-UNSAFE: [[COPY16:%[0-9]+]]:_(s32) = COPY $vgpr16
2474 ; GFX10-UNSAFE: [[COPY17:%[0-9]+]]:_(s32) = COPY $vgpr17
2475 ; GFX10-UNSAFE: [[MV6:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY12]](s32), [[COPY13]](s32)
2476 ; GFX10-UNSAFE: [[MV7:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY14]](s32), [[COPY15]](s32)
2477 ; GFX10-UNSAFE: [[MV8:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[COPY16]](s32), [[COPY17]](s32)
2478 ; GFX10-UNSAFE: [[BUILD_VECTOR2:%[0-9]+]]:_(<3 x s64>) = G_BUILD_VECTOR [[MV6]](s64), [[MV7]](s64), [[MV8]](s64)
2479 ; GFX10-UNSAFE: [[COPY18:%[0-9]+]]:sgpr_64 = COPY $sgpr30_sgpr31
2480 ; GFX10-UNSAFE: [[FMA:%[0-9]+]]:_(<3 x s64>) = G_FMA [[BUILD_VECTOR]], [[BUILD_VECTOR1]], [[BUILD_VECTOR2]]
2481 ; GFX10-UNSAFE: [[UV:%[0-9]+]]:_(s32), [[UV1:%[0-9]+]]:_(s32), [[UV2:%[0-9]+]]:_(s32), [[UV3:%[0-9]+]]:_(s32), [[UV4:%[0-9]+]]:_(s32), [[UV5:%[0-9]+]]:_(s32) = G_UNMERGE_VALUES [[FMA]](<3 x s64>)
2482 ; GFX10-UNSAFE: $vgpr0 = COPY [[UV]](s32)
2483 ; GFX10-UNSAFE: $vgpr1 = COPY [[UV1]](s32)
2484 ; GFX10-UNSAFE: $vgpr2 = COPY [[UV2]](s32)
2485 ; GFX10-UNSAFE: $vgpr3 = COPY [[UV3]](s32)
2486 ; GFX10-UNSAFE: $vgpr4 = COPY [[UV4]](s32)
2487 ; GFX10-UNSAFE: $vgpr5 = COPY [[UV5]](s32)
2488 ; GFX10-UNSAFE: [[COPY19:%[0-9]+]]:ccr_sgpr_64 = COPY [[COPY18]]
2489 ; GFX10-UNSAFE: S_SETPC_B64_return [[COPY19]], implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5
2490 %4:_(s32) = COPY $vgpr0
2491 %5:_(s32) = COPY $vgpr1
2492 %6:_(s32) = COPY $vgpr2
2493 %7:_(s32) = COPY $vgpr3
2494 %8:_(s32) = COPY $vgpr4
2495 %9:_(s32) = COPY $vgpr5
2496 %22:_(s64) = G_MERGE_VALUES %4(s32), %5(s32)
2497 %23:_(s64) = G_MERGE_VALUES %6(s32), %7(s32)
2498 %24:_(s64) = G_MERGE_VALUES %8(s32), %9(s32)
2499 %0:_(<3 x s64>) = G_BUILD_VECTOR %22(s64), %23(s64), %24(s64)
2500 %10:_(s32) = COPY $vgpr6
2501 %11:_(s32) = COPY $vgpr7
2502 %12:_(s32) = COPY $vgpr8
2503 %13:_(s32) = COPY $vgpr9
2504 %14:_(s32) = COPY $vgpr10
2505 %15:_(s32) = COPY $vgpr11
2506 %25:_(s64) = G_MERGE_VALUES %10(s32), %11(s32)
2507 %26:_(s64) = G_MERGE_VALUES %12(s32), %13(s32)
2508 %27:_(s64) = G_MERGE_VALUES %14(s32), %15(s32)
2509 %1:_(<3 x s64>) = G_BUILD_VECTOR %25(s64), %26(s64), %27(s64)
2510 %16:_(s32) = COPY $vgpr12
2511 %17:_(s32) = COPY $vgpr13
2512 %18:_(s32) = COPY $vgpr14
2513 %19:_(s32) = COPY $vgpr15
2514 %20:_(s32) = COPY $vgpr16
2515 %21:_(s32) = COPY $vgpr17
2516 %28:_(s64) = G_MERGE_VALUES %16(s32), %17(s32)
2517 %29:_(s64) = G_MERGE_VALUES %18(s32), %19(s32)
2518 %30:_(s64) = G_MERGE_VALUES %20(s32), %21(s32)
2519 %2:_(<3 x s64>) = G_BUILD_VECTOR %28(s64), %29(s64), %30(s64)
2520 %3:sgpr_64 = COPY $sgpr30_sgpr31
2521 %31:_(<3 x s64>) = reassoc G_FMUL %0, %1
2522 %32:_(<3 x s64>) = reassoc G_FADD %2, %31
2523 %34:_(s32), %35:_(s32), %36:_(s32), %37:_(s32), %38:_(s32), %39:_(s32) = G_UNMERGE_VALUES %32(<3 x s64>)
2524 $vgpr0 = COPY %34(s32)
2525 $vgpr1 = COPY %35(s32)
2526 $vgpr2 = COPY %36(s32)
2527 $vgpr3 = COPY %37(s32)
2528 $vgpr4 = COPY %38(s32)
2529 $vgpr5 = COPY %39(s32)
2530 %33:ccr_sgpr_64 = COPY %3
2531 S_SETPC_B64_return %33, implicit $vgpr0, implicit $vgpr1, implicit $vgpr2, implicit $vgpr3, implicit $vgpr4, implicit $vgpr5