; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -disable-peephole -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefixes=SSE
; RUN: llc < %s -disable-peephole -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefixes=AVX,AVX1
; RUN: llc < %s -disable-peephole -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefixes=AVX,AVX2
; RUN: llc < %s -disable-peephole -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX,AVX512,AVX512F
; RUN: llc < %s -disable-peephole -mtriple=x86_64-unknown-unknown -mattr=+avx512vl | FileCheck %s --check-prefixes=AVX,AVX512,AVX512VL
; Basic matrix multiply tests based on the pattern:
;
; using matrix_ty = float __attribute__((matrix_type(2,2)));
; matrix_ty test_mul2x2(matrix_ty a0, matrix_ty a1) nounwind {
;   return a0 * a1;
; }
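;
; For reference, a worked expansion of the column-major 2x2 product that the IR
; below computes (an illustrative note only, not part of the autogenerated
; checks; indices refer to the flattened <4 x float> element order):
;
;   c[0] = a0[0]*a1[0] + a0[2]*a1[1]    c[2] = a0[0]*a1[2] + a0[2]*a1[3]
;   c[1] = a0[1]*a1[0] + a0[3]*a1[1]    c[3] = a0[1]*a1[2] + a0[3]*a1[3]
;
; i.e. result column j is col0(a0)*a1[2j] + col1(a0)*a1[2j+1], the same
; splat-multiply-add sequence each target's checks follow below.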
17 define <4 x float> @test_mul2x2_f32(<4 x float> %a0, <4 x float> %a1) nounwind {
18 ; SSE-LABEL: test_mul2x2_f32:
19 ; SSE: # %bb.0: # %entry
20 ; SSE-NEXT: movaps %xmm1, %xmm2
21 ; SSE-NEXT: unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
22 ; SSE-NEXT: mulps %xmm0, %xmm2
23 ; SSE-NEXT: movaps %xmm1, %xmm3
24 ; SSE-NEXT: unpckhps {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3]
25 ; SSE-NEXT: mulps %xmm0, %xmm3
26 ; SSE-NEXT: movaps %xmm0, %xmm4
27 ; SSE-NEXT: unpckhpd {{.*#+}} xmm4 = xmm4[1],xmm0[1]
28 ; SSE-NEXT: movaps %xmm1, %xmm0
29 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm1[1,1]
30 ; SSE-NEXT: mulps %xmm4, %xmm0
31 ; SSE-NEXT: addps %xmm2, %xmm0
32 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
33 ; SSE-NEXT: mulps %xmm4, %xmm1
34 ; SSE-NEXT: addps %xmm3, %xmm1
35 ; SSE-NEXT: movlhps {{.*#+}} xmm0 = xmm0[0],xmm1[0]
38 ; AVX1-LABEL: test_mul2x2_f32:
39 ; AVX1: # %bb.0: # %entry
40 ; AVX1-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
41 ; AVX1-NEXT: vmovsldup {{.*#+}} xmm3 = xmm1[0,0,2,2]
42 ; AVX1-NEXT: vmulps %xmm3, %xmm0, %xmm3
43 ; AVX1-NEXT: vmovshdup {{.*#+}} xmm4 = xmm1[1,1,3,3]
44 ; AVX1-NEXT: vmulps %xmm4, %xmm2, %xmm4
45 ; AVX1-NEXT: vaddps %xmm4, %xmm3, %xmm3
46 ; AVX1-NEXT: vshufps {{.*#+}} xmm4 = xmm1[2,2,2,2]
47 ; AVX1-NEXT: vmulps %xmm4, %xmm0, %xmm0
48 ; AVX1-NEXT: vshufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
49 ; AVX1-NEXT: vmulps %xmm1, %xmm2, %xmm1
50 ; AVX1-NEXT: vaddps %xmm1, %xmm0, %xmm0
51 ; AVX1-NEXT: vmovlhps {{.*#+}} xmm0 = xmm3[0],xmm0[0]
54 ; AVX2-LABEL: test_mul2x2_f32:
55 ; AVX2: # %bb.0: # %entry
56 ; AVX2-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
57 ; AVX2-NEXT: vbroadcastss %xmm1, %xmm3
58 ; AVX2-NEXT: vmulps %xmm3, %xmm0, %xmm3
59 ; AVX2-NEXT: vmovshdup {{.*#+}} xmm4 = xmm1[1,1,3,3]
60 ; AVX2-NEXT: vmulps %xmm4, %xmm2, %xmm4
61 ; AVX2-NEXT: vaddps %xmm4, %xmm3, %xmm3
62 ; AVX2-NEXT: vshufps {{.*#+}} xmm4 = xmm1[2,2,2,2]
63 ; AVX2-NEXT: vmulps %xmm4, %xmm0, %xmm0
64 ; AVX2-NEXT: vshufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
65 ; AVX2-NEXT: vmulps %xmm1, %xmm2, %xmm1
66 ; AVX2-NEXT: vaddps %xmm1, %xmm0, %xmm0
67 ; AVX2-NEXT: vmovlhps {{.*#+}} xmm0 = xmm3[0],xmm0[0]
70 ; AVX512-LABEL: test_mul2x2_f32:
71 ; AVX512: # %bb.0: # %entry
72 ; AVX512-NEXT: vshufpd {{.*#+}} xmm2 = xmm0[1,0]
73 ; AVX512-NEXT: vbroadcastss %xmm1, %xmm3
74 ; AVX512-NEXT: vmulps %xmm3, %xmm0, %xmm3
75 ; AVX512-NEXT: vmovshdup {{.*#+}} xmm4 = xmm1[1,1,3,3]
76 ; AVX512-NEXT: vmulps %xmm4, %xmm2, %xmm4
77 ; AVX512-NEXT: vaddps %xmm4, %xmm3, %xmm3
78 ; AVX512-NEXT: vshufps {{.*#+}} xmm4 = xmm1[2,2,2,2]
79 ; AVX512-NEXT: vmulps %xmm4, %xmm0, %xmm0
80 ; AVX512-NEXT: vshufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
81 ; AVX512-NEXT: vmulps %xmm1, %xmm2, %xmm1
82 ; AVX512-NEXT: vaddps %xmm1, %xmm0, %xmm0
83 ; AVX512-NEXT: vmovlhps {{.*#+}} xmm0 = xmm3[0],xmm0[0]
86 %split = shufflevector <4 x float> %a0, <4 x float> poison, <2 x i32> <i32 0, i32 1>
87 %split1 = shufflevector <4 x float> %a0, <4 x float> poison, <2 x i32> <i32 2, i32 3>
88 %splat.splat = shufflevector <4 x float> %a1, <4 x float> undef, <2 x i32> zeroinitializer
89 %0 = fmul <2 x float> %split, %splat.splat
90 %splat.splat6 = shufflevector <4 x float> %a1, <4 x float> undef, <2 x i32> <i32 1, i32 1>
91 %1 = fmul <2 x float> %split1, %splat.splat6
92 %2 = fadd <2 x float> %0, %1
93 %splat.splat9 = shufflevector <4 x float> %a1, <4 x float> undef, <2 x i32> <i32 2, i32 2>
94 %3 = fmul <2 x float> %split, %splat.splat9
95 %splat.splat12 = shufflevector <4 x float> %a1, <4 x float> undef, <2 x i32> <i32 3, i32 3>
96 %4 = fmul <2 x float> %split1, %splat.splat12
97 %5 = fadd <2 x float> %3, %4
98 %6 = shufflevector <2 x float> %2, <2 x float> %5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
102 define <4 x double> @test_mul2x2_f64(<4 x double> %a0, <4 x double> %a1) nounwind {
103 ; SSE-LABEL: test_mul2x2_f64:
104 ; SSE: # %bb.0: # %entry
105 ; SSE-NEXT: movapd %xmm2, %xmm4
106 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm2[0]
107 ; SSE-NEXT: mulpd %xmm0, %xmm4
108 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1,1]
109 ; SSE-NEXT: mulpd %xmm1, %xmm2
110 ; SSE-NEXT: addpd %xmm2, %xmm4
111 ; SSE-NEXT: movapd %xmm3, %xmm2
112 ; SSE-NEXT: unpcklpd {{.*#+}} xmm2 = xmm2[0],xmm3[0]
113 ; SSE-NEXT: mulpd %xmm0, %xmm2
114 ; SSE-NEXT: unpckhpd {{.*#+}} xmm3 = xmm3[1,1]
115 ; SSE-NEXT: mulpd %xmm3, %xmm1
116 ; SSE-NEXT: addpd %xmm2, %xmm1
117 ; SSE-NEXT: movapd %xmm4, %xmm0
120 ; AVX-LABEL: test_mul2x2_f64:
121 ; AVX: # %bb.0: # %entry
122 ; AVX-NEXT: vextractf128 $1, %ymm0, %xmm2
123 ; AVX-NEXT: vmovddup {{.*#+}} xmm3 = xmm1[0,0]
124 ; AVX-NEXT: vmulpd %xmm3, %xmm0, %xmm3
125 ; AVX-NEXT: vshufpd {{.*#+}} xmm4 = xmm1[1,1]
126 ; AVX-NEXT: vmulpd %xmm4, %xmm2, %xmm4
127 ; AVX-NEXT: vaddpd %xmm4, %xmm3, %xmm3
128 ; AVX-NEXT: vextractf128 $1, %ymm1, %xmm1
129 ; AVX-NEXT: vmovddup {{.*#+}} xmm4 = xmm1[0,0]
130 ; AVX-NEXT: vmulpd %xmm4, %xmm0, %xmm0
131 ; AVX-NEXT: vshufpd {{.*#+}} xmm1 = xmm1[1,1]
132 ; AVX-NEXT: vmulpd %xmm1, %xmm2, %xmm1
133 ; AVX-NEXT: vaddpd %xmm1, %xmm0, %xmm0
134 ; AVX-NEXT: vinsertf128 $1, %xmm0, %ymm3, %ymm0
137 %split = shufflevector <4 x double> %a0, <4 x double> poison, <2 x i32> <i32 0, i32 1>
138 %split1 = shufflevector <4 x double> %a0, <4 x double> poison, <2 x i32> <i32 2, i32 3>
139 %splat.splat = shufflevector <4 x double> %a1, <4 x double> undef, <2 x i32> zeroinitializer
140 %0 = fmul <2 x double> %split, %splat.splat
141 %splat.splat6 = shufflevector <4 x double> %a1, <4 x double> undef, <2 x i32> <i32 1, i32 1>
142 %1 = fmul <2 x double> %split1, %splat.splat6
143 %2 = fadd <2 x double> %0, %1
144 %splat.splat9 = shufflevector <4 x double> %a1, <4 x double> undef, <2 x i32> <i32 2, i32 2>
145 %3 = fmul <2 x double> %split, %splat.splat9
146 %splat.splat12 = shufflevector <4 x double> %a1, <4 x double> undef, <2 x i32> <i32 3, i32 3>
147 %4 = fmul <2 x double> %split1, %splat.splat12
148 %5 = fadd <2 x double> %3, %4
149 %6 = shufflevector <2 x double> %2, <2 x double> %5, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
153 define <9 x float> @test_mul3x3_f32(<9 x float> %a0, <9 x float> %a1) nounwind {
154 ; SSE-LABEL: test_mul3x3_f32:
155 ; SSE: # %bb.0: # %entry
156 ; SSE-NEXT: movq %rdi, %rax
157 ; SSE-NEXT: movss {{.*#+}} xmm8 = mem[0],zero,zero,zero
158 ; SSE-NEXT: movss {{.*#+}} xmm10 = mem[0],zero,zero,zero
159 ; SSE-NEXT: movss {{.*#+}} xmm9 = mem[0],zero,zero,zero
160 ; SSE-NEXT: movss {{.*#+}} xmm11 = mem[0],zero,zero,zero
161 ; SSE-NEXT: movss {{.*#+}} xmm12 = mem[0],zero,zero,zero
162 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
163 ; SSE-NEXT: movaps %xmm2, %xmm13
164 ; SSE-NEXT: mulss %xmm12, %xmm13
165 ; SSE-NEXT: unpcklps {{.*#+}} xmm12 = xmm12[0,0,1,1]
166 ; SSE-NEXT: mulps %xmm0, %xmm12
167 ; SSE-NEXT: unpcklps {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1]
168 ; SSE-NEXT: movaps %xmm5, %xmm1
169 ; SSE-NEXT: mulss %xmm11, %xmm1
170 ; SSE-NEXT: unpcklps {{.*#+}} xmm11 = xmm11[0,0,1,1]
171 ; SSE-NEXT: mulps %xmm3, %xmm11
172 ; SSE-NEXT: addps %xmm12, %xmm11
173 ; SSE-NEXT: movss {{.*#+}} xmm4 = mem[0],zero,zero,zero
174 ; SSE-NEXT: unpcklps {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1]
175 ; SSE-NEXT: movaps %xmm9, %xmm7
176 ; SSE-NEXT: mulss %xmm4, %xmm7
177 ; SSE-NEXT: unpcklps {{.*#+}} xmm4 = xmm4[0,0,1,1]
178 ; SSE-NEXT: mulps %xmm6, %xmm4
179 ; SSE-NEXT: addps %xmm11, %xmm4
180 ; SSE-NEXT: movss {{.*#+}} xmm11 = mem[0],zero,zero,zero
181 ; SSE-NEXT: addss %xmm13, %xmm1
182 ; SSE-NEXT: addss %xmm7, %xmm1
183 ; SSE-NEXT: movaps %xmm2, %xmm7
184 ; SSE-NEXT: mulss %xmm11, %xmm7
185 ; SSE-NEXT: unpcklps {{.*#+}} xmm11 = xmm11[0,0,1,1]
186 ; SSE-NEXT: mulps %xmm0, %xmm11
187 ; SSE-NEXT: movaps %xmm5, %xmm12
188 ; SSE-NEXT: mulss %xmm10, %xmm12
189 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0,0,1,1]
190 ; SSE-NEXT: mulps %xmm3, %xmm10
191 ; SSE-NEXT: addps %xmm11, %xmm10
192 ; SSE-NEXT: movaps %xmm9, %xmm11
193 ; SSE-NEXT: mulss %xmm8, %xmm11
194 ; SSE-NEXT: unpcklps {{.*#+}} xmm8 = xmm8[0,0,1,1]
195 ; SSE-NEXT: mulps %xmm6, %xmm8
196 ; SSE-NEXT: addps %xmm10, %xmm8
197 ; SSE-NEXT: addss %xmm7, %xmm12
198 ; SSE-NEXT: addss %xmm11, %xmm12
199 ; SSE-NEXT: movaps %xmm8, %xmm7
200 ; SSE-NEXT: movlhps {{.*#+}} xmm7 = xmm7[0],xmm12[0]
201 ; SSE-NEXT: movss {{.*#+}} xmm10 = mem[0],zero,zero,zero
202 ; SSE-NEXT: mulss %xmm10, %xmm2
203 ; SSE-NEXT: unpcklps {{.*#+}} xmm10 = xmm10[0,0,1,1]
204 ; SSE-NEXT: mulps %xmm0, %xmm10
205 ; SSE-NEXT: movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
206 ; SSE-NEXT: mulss %xmm0, %xmm5
207 ; SSE-NEXT: unpcklps {{.*#+}} xmm0 = xmm0[0,0,1,1]
208 ; SSE-NEXT: mulps %xmm3, %xmm0
209 ; SSE-NEXT: addps %xmm10, %xmm0
210 ; SSE-NEXT: movss {{.*#+}} xmm3 = mem[0],zero,zero,zero
211 ; SSE-NEXT: mulss %xmm3, %xmm9
212 ; SSE-NEXT: unpcklps {{.*#+}} xmm3 = xmm3[0,0,1,1]
213 ; SSE-NEXT: mulps %xmm6, %xmm3
214 ; SSE-NEXT: addps %xmm0, %xmm3
215 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[1,2],xmm3[0,1]
216 ; SSE-NEXT: addss %xmm2, %xmm5
217 ; SSE-NEXT: addss %xmm9, %xmm5
218 ; SSE-NEXT: movlhps {{.*#+}} xmm8 = xmm8[0],xmm1[0]
219 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,1],xmm8[2,0]
220 ; SSE-NEXT: movss %xmm5, 32(%rdi)
221 ; SSE-NEXT: movaps %xmm7, 16(%rdi)
222 ; SSE-NEXT: movaps %xmm4, (%rdi)
225 ; AVX1-LABEL: test_mul3x3_f32:
226 ; AVX1: # %bb.0: # %entry
227 ; AVX1-NEXT: movq %rdi, %rax
228 ; AVX1-NEXT: vmovss {{.*#+}} xmm8 = mem[0],zero,zero,zero
229 ; AVX1-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
230 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm1
231 ; AVX1-NEXT: vmulps %xmm1, %xmm0, %xmm9
232 ; AVX1-NEXT: vinsertps {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
233 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm4
234 ; AVX1-NEXT: vmulps %xmm4, %xmm3, %xmm10
235 ; AVX1-NEXT: vaddps %xmm10, %xmm9, %xmm9
236 ; AVX1-NEXT: vinsertps {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[2,3]
237 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm7
238 ; AVX1-NEXT: vmulps %xmm7, %xmm6, %xmm10
239 ; AVX1-NEXT: vaddps %xmm10, %xmm9, %xmm9
240 ; AVX1-NEXT: vmulss %xmm1, %xmm2, %xmm1
241 ; AVX1-NEXT: vmulss %xmm4, %xmm5, %xmm4
242 ; AVX1-NEXT: vaddss %xmm4, %xmm1, %xmm1
243 ; AVX1-NEXT: vmulss %xmm7, %xmm8, %xmm4
244 ; AVX1-NEXT: vaddss %xmm4, %xmm1, %xmm1
245 ; AVX1-NEXT: vinsertps {{.*#+}} xmm1 = xmm9[0,1],xmm1[0],xmm9[3]
246 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm4
247 ; AVX1-NEXT: vmulps %xmm4, %xmm0, %xmm7
248 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm9
249 ; AVX1-NEXT: vmulps %xmm3, %xmm9, %xmm10
250 ; AVX1-NEXT: vaddps %xmm7, %xmm10, %xmm7
251 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm10
252 ; AVX1-NEXT: vmulps %xmm6, %xmm10, %xmm11
253 ; AVX1-NEXT: vaddps %xmm7, %xmm11, %xmm7
254 ; AVX1-NEXT: vmulss %xmm4, %xmm2, %xmm4
255 ; AVX1-NEXT: vmulss %xmm5, %xmm9, %xmm9
256 ; AVX1-NEXT: vaddss %xmm4, %xmm9, %xmm4
257 ; AVX1-NEXT: vmulss %xmm10, %xmm8, %xmm9
258 ; AVX1-NEXT: vaddss %xmm4, %xmm9, %xmm4
259 ; AVX1-NEXT: vinsertps {{.*#+}} xmm4 = xmm7[0,1],xmm4[0],xmm7[3]
260 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm9
261 ; AVX1-NEXT: vmulps %xmm0, %xmm9, %xmm0
262 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm10
263 ; AVX1-NEXT: vmulps %xmm3, %xmm10, %xmm3
264 ; AVX1-NEXT: vaddps %xmm3, %xmm0, %xmm0
265 ; AVX1-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm3
266 ; AVX1-NEXT: vmulps %xmm3, %xmm6, %xmm6
267 ; AVX1-NEXT: vaddps %xmm6, %xmm0, %xmm0
268 ; AVX1-NEXT: vmulss %xmm2, %xmm9, %xmm2
269 ; AVX1-NEXT: vmulss %xmm5, %xmm10, %xmm5
270 ; AVX1-NEXT: vaddss %xmm5, %xmm2, %xmm2
271 ; AVX1-NEXT: vmulss %xmm3, %xmm8, %xmm3
272 ; AVX1-NEXT: vaddss %xmm3, %xmm2, %xmm2
273 ; AVX1-NEXT: vinsertps {{.*#+}} xmm1 = xmm1[0,1,2],xmm7[0]
274 ; AVX1-NEXT: vshufps {{.*#+}} xmm3 = xmm4[1,2,2,3]
275 ; AVX1-NEXT: vinsertf128 $1, %xmm3, %ymm1, %ymm1
276 ; AVX1-NEXT: vmovddup {{.*#+}} xmm0 = xmm0[0,0]
277 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm0, %ymm0
278 ; AVX1-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
279 ; AVX1-NEXT: vmovss %xmm2, 32(%rdi)
280 ; AVX1-NEXT: vmovaps %ymm0, (%rdi)
281 ; AVX1-NEXT: vzeroupper
284 ; AVX2-LABEL: test_mul3x3_f32:
285 ; AVX2: # %bb.0: # %entry
286 ; AVX2-NEXT: movq %rdi, %rax
287 ; AVX2-NEXT: vmovss {{.*#+}} xmm8 = mem[0],zero,zero,zero
288 ; AVX2-NEXT: vinsertps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[2,3]
289 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm1
290 ; AVX2-NEXT: vmulps %xmm1, %xmm0, %xmm9
291 ; AVX2-NEXT: vinsertps {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[2,3]
292 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm4
293 ; AVX2-NEXT: vmulps %xmm4, %xmm3, %xmm10
294 ; AVX2-NEXT: vaddps %xmm10, %xmm9, %xmm9
295 ; AVX2-NEXT: vinsertps {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[2,3]
296 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm7
297 ; AVX2-NEXT: vmulps %xmm7, %xmm6, %xmm10
298 ; AVX2-NEXT: vaddps %xmm10, %xmm9, %xmm9
299 ; AVX2-NEXT: vmulss %xmm1, %xmm2, %xmm1
300 ; AVX2-NEXT: vmulss %xmm4, %xmm5, %xmm4
301 ; AVX2-NEXT: vaddss %xmm4, %xmm1, %xmm1
302 ; AVX2-NEXT: vmulss %xmm7, %xmm8, %xmm4
303 ; AVX2-NEXT: vaddss %xmm4, %xmm1, %xmm1
304 ; AVX2-NEXT: vinsertps {{.*#+}} xmm1 = xmm9[0,1],xmm1[0],xmm9[3]
305 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm4
306 ; AVX2-NEXT: vmulps %xmm4, %xmm0, %xmm7
307 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm9
308 ; AVX2-NEXT: vmulps %xmm3, %xmm9, %xmm10
309 ; AVX2-NEXT: vaddps %xmm7, %xmm10, %xmm7
310 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm10
311 ; AVX2-NEXT: vmulps %xmm6, %xmm10, %xmm11
312 ; AVX2-NEXT: vaddps %xmm7, %xmm11, %xmm7
313 ; AVX2-NEXT: vmulss %xmm4, %xmm2, %xmm4
314 ; AVX2-NEXT: vmulss %xmm5, %xmm9, %xmm9
315 ; AVX2-NEXT: vaddss %xmm4, %xmm9, %xmm4
316 ; AVX2-NEXT: vmulss %xmm10, %xmm8, %xmm9
317 ; AVX2-NEXT: vaddss %xmm4, %xmm9, %xmm4
318 ; AVX2-NEXT: vinsertps {{.*#+}} xmm4 = xmm7[0,1],xmm4[0],xmm7[3]
319 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm7
320 ; AVX2-NEXT: vmulps %xmm7, %xmm0, %xmm0
321 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm9
322 ; AVX2-NEXT: vmulps %xmm3, %xmm9, %xmm3
323 ; AVX2-NEXT: vaddps %xmm3, %xmm0, %xmm0
324 ; AVX2-NEXT: vbroadcastss {{[0-9]+}}(%rsp), %xmm3
325 ; AVX2-NEXT: vmulps %xmm3, %xmm6, %xmm6
326 ; AVX2-NEXT: vaddps %xmm6, %xmm0, %xmm0
327 ; AVX2-NEXT: vmulss %xmm7, %xmm2, %xmm2
328 ; AVX2-NEXT: vmulss %xmm5, %xmm9, %xmm5
329 ; AVX2-NEXT: vaddss %xmm5, %xmm2, %xmm2
330 ; AVX2-NEXT: vmulss %xmm3, %xmm8, %xmm3
331 ; AVX2-NEXT: vaddss %xmm3, %xmm2, %xmm2
332 ; AVX2-NEXT: vinsertf128 $1, %xmm4, %ymm1, %ymm1
333 ; AVX2-NEXT: vmovaps {{.*#+}} ymm3 = [0,1,2,4,5,6,u,u]
334 ; AVX2-NEXT: vpermps %ymm1, %ymm3, %ymm1
335 ; AVX2-NEXT: vbroadcastsd %xmm0, %ymm0
336 ; AVX2-NEXT: vblendps {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
337 ; AVX2-NEXT: vmovss %xmm2, 32(%rdi)
338 ; AVX2-NEXT: vmovaps %ymm0, (%rdi)
339 ; AVX2-NEXT: vzeroupper
342 ; AVX512F-LABEL: test_mul3x3_f32:
343 ; AVX512F: # %bb.0: # %entry
344 ; AVX512F-NEXT: valignd {{.*#+}} zmm2 = zmm0[3,4,5,6,7,8,9,10,11,12,13,14,15,0,1,2]
345 ; AVX512F-NEXT: vbroadcastss %xmm1, %xmm3
346 ; AVX512F-NEXT: vmulps %xmm3, %xmm0, %xmm3
347 ; AVX512F-NEXT: vextractf128 $1, %ymm0, %xmm5
348 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm6 = xmm1[1,1,3,3]
349 ; AVX512F-NEXT: vmulps %xmm6, %xmm2, %xmm4
350 ; AVX512F-NEXT: vaddps %xmm4, %xmm3, %xmm4
351 ; AVX512F-NEXT: vshufpd {{.*#+}} xmm3 = xmm5[1,0]
352 ; AVX512F-NEXT: vshufps {{.*#+}} xmm7 = xmm1[3,3,3,3]
353 ; AVX512F-NEXT: vshufpd {{.*#+}} xmm8 = xmm1[1,0]
354 ; AVX512F-NEXT: vshufps {{.*#+}} xmm9 = xmm1[2,2,2,2]
355 ; AVX512F-NEXT: vmulps %xmm3, %xmm9, %xmm9
356 ; AVX512F-NEXT: vaddps %xmm4, %xmm9, %xmm9
357 ; AVX512F-NEXT: vshufpd {{.*#+}} xmm4 = xmm0[1,0]
358 ; AVX512F-NEXT: vmulss %xmm1, %xmm4, %xmm10
359 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm5 = xmm5[1,1,3,3]
360 ; AVX512F-NEXT: vmulss %xmm6, %xmm5, %xmm6
361 ; AVX512F-NEXT: vaddss %xmm6, %xmm10, %xmm6
362 ; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm10
363 ; AVX512F-NEXT: vmulss %xmm8, %xmm10, %xmm8
364 ; AVX512F-NEXT: vaddss %xmm6, %xmm8, %xmm6
365 ; AVX512F-NEXT: vinsertps {{.*#+}} xmm6 = xmm9[0,1],xmm6[0],xmm9[3]
366 ; AVX512F-NEXT: vmulps %xmm7, %xmm0, %xmm8
367 ; AVX512F-NEXT: vextractf128 $1, %ymm1, %xmm9
368 ; AVX512F-NEXT: vmovsldup {{.*#+}} xmm11 = xmm9[0,0,2,2]
369 ; AVX512F-NEXT: vmulps %xmm2, %xmm11, %xmm11
370 ; AVX512F-NEXT: vaddps %xmm11, %xmm8, %xmm8
371 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm11 = xmm9[1,1,3,3]
372 ; AVX512F-NEXT: vmulps %xmm3, %xmm11, %xmm12
373 ; AVX512F-NEXT: vaddps %xmm12, %xmm8, %xmm8
374 ; AVX512F-NEXT: vmulss %xmm7, %xmm4, %xmm7
375 ; AVX512F-NEXT: vmulss %xmm5, %xmm9, %xmm12
376 ; AVX512F-NEXT: vaddss %xmm7, %xmm12, %xmm7
377 ; AVX512F-NEXT: vmulss %xmm11, %xmm10, %xmm11
378 ; AVX512F-NEXT: vaddss %xmm7, %xmm11, %xmm7
379 ; AVX512F-NEXT: vinsertps {{.*#+}} xmm7 = xmm8[0,1],xmm7[0],xmm8[3]
380 ; AVX512F-NEXT: vshufps {{.*#+}} xmm8 = xmm9[3,3,3,3]
381 ; AVX512F-NEXT: vshufpd {{.*#+}} xmm11 = xmm9[1,0]
382 ; AVX512F-NEXT: vshufps {{.*#+}} xmm9 = xmm9[2,2,2,2]
383 ; AVX512F-NEXT: vmulps %xmm0, %xmm9, %xmm0
384 ; AVX512F-NEXT: vmulps %xmm2, %xmm8, %xmm2
385 ; AVX512F-NEXT: vaddps %xmm2, %xmm0, %xmm0
386 ; AVX512F-NEXT: vextractf32x4 $2, %zmm1, %xmm1
387 ; AVX512F-NEXT: vbroadcastss %xmm1, %xmm2
388 ; AVX512F-NEXT: vmulps %xmm2, %xmm3, %xmm2
389 ; AVX512F-NEXT: vaddps %xmm2, %xmm0, %xmm0
390 ; AVX512F-NEXT: vmulss %xmm4, %xmm11, %xmm2
391 ; AVX512F-NEXT: vmulss %xmm5, %xmm8, %xmm3
392 ; AVX512F-NEXT: vaddss %xmm3, %xmm2, %xmm2
393 ; AVX512F-NEXT: vmulss %xmm1, %xmm10, %xmm1
394 ; AVX512F-NEXT: vaddss %xmm1, %xmm2, %xmm1
395 ; AVX512F-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0,1],xmm1[0],xmm0[3]
396 ; AVX512F-NEXT: vinsertf32x4 $1, %xmm7, %zmm6, %zmm2
397 ; AVX512F-NEXT: vmovaps {{.*#+}} zmm0 = [0,1,2,4,5,6,16,17,18,u,u,u,u,u,u,u]
398 ; AVX512F-NEXT: vpermi2ps %zmm1, %zmm2, %zmm0
401 ; AVX512VL-LABEL: test_mul3x3_f32:
402 ; AVX512VL: # %bb.0: # %entry
403 ; AVX512VL-NEXT: valignd {{.*#+}} zmm2 = zmm0[3,4,5,6,7,8,9,10,11,12,13,14,15,0,1,2]
404 ; AVX512VL-NEXT: vbroadcastss %xmm1, %xmm3
405 ; AVX512VL-NEXT: vmulps %xmm3, %xmm0, %xmm3
406 ; AVX512VL-NEXT: vextractf128 $1, %ymm0, %xmm4
407 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm5 = xmm1[1,1,3,3]
408 ; AVX512VL-NEXT: vmulps %xmm5, %xmm2, %xmm6
409 ; AVX512VL-NEXT: vaddps %xmm6, %xmm3, %xmm3
410 ; AVX512VL-NEXT: vshufpd {{.*#+}} xmm6 = xmm4[1,0]
411 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm7 = xmm1[3,3,3,3]
412 ; AVX512VL-NEXT: vshufpd {{.*#+}} xmm8 = xmm1[1,0]
413 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm9 = xmm1[2,2,2,2]
414 ; AVX512VL-NEXT: vmulps %xmm6, %xmm9, %xmm9
415 ; AVX512VL-NEXT: vaddps %xmm3, %xmm9, %xmm3
416 ; AVX512VL-NEXT: vshufpd {{.*#+}} xmm9 = xmm0[1,0]
417 ; AVX512VL-NEXT: vmulss %xmm1, %xmm9, %xmm10
418 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm4 = xmm4[1,1,3,3]
419 ; AVX512VL-NEXT: vmulss %xmm5, %xmm4, %xmm5
420 ; AVX512VL-NEXT: vaddss %xmm5, %xmm10, %xmm5
421 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm0, %xmm10
422 ; AVX512VL-NEXT: vmulss %xmm8, %xmm10, %xmm8
423 ; AVX512VL-NEXT: vaddss %xmm5, %xmm8, %xmm5
424 ; AVX512VL-NEXT: vinsertps {{.*#+}} xmm3 = xmm3[0,1],xmm5[0],xmm3[3]
425 ; AVX512VL-NEXT: vmulps %xmm7, %xmm0, %xmm5
426 ; AVX512VL-NEXT: vextractf128 $1, %ymm1, %xmm8
427 ; AVX512VL-NEXT: vmovsldup {{.*#+}} xmm11 = xmm8[0,0,2,2]
428 ; AVX512VL-NEXT: vmulps %xmm2, %xmm11, %xmm11
429 ; AVX512VL-NEXT: vaddps %xmm5, %xmm11, %xmm5
430 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm11 = xmm8[1,1,3,3]
431 ; AVX512VL-NEXT: vmulps %xmm6, %xmm11, %xmm12
432 ; AVX512VL-NEXT: vaddps %xmm5, %xmm12, %xmm5
433 ; AVX512VL-NEXT: vmulss %xmm7, %xmm9, %xmm7
434 ; AVX512VL-NEXT: vmulss %xmm4, %xmm8, %xmm12
435 ; AVX512VL-NEXT: vaddss %xmm7, %xmm12, %xmm7
436 ; AVX512VL-NEXT: vmulss %xmm11, %xmm10, %xmm11
437 ; AVX512VL-NEXT: vaddss %xmm7, %xmm11, %xmm7
438 ; AVX512VL-NEXT: vinsertps {{.*#+}} xmm5 = xmm5[0,1],xmm7[0],xmm5[3]
439 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm7 = xmm8[3,3,3,3]
440 ; AVX512VL-NEXT: vshufpd {{.*#+}} xmm11 = xmm8[1,0]
441 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm8 = xmm8[2,2,2,2]
442 ; AVX512VL-NEXT: vmulps %xmm0, %xmm8, %xmm0
443 ; AVX512VL-NEXT: vmulps %xmm7, %xmm2, %xmm2
444 ; AVX512VL-NEXT: vaddps %xmm2, %xmm0, %xmm0
445 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm1, %xmm1
446 ; AVX512VL-NEXT: vbroadcastss %xmm1, %xmm2
447 ; AVX512VL-NEXT: vmulps %xmm2, %xmm6, %xmm2
448 ; AVX512VL-NEXT: vaddps %xmm2, %xmm0, %xmm0
449 ; AVX512VL-NEXT: vmulss %xmm11, %xmm9, %xmm2
450 ; AVX512VL-NEXT: vmulss %xmm7, %xmm4, %xmm4
451 ; AVX512VL-NEXT: vaddss %xmm4, %xmm2, %xmm2
452 ; AVX512VL-NEXT: vmulss %xmm1, %xmm10, %xmm1
453 ; AVX512VL-NEXT: vaddss %xmm1, %xmm2, %xmm1
454 ; AVX512VL-NEXT: vinsertps {{.*#+}} xmm1 = xmm0[0,1],xmm1[0],xmm0[3]
455 ; AVX512VL-NEXT: vinsertf32x4 $1, %xmm5, %zmm3, %zmm2
456 ; AVX512VL-NEXT: vmovaps {{.*#+}} zmm0 = [0,1,2,4,5,6,16,17,18,u,u,u,u,u,u,u]
457 ; AVX512VL-NEXT: vpermi2ps %zmm1, %zmm2, %zmm0
458 ; AVX512VL-NEXT: retq
460 %block = shufflevector <9 x float> %a0, <9 x float> poison, <2 x i32> <i32 0, i32 1>
461 %splat.splat = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> zeroinitializer
462 %0 = fmul <2 x float> %block, %splat.splat
463 %block6 = shufflevector <9 x float> %a0, <9 x float> poison, <2 x i32> <i32 3, i32 4>
464 %splat.splat8 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 1, i32 1>
465 %1 = fmul <2 x float> %block6, %splat.splat8
466 %2 = fadd <2 x float> %0, %1
467 %block9 = shufflevector <9 x float> %a0, <9 x float> poison, <2 x i32> <i32 6, i32 7>
468 %splat.splat11 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 2, i32 2>
469 %3 = fmul <2 x float> %block9, %splat.splat11
470 %4 = fadd <2 x float> %2, %3
471 %5 = shufflevector <2 x float> %4, <2 x float> poison, <3 x i32> <i32 0, i32 1, i32 undef>
472 %block12 = shufflevector <9 x float> %a0, <9 x float> poison, <1 x i32> <i32 2>
473 %splat.splatinsert13 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> zeroinitializer
474 %6 = fmul <1 x float> %block12, %splat.splatinsert13
475 %block15 = shufflevector <9 x float> %a0, <9 x float> poison, <1 x i32> <i32 5>
476 %splat.splatinsert16 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 1>
477 %7 = fmul <1 x float> %block15, %splat.splatinsert16
478 %8 = fadd <1 x float> %6, %7
479 %block18 = shufflevector <9 x float> %a0, <9 x float> poison, <1 x i32> <i32 8>
480 %splat.splatinsert19 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 2>
481 %9 = fmul <1 x float> %block18, %splat.splatinsert19
482 %10 = fadd <1 x float> %8, %9
483 %11 = shufflevector <1 x float> %10, <1 x float> poison, <3 x i32> <i32 0, i32 undef, i32 undef>
484 %12 = shufflevector <3 x float> %5, <3 x float> %11, <3 x i32> <i32 0, i32 1, i32 3>
485 %splat.splat23 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 3, i32 3>
486 %13 = fmul <2 x float> %block, %splat.splat23
487 %splat.splat26 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 4, i32 4>
488 %14 = fmul <2 x float> %block6, %splat.splat26
489 %15 = fadd <2 x float> %13, %14
490 %splat.splat29 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 5, i32 5>
491 %16 = fmul <2 x float> %block9, %splat.splat29
492 %17 = fadd <2 x float> %15, %16
493 %18 = shufflevector <2 x float> %17, <2 x float> poison, <3 x i32> <i32 0, i32 1, i32 undef>
494 %splat.splatinsert31 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 3>
495 %19 = fmul <1 x float> %block12, %splat.splatinsert31
496 %splat.splatinsert34 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 4>
497 %20 = fmul <1 x float> %block15, %splat.splatinsert34
498 %21 = fadd <1 x float> %19, %20
499 %splat.splatinsert37 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 5>
500 %22 = fmul <1 x float> %block18, %splat.splatinsert37
501 %23 = fadd <1 x float> %21, %22
502 %24 = shufflevector <1 x float> %23, <1 x float> poison, <3 x i32> <i32 0, i32 undef, i32 undef>
503 %25 = shufflevector <3 x float> %18, <3 x float> %24, <3 x i32> <i32 0, i32 1, i32 3>
504 %splat.splat41 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 6, i32 6>
505 %26 = fmul <2 x float> %block, %splat.splat41
506 %splat.splat44 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 7, i32 7>
507 %27 = fmul <2 x float> %block6, %splat.splat44
508 %28 = fadd <2 x float> %26, %27
509 %splat.splat47 = shufflevector <9 x float> %a1, <9 x float> undef, <2 x i32> <i32 8, i32 8>
510 %29 = fmul <2 x float> %block9, %splat.splat47
511 %30 = fadd <2 x float> %28, %29
512 %31 = shufflevector <2 x float> %30, <2 x float> poison, <3 x i32> <i32 0, i32 1, i32 undef>
513 %splat.splatinsert49 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 6>
514 %32 = fmul <1 x float> %block12, %splat.splatinsert49
515 %splat.splatinsert52 = shufflevector <9 x float> %a1, <9 x float> undef, <1 x i32> <i32 7>
516 %33 = fmul <1 x float> %block15, %splat.splatinsert52
517 %34 = fadd <1 x float> %32, %33
518 %35 = fmul <9 x float> %a0, %a1
519 %36 = shufflevector <9 x float> %35, <9 x float> poison, <1 x i32> <i32 8>
520 %37 = fadd <1 x float> %34, %36
521 %38 = shufflevector <1 x float> %37, <1 x float> poison, <3 x i32> <i32 0, i32 undef, i32 undef>
522 %39 = shufflevector <3 x float> %31, <3 x float> %38, <3 x i32> <i32 0, i32 1, i32 3>
523 %40 = shufflevector <3 x float> %12, <3 x float> %25, <6 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5>
524 %41 = shufflevector <3 x float> %39, <3 x float> poison, <6 x i32> <i32 0, i32 1, i32 2, i32 undef, i32 undef, i32 undef>
525 %42 = shufflevector <6 x float> %40, <6 x float> %41, <9 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8>
529 define <9 x double> @test_mul3x3_f64(<9 x double> %a0, <9 x double> %a1) nounwind {
530 ; SSE-LABEL: test_mul3x3_f64:
531 ; SSE: # %bb.0: # %entry
532 ; SSE-NEXT: movq %rdi, %rax
533 ; SSE-NEXT: movsd {{.*#+}} xmm8 = mem[0],zero
534 ; SSE-NEXT: movsd {{.*#+}} xmm10 = mem[0],zero
535 ; SSE-NEXT: movsd {{.*#+}} xmm9 = mem[0],zero
536 ; SSE-NEXT: movsd {{.*#+}} xmm11 = mem[0],zero
537 ; SSE-NEXT: movsd {{.*#+}} xmm12 = mem[0],zero
538 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
539 ; SSE-NEXT: movapd %xmm2, %xmm13
540 ; SSE-NEXT: mulsd %xmm12, %xmm13
541 ; SSE-NEXT: unpcklpd {{.*#+}} xmm12 = xmm12[0,0]
542 ; SSE-NEXT: mulpd %xmm0, %xmm12
543 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm4[0]
544 ; SSE-NEXT: movapd %xmm5, %xmm1
545 ; SSE-NEXT: mulsd %xmm11, %xmm1
546 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0,0]
547 ; SSE-NEXT: mulpd %xmm3, %xmm11
548 ; SSE-NEXT: addpd %xmm12, %xmm11
549 ; SSE-NEXT: movsd {{.*#+}} xmm4 = mem[0],zero
550 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm7[0]
551 ; SSE-NEXT: movapd %xmm9, %xmm7
552 ; SSE-NEXT: mulsd %xmm4, %xmm7
553 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0,0]
554 ; SSE-NEXT: mulpd %xmm6, %xmm4
555 ; SSE-NEXT: addpd %xmm11, %xmm4
556 ; SSE-NEXT: movsd {{.*#+}} xmm11 = mem[0],zero
557 ; SSE-NEXT: addsd %xmm13, %xmm1
558 ; SSE-NEXT: addsd %xmm7, %xmm1
559 ; SSE-NEXT: movapd %xmm2, %xmm12
560 ; SSE-NEXT: mulsd %xmm11, %xmm12
561 ; SSE-NEXT: unpcklpd {{.*#+}} xmm11 = xmm11[0,0]
562 ; SSE-NEXT: mulpd %xmm0, %xmm11
563 ; SSE-NEXT: movapd %xmm5, %xmm7
564 ; SSE-NEXT: mulsd %xmm10, %xmm7
565 ; SSE-NEXT: unpcklpd {{.*#+}} xmm10 = xmm10[0,0]
566 ; SSE-NEXT: mulpd %xmm3, %xmm10
567 ; SSE-NEXT: addpd %xmm11, %xmm10
568 ; SSE-NEXT: movapd %xmm9, %xmm11
569 ; SSE-NEXT: mulsd %xmm8, %xmm11
570 ; SSE-NEXT: unpcklpd {{.*#+}} xmm8 = xmm8[0,0]
571 ; SSE-NEXT: mulpd %xmm6, %xmm8
572 ; SSE-NEXT: addpd %xmm10, %xmm8
573 ; SSE-NEXT: addsd %xmm12, %xmm7
574 ; SSE-NEXT: addsd %xmm11, %xmm7
575 ; SSE-NEXT: movsd {{.*#+}} xmm10 = mem[0],zero
576 ; SSE-NEXT: mulsd %xmm10, %xmm2
577 ; SSE-NEXT: unpcklpd {{.*#+}} xmm10 = xmm10[0,0]
578 ; SSE-NEXT: mulpd %xmm0, %xmm10
579 ; SSE-NEXT: movsd {{.*#+}} xmm0 = mem[0],zero
580 ; SSE-NEXT: mulsd %xmm0, %xmm5
581 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0,0]
582 ; SSE-NEXT: mulpd %xmm3, %xmm0
583 ; SSE-NEXT: addpd %xmm10, %xmm0
584 ; SSE-NEXT: movsd {{.*#+}} xmm3 = mem[0],zero
585 ; SSE-NEXT: mulsd %xmm3, %xmm9
586 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0,0]
587 ; SSE-NEXT: mulpd %xmm6, %xmm3
588 ; SSE-NEXT: addpd %xmm0, %xmm3
589 ; SSE-NEXT: addsd %xmm2, %xmm5
590 ; SSE-NEXT: addsd %xmm9, %xmm5
591 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm8[0]
592 ; SSE-NEXT: shufpd {{.*#+}} xmm8 = xmm8[1],xmm7[0]
593 ; SSE-NEXT: movsd %xmm5, 64(%rdi)
594 ; SSE-NEXT: movapd %xmm3, 48(%rdi)
595 ; SSE-NEXT: movapd %xmm4, (%rdi)
596 ; SSE-NEXT: movapd %xmm8, 32(%rdi)
597 ; SSE-NEXT: movapd %xmm1, 16(%rdi)
600 ; AVX1-LABEL: test_mul3x3_f64:
601 ; AVX1: # %bb.0: # %entry
602 ; AVX1-NEXT: movq %rdi, %rax
603 ; AVX1-NEXT: vmovsd {{.*#+}} xmm8 = mem[0],zero
604 ; AVX1-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],xmm1[0]
605 ; AVX1-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
606 ; AVX1-NEXT: vmulpd %xmm1, %xmm9, %xmm0
607 ; AVX1-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],xmm4[0]
608 ; AVX1-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
609 ; AVX1-NEXT: vmulpd %xmm4, %xmm3, %xmm10
610 ; AVX1-NEXT: vaddpd %xmm0, %xmm10, %xmm0
611 ; AVX1-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm6[0],xmm7[0]
612 ; AVX1-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
613 ; AVX1-NEXT: vmulpd %xmm7, %xmm6, %xmm10
614 ; AVX1-NEXT: vaddpd %xmm0, %xmm10, %xmm0
615 ; AVX1-NEXT: vmulsd %xmm2, %xmm9, %xmm9
616 ; AVX1-NEXT: vmulsd %xmm4, %xmm5, %xmm4
617 ; AVX1-NEXT: vaddsd %xmm4, %xmm9, %xmm4
618 ; AVX1-NEXT: vmulsd %xmm7, %xmm8, %xmm7
619 ; AVX1-NEXT: vaddsd %xmm7, %xmm4, %xmm4
620 ; AVX1-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
621 ; AVX1-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
622 ; AVX1-NEXT: vmulpd %xmm7, %xmm1, %xmm9
623 ; AVX1-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
624 ; AVX1-NEXT: vmulpd %xmm3, %xmm10, %xmm11
625 ; AVX1-NEXT: vaddpd %xmm11, %xmm9, %xmm9
626 ; AVX1-NEXT: vmovddup {{.*#+}} xmm11 = mem[0,0]
627 ; AVX1-NEXT: vmulpd %xmm6, %xmm11, %xmm12
628 ; AVX1-NEXT: vaddpd %xmm12, %xmm9, %xmm9
629 ; AVX1-NEXT: vmulsd %xmm7, %xmm2, %xmm7
630 ; AVX1-NEXT: vmulsd %xmm5, %xmm10, %xmm10
631 ; AVX1-NEXT: vaddsd %xmm7, %xmm10, %xmm7
632 ; AVX1-NEXT: vmulsd %xmm11, %xmm8, %xmm10
633 ; AVX1-NEXT: vaddsd %xmm7, %xmm10, %xmm7
634 ; AVX1-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
635 ; AVX1-NEXT: vmulpd %xmm1, %xmm10, %xmm1
636 ; AVX1-NEXT: vmovddup {{.*#+}} xmm11 = mem[0,0]
637 ; AVX1-NEXT: vmulpd %xmm3, %xmm11, %xmm3
638 ; AVX1-NEXT: vaddpd %xmm3, %xmm1, %xmm1
639 ; AVX1-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
640 ; AVX1-NEXT: vmulpd %xmm3, %xmm6, %xmm6
641 ; AVX1-NEXT: vaddpd %xmm6, %xmm1, %xmm1
642 ; AVX1-NEXT: vmulsd %xmm2, %xmm10, %xmm2
643 ; AVX1-NEXT: vmulsd %xmm5, %xmm11, %xmm5
644 ; AVX1-NEXT: vaddsd %xmm5, %xmm2, %xmm2
645 ; AVX1-NEXT: vmulsd %xmm3, %xmm8, %xmm3
646 ; AVX1-NEXT: vaddsd %xmm3, %xmm2, %xmm2
647 ; AVX1-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
648 ; AVX1-NEXT: vshufpd {{.*#+}} ymm0 = ymm4[0],ymm0[1],ymm4[2],ymm0[2]
649 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm3
650 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm9, %ymm1
651 ; AVX1-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[1],ymm3[0],ymm1[2],ymm3[3]
652 ; AVX1-NEXT: vmovsd %xmm2, 64(%rdi)
653 ; AVX1-NEXT: vmovapd %ymm1, 32(%rdi)
654 ; AVX1-NEXT: vmovapd %ymm0, (%rdi)
655 ; AVX1-NEXT: vzeroupper
658 ; AVX2-LABEL: test_mul3x3_f64:
659 ; AVX2: # %bb.0: # %entry
660 ; AVX2-NEXT: movq %rdi, %rax
661 ; AVX2-NEXT: vmovsd {{.*#+}} xmm8 = mem[0],zero
662 ; AVX2-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm0[0],xmm1[0]
663 ; AVX2-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
664 ; AVX2-NEXT: vmulpd %xmm1, %xmm9, %xmm0
665 ; AVX2-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],xmm4[0]
666 ; AVX2-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
667 ; AVX2-NEXT: vmulpd %xmm4, %xmm3, %xmm10
668 ; AVX2-NEXT: vaddpd %xmm0, %xmm10, %xmm0
669 ; AVX2-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm6[0],xmm7[0]
670 ; AVX2-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
671 ; AVX2-NEXT: vmulpd %xmm7, %xmm6, %xmm10
672 ; AVX2-NEXT: vaddpd %xmm0, %xmm10, %xmm0
673 ; AVX2-NEXT: vmulsd %xmm2, %xmm9, %xmm9
674 ; AVX2-NEXT: vmulsd %xmm4, %xmm5, %xmm4
675 ; AVX2-NEXT: vaddsd %xmm4, %xmm9, %xmm4
676 ; AVX2-NEXT: vmulsd %xmm7, %xmm8, %xmm7
677 ; AVX2-NEXT: vaddsd %xmm7, %xmm4, %xmm4
678 ; AVX2-NEXT: vinsertf128 $1, %xmm4, %ymm0, %ymm4
679 ; AVX2-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
680 ; AVX2-NEXT: vmulpd %xmm7, %xmm1, %xmm9
681 ; AVX2-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
682 ; AVX2-NEXT: vmulpd %xmm3, %xmm10, %xmm11
683 ; AVX2-NEXT: vaddpd %xmm11, %xmm9, %xmm9
684 ; AVX2-NEXT: vmovddup {{.*#+}} xmm11 = mem[0,0]
685 ; AVX2-NEXT: vmulpd %xmm6, %xmm11, %xmm12
686 ; AVX2-NEXT: vaddpd %xmm12, %xmm9, %xmm9
687 ; AVX2-NEXT: vmulsd %xmm7, %xmm2, %xmm7
688 ; AVX2-NEXT: vmulsd %xmm5, %xmm10, %xmm10
689 ; AVX2-NEXT: vaddsd %xmm7, %xmm10, %xmm7
690 ; AVX2-NEXT: vmulsd %xmm11, %xmm8, %xmm10
691 ; AVX2-NEXT: vaddsd %xmm7, %xmm10, %xmm7
692 ; AVX2-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
693 ; AVX2-NEXT: vmulpd %xmm1, %xmm10, %xmm1
694 ; AVX2-NEXT: vmovddup {{.*#+}} xmm11 = mem[0,0]
695 ; AVX2-NEXT: vmulpd %xmm3, %xmm11, %xmm3
696 ; AVX2-NEXT: vaddpd %xmm3, %xmm1, %xmm1
697 ; AVX2-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
698 ; AVX2-NEXT: vmulpd %xmm3, %xmm6, %xmm6
699 ; AVX2-NEXT: vaddpd %xmm6, %xmm1, %xmm1
700 ; AVX2-NEXT: vmulsd %xmm2, %xmm10, %xmm2
701 ; AVX2-NEXT: vmulsd %xmm5, %xmm11, %xmm5
702 ; AVX2-NEXT: vaddsd %xmm5, %xmm2, %xmm2
703 ; AVX2-NEXT: vmulsd %xmm3, %xmm8, %xmm3
704 ; AVX2-NEXT: vaddsd %xmm3, %xmm2, %xmm2
705 ; AVX2-NEXT: vinsertf128 $1, %xmm9, %ymm0, %ymm0
706 ; AVX2-NEXT: vshufpd {{.*#+}} ymm0 = ymm4[0],ymm0[1],ymm4[2],ymm0[2]
707 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm3
708 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm9, %ymm1
709 ; AVX2-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[1],ymm3[0],ymm1[2],ymm3[3]
710 ; AVX2-NEXT: vmovsd %xmm2, 64(%rdi)
711 ; AVX2-NEXT: vmovapd %ymm1, 32(%rdi)
712 ; AVX2-NEXT: vmovapd %ymm0, (%rdi)
713 ; AVX2-NEXT: vzeroupper
716 ; AVX512F-LABEL: test_mul3x3_f64:
717 ; AVX512F: # %bb.0: # %entry
718 ; AVX512F-NEXT: movq %rdi, %rax
719 ; AVX512F-NEXT: vmovsd {{.*#+}} xmm8 = mem[0],zero
720 ; AVX512F-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
721 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
722 ; AVX512F-NEXT: vmulpd %xmm0, %xmm9, %xmm10
723 ; AVX512F-NEXT: vunpcklpd {{.*#+}} xmm1 = xmm3[0],xmm4[0]
724 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
725 ; AVX512F-NEXT: vmulpd %xmm3, %xmm1, %xmm4
726 ; AVX512F-NEXT: vaddpd %xmm4, %xmm10, %xmm4
727 ; AVX512F-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm6[0],xmm7[0]
728 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
729 ; AVX512F-NEXT: vmulpd %xmm7, %xmm6, %xmm10
730 ; AVX512F-NEXT: vaddpd %xmm4, %xmm10, %xmm4
731 ; AVX512F-NEXT: vmulsd %xmm2, %xmm9, %xmm9
732 ; AVX512F-NEXT: vmulsd %xmm3, %xmm5, %xmm3
733 ; AVX512F-NEXT: vaddsd %xmm3, %xmm9, %xmm3
734 ; AVX512F-NEXT: vmulsd %xmm7, %xmm8, %xmm7
735 ; AVX512F-NEXT: vaddsd %xmm7, %xmm3, %xmm3
736 ; AVX512F-NEXT: vinsertf128 $1, %xmm3, %ymm4, %ymm3
737 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
738 ; AVX512F-NEXT: vmulpd %xmm4, %xmm0, %xmm7
739 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
740 ; AVX512F-NEXT: vmulpd %xmm1, %xmm9, %xmm10
741 ; AVX512F-NEXT: vaddpd %xmm7, %xmm10, %xmm7
742 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
743 ; AVX512F-NEXT: vmulpd %xmm6, %xmm10, %xmm11
744 ; AVX512F-NEXT: vaddpd %xmm7, %xmm11, %xmm7
745 ; AVX512F-NEXT: vmulsd %xmm4, %xmm2, %xmm4
746 ; AVX512F-NEXT: vmulsd %xmm5, %xmm9, %xmm9
747 ; AVX512F-NEXT: vaddsd %xmm4, %xmm9, %xmm4
748 ; AVX512F-NEXT: vmulsd %xmm10, %xmm8, %xmm9
749 ; AVX512F-NEXT: vaddsd %xmm4, %xmm9, %xmm4
750 ; AVX512F-NEXT: vinsertf128 $1, %xmm4, %ymm7, %ymm4
751 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
752 ; AVX512F-NEXT: vmulpd %xmm7, %xmm0, %xmm0
753 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
754 ; AVX512F-NEXT: vmulpd %xmm1, %xmm9, %xmm1
755 ; AVX512F-NEXT: vaddpd %xmm1, %xmm0, %xmm0
756 ; AVX512F-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
757 ; AVX512F-NEXT: vmulpd %xmm1, %xmm6, %xmm6
758 ; AVX512F-NEXT: vaddpd %xmm6, %xmm0, %xmm0
759 ; AVX512F-NEXT: vmulsd %xmm7, %xmm2, %xmm2
760 ; AVX512F-NEXT: vmulsd %xmm5, %xmm9, %xmm5
761 ; AVX512F-NEXT: vaddsd %xmm5, %xmm2, %xmm2
762 ; AVX512F-NEXT: vmulsd %xmm1, %xmm8, %xmm1
763 ; AVX512F-NEXT: vaddsd %xmm1, %xmm2, %xmm1
764 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm4, %zmm3, %zmm2
765 ; AVX512F-NEXT: vmovapd {{.*#+}} zmm3 = [0,1,2,4,5,6,8,9]
766 ; AVX512F-NEXT: vpermi2pd %zmm0, %zmm2, %zmm3
767 ; AVX512F-NEXT: vmovsd %xmm1, 64(%rdi)
768 ; AVX512F-NEXT: vmovapd %zmm3, (%rdi)
769 ; AVX512F-NEXT: vzeroupper
772 ; AVX512VL-LABEL: test_mul3x3_f64:
773 ; AVX512VL: # %bb.0: # %entry
774 ; AVX512VL-NEXT: movq %rdi, %rax
775 ; AVX512VL-NEXT: vmovsd {{.*#+}} xmm8 = mem[0],zero
776 ; AVX512VL-NEXT: vunpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
777 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm1 = mem[0,0]
778 ; AVX512VL-NEXT: vmulpd %xmm1, %xmm0, %xmm9
779 ; AVX512VL-NEXT: vunpcklpd {{.*#+}} xmm3 = xmm3[0],xmm4[0]
780 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
781 ; AVX512VL-NEXT: vmulpd %xmm4, %xmm3, %xmm10
782 ; AVX512VL-NEXT: vaddpd %xmm10, %xmm9, %xmm9
783 ; AVX512VL-NEXT: vunpcklpd {{.*#+}} xmm6 = xmm6[0],xmm7[0]
784 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
785 ; AVX512VL-NEXT: vmulpd %xmm7, %xmm6, %xmm10
786 ; AVX512VL-NEXT: vaddpd %xmm10, %xmm9, %xmm9
787 ; AVX512VL-NEXT: vmulsd %xmm1, %xmm2, %xmm1
788 ; AVX512VL-NEXT: vmulsd %xmm4, %xmm5, %xmm4
789 ; AVX512VL-NEXT: vaddsd %xmm4, %xmm1, %xmm1
790 ; AVX512VL-NEXT: vmulsd %xmm7, %xmm8, %xmm4
791 ; AVX512VL-NEXT: vaddsd %xmm4, %xmm1, %xmm1
792 ; AVX512VL-NEXT: vinsertf128 $1, %xmm1, %ymm9, %ymm1
793 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm4 = mem[0,0]
794 ; AVX512VL-NEXT: vmulpd %xmm4, %xmm0, %xmm7
795 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
796 ; AVX512VL-NEXT: vmulpd %xmm3, %xmm9, %xmm10
797 ; AVX512VL-NEXT: vaddpd %xmm7, %xmm10, %xmm7
798 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm10 = mem[0,0]
799 ; AVX512VL-NEXT: vmulpd %xmm6, %xmm10, %xmm11
800 ; AVX512VL-NEXT: vaddpd %xmm7, %xmm11, %xmm7
801 ; AVX512VL-NEXT: vmulsd %xmm4, %xmm2, %xmm4
802 ; AVX512VL-NEXT: vmulsd %xmm5, %xmm9, %xmm9
803 ; AVX512VL-NEXT: vaddsd %xmm4, %xmm9, %xmm4
804 ; AVX512VL-NEXT: vmulsd %xmm10, %xmm8, %xmm9
805 ; AVX512VL-NEXT: vaddsd %xmm4, %xmm9, %xmm4
806 ; AVX512VL-NEXT: vinsertf128 $1, %xmm4, %ymm7, %ymm4
807 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm7 = mem[0,0]
808 ; AVX512VL-NEXT: vmulpd %xmm7, %xmm0, %xmm0
809 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm9 = mem[0,0]
810 ; AVX512VL-NEXT: vmulpd %xmm3, %xmm9, %xmm3
811 ; AVX512VL-NEXT: vaddpd %xmm3, %xmm0, %xmm0
812 ; AVX512VL-NEXT: vmovddup {{.*#+}} xmm3 = mem[0,0]
813 ; AVX512VL-NEXT: vmulpd %xmm3, %xmm6, %xmm6
814 ; AVX512VL-NEXT: vaddpd %xmm6, %xmm0, %xmm0
815 ; AVX512VL-NEXT: vmulsd %xmm7, %xmm2, %xmm2
816 ; AVX512VL-NEXT: vmulsd %xmm5, %xmm9, %xmm5
817 ; AVX512VL-NEXT: vaddsd %xmm5, %xmm2, %xmm2
818 ; AVX512VL-NEXT: vmulsd %xmm3, %xmm8, %xmm3
819 ; AVX512VL-NEXT: vaddsd %xmm3, %xmm2, %xmm2
820 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm4, %zmm1, %zmm1
821 ; AVX512VL-NEXT: vmovapd {{.*#+}} zmm3 = [0,1,2,4,5,6,8,9]
822 ; AVX512VL-NEXT: vpermi2pd %zmm0, %zmm1, %zmm3
823 ; AVX512VL-NEXT: vmovsd %xmm2, 64(%rdi)
824 ; AVX512VL-NEXT: vmovapd %zmm3, (%rdi)
825 ; AVX512VL-NEXT: vzeroupper
826 ; AVX512VL-NEXT: retq
828 %block = shufflevector <9 x double> %a0, <9 x double> poison, <2 x i32> <i32 0, i32 1>
829 %splat.splat = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> zeroinitializer
830 %0 = fmul <2 x double> %block, %splat.splat
831 %block6 = shufflevector <9 x double> %a0, <9 x double> poison, <2 x i32> <i32 3, i32 4>
832 %splat.splat8 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 1, i32 1>
833 %1 = fmul <2 x double> %block6, %splat.splat8
834 %2 = fadd <2 x double> %0, %1
835 %block9 = shufflevector <9 x double> %a0, <9 x double> poison, <2 x i32> <i32 6, i32 7>
836 %splat.splat11 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 2, i32 2>
837 %3 = fmul <2 x double> %block9, %splat.splat11
838 %4 = fadd <2 x double> %2, %3
839 %5 = shufflevector <2 x double> %4, <2 x double> poison, <3 x i32> <i32 0, i32 1, i32 undef>
840 %block12 = shufflevector <9 x double> %a0, <9 x double> poison, <1 x i32> <i32 2>
841 %splat.splatinsert13 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> zeroinitializer
842 %6 = fmul <1 x double> %block12, %splat.splatinsert13
843 %block15 = shufflevector <9 x double> %a0, <9 x double> poison, <1 x i32> <i32 5>
844 %splat.splatinsert16 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 1>
845 %7 = fmul <1 x double> %block15, %splat.splatinsert16
846 %8 = fadd <1 x double> %6, %7
847 %block18 = shufflevector <9 x double> %a0, <9 x double> poison, <1 x i32> <i32 8>
848 %splat.splatinsert19 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 2>
849 %9 = fmul <1 x double> %block18, %splat.splatinsert19
850 %10 = fadd <1 x double> %8, %9
851 %11 = shufflevector <1 x double> %10, <1 x double> poison, <3 x i32> <i32 0, i32 undef, i32 undef>
852 %12 = shufflevector <3 x double> %5, <3 x double> %11, <3 x i32> <i32 0, i32 1, i32 3>
853 %splat.splat23 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 3, i32 3>
854 %13 = fmul <2 x double> %block, %splat.splat23
855 %splat.splat26 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 4, i32 4>
856 %14 = fmul <2 x double> %block6, %splat.splat26
857 %15 = fadd <2 x double> %13, %14
858 %splat.splat29 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 5, i32 5>
859 %16 = fmul <2 x double> %block9, %splat.splat29
860 %17 = fadd <2 x double> %15, %16
861 %18 = shufflevector <2 x double> %17, <2 x double> poison, <3 x i32> <i32 0, i32 1, i32 undef>
862 %splat.splatinsert31 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 3>
863 %19 = fmul <1 x double> %block12, %splat.splatinsert31
864 %splat.splatinsert34 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 4>
865 %20 = fmul <1 x double> %block15, %splat.splatinsert34
866 %21 = fadd <1 x double> %19, %20
867 %splat.splatinsert37 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 5>
868 %22 = fmul <1 x double> %block18, %splat.splatinsert37
869 %23 = fadd <1 x double> %21, %22
870 %24 = shufflevector <1 x double> %23, <1 x double> poison, <3 x i32> <i32 0, i32 undef, i32 undef>
871 %25 = shufflevector <3 x double> %18, <3 x double> %24, <3 x i32> <i32 0, i32 1, i32 3>
872 %splat.splat41 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 6, i32 6>
873 %26 = fmul <2 x double> %block, %splat.splat41
874 %splat.splat44 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 7, i32 7>
875 %27 = fmul <2 x double> %block6, %splat.splat44
876 %28 = fadd <2 x double> %26, %27
877 %splat.splat47 = shufflevector <9 x double> %a1, <9 x double> undef, <2 x i32> <i32 8, i32 8>
878 %29 = fmul <2 x double> %block9, %splat.splat47
879 %30 = fadd <2 x double> %28, %29
880 %31 = shufflevector <2 x double> %30, <2 x double> poison, <3 x i32> <i32 0, i32 1, i32 undef>
881 %splat.splatinsert49 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 6>
882 %32 = fmul <1 x double> %block12, %splat.splatinsert49
883 %splat.splatinsert52 = shufflevector <9 x double> %a1, <9 x double> undef, <1 x i32> <i32 7>
884 %33 = fmul <1 x double> %block15, %splat.splatinsert52
885 %34 = fadd <1 x double> %32, %33
886 %35 = fmul <9 x double> %a0, %a1
887 %36 = shufflevector <9 x double> %35, <9 x double> poison, <1 x i32> <i32 8>
888 %37 = fadd <1 x double> %34, %36
889 %38 = shufflevector <1 x double> %37, <1 x double> poison, <3 x i32> <i32 0, i32 undef, i32 undef>
890 %39 = shufflevector <3 x double> %31, <3 x double> %38, <3 x i32> <i32 0, i32 1, i32 3>
891 %40 = shufflevector <3 x double> %12, <3 x double> %25, <6 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5>
892 %41 = shufflevector <3 x double> %39, <3 x double> poison, <6 x i32> <i32 0, i32 1, i32 2, i32 undef, i32 undef, i32 undef>
893 %42 = shufflevector <6 x double> %40, <6 x double> %41, <9 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8>
897 define <16 x float> @test_mul4x4_f32(<16 x float> %a0, <16 x float> %a1) nounwind {
898 ; SSE-LABEL: test_mul4x4_f32:
899 ; SSE: # %bb.0: # %entry
900 ; SSE-NEXT: movaps %xmm0, %xmm9
901 ; SSE-NEXT: movaps %xmm4, %xmm0
902 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[0,0],xmm4[0,0]
903 ; SSE-NEXT: mulps %xmm9, %xmm0
904 ; SSE-NEXT: movaps %xmm4, %xmm8
905 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[1,1],xmm4[1,1]
906 ; SSE-NEXT: mulps %xmm1, %xmm8
907 ; SSE-NEXT: addps %xmm0, %xmm8
908 ; SSE-NEXT: movaps %xmm4, %xmm0
909 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[2,2],xmm4[2,2]
910 ; SSE-NEXT: mulps %xmm2, %xmm0
911 ; SSE-NEXT: addps %xmm8, %xmm0
912 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,3,3,3]
913 ; SSE-NEXT: mulps %xmm3, %xmm4
914 ; SSE-NEXT: addps %xmm4, %xmm0
915 ; SSE-NEXT: movaps %xmm5, %xmm4
916 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,0],xmm5[0,0]
917 ; SSE-NEXT: mulps %xmm9, %xmm4
918 ; SSE-NEXT: movaps %xmm5, %xmm10
919 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[1,1],xmm5[1,1]
920 ; SSE-NEXT: mulps %xmm1, %xmm10
921 ; SSE-NEXT: addps %xmm4, %xmm10
922 ; SSE-NEXT: movaps %xmm5, %xmm8
923 ; SSE-NEXT: shufps {{.*#+}} xmm8 = xmm8[2,2],xmm5[2,2]
924 ; SSE-NEXT: mulps %xmm2, %xmm8
925 ; SSE-NEXT: addps %xmm10, %xmm8
926 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[3,3,3,3]
927 ; SSE-NEXT: mulps %xmm3, %xmm5
928 ; SSE-NEXT: addps %xmm5, %xmm8
929 ; SSE-NEXT: movaps %xmm6, %xmm4
930 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,0],xmm6[0,0]
931 ; SSE-NEXT: mulps %xmm9, %xmm4
932 ; SSE-NEXT: movaps %xmm6, %xmm10
933 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[1,1],xmm6[1,1]
934 ; SSE-NEXT: mulps %xmm1, %xmm10
935 ; SSE-NEXT: addps %xmm4, %xmm10
936 ; SSE-NEXT: movaps %xmm6, %xmm5
937 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[2,2],xmm6[2,2]
938 ; SSE-NEXT: mulps %xmm2, %xmm5
939 ; SSE-NEXT: addps %xmm10, %xmm5
940 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[3,3,3,3]
941 ; SSE-NEXT: mulps %xmm3, %xmm6
942 ; SSE-NEXT: addps %xmm6, %xmm5
943 ; SSE-NEXT: movaps %xmm7, %xmm4
944 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[0,0],xmm7[0,0]
945 ; SSE-NEXT: mulps %xmm9, %xmm4
946 ; SSE-NEXT: movaps %xmm7, %xmm6
947 ; SSE-NEXT: shufps {{.*#+}} xmm6 = xmm6[1,1],xmm7[1,1]
948 ; SSE-NEXT: mulps %xmm1, %xmm6
949 ; SSE-NEXT: addps %xmm4, %xmm6
950 ; SSE-NEXT: movaps %xmm7, %xmm1
951 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm7[2,2]
952 ; SSE-NEXT: mulps %xmm2, %xmm1
953 ; SSE-NEXT: addps %xmm6, %xmm1
954 ; SSE-NEXT: shufps {{.*#+}} xmm7 = xmm7[3,3,3,3]
955 ; SSE-NEXT: mulps %xmm7, %xmm3
956 ; SSE-NEXT: addps %xmm1, %xmm3
957 ; SSE-NEXT: movaps %xmm8, %xmm1
958 ; SSE-NEXT: movaps %xmm5, %xmm2
961 ; AVX1-LABEL: test_mul4x4_f32:
962 ; AVX1: # %bb.0: # %entry
963 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm5
964 ; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm4
965 ; AVX1-NEXT: vshufps {{.*#+}} xmm6 = xmm2[0,0,0,0]
966 ; AVX1-NEXT: vmulps %xmm6, %xmm0, %xmm6
967 ; AVX1-NEXT: vshufps {{.*#+}} xmm7 = xmm2[1,1,1,1]
968 ; AVX1-NEXT: vmulps %xmm7, %xmm5, %xmm7
969 ; AVX1-NEXT: vaddps %xmm7, %xmm6, %xmm6
970 ; AVX1-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
971 ; AVX1-NEXT: vmulps %xmm7, %xmm1, %xmm7
972 ; AVX1-NEXT: vaddps %xmm7, %xmm6, %xmm6
973 ; AVX1-NEXT: vshufps {{.*#+}} xmm7 = xmm2[3,3,3,3]
974 ; AVX1-NEXT: vmulps %xmm7, %xmm4, %xmm7
975 ; AVX1-NEXT: vaddps %xmm7, %xmm6, %xmm6
976 ; AVX1-NEXT: vextractf128 $1, %ymm2, %xmm2
977 ; AVX1-NEXT: vshufps {{.*#+}} xmm7 = xmm2[0,0,0,0]
978 ; AVX1-NEXT: vmulps %xmm7, %xmm0, %xmm7
979 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm2[1,1,1,1]
980 ; AVX1-NEXT: vmulps %xmm5, %xmm8, %xmm8
981 ; AVX1-NEXT: vaddps %xmm7, %xmm8, %xmm7
982 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm2[2,2,2,2]
983 ; AVX1-NEXT: vmulps %xmm1, %xmm8, %xmm8
984 ; AVX1-NEXT: vaddps %xmm7, %xmm8, %xmm7
985 ; AVX1-NEXT: vshufps {{.*#+}} xmm2 = xmm2[3,3,3,3]
986 ; AVX1-NEXT: vmulps %xmm2, %xmm4, %xmm2
987 ; AVX1-NEXT: vaddps %xmm2, %xmm7, %xmm2
988 ; AVX1-NEXT: vshufps {{.*#+}} xmm7 = xmm3[0,0,0,0]
989 ; AVX1-NEXT: vmulps %xmm7, %xmm0, %xmm7
990 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm3[1,1,1,1]
991 ; AVX1-NEXT: vmulps %xmm5, %xmm8, %xmm8
992 ; AVX1-NEXT: vaddps %xmm7, %xmm8, %xmm7
993 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm3[2,2,2,2]
994 ; AVX1-NEXT: vmulps %xmm1, %xmm8, %xmm8
995 ; AVX1-NEXT: vaddps %xmm7, %xmm8, %xmm7
996 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm3[3,3,3,3]
997 ; AVX1-NEXT: vmulps %xmm4, %xmm8, %xmm8
998 ; AVX1-NEXT: vaddps %xmm7, %xmm8, %xmm7
999 ; AVX1-NEXT: vextractf128 $1, %ymm3, %xmm3
1000 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm3[0,0,0,0]
1001 ; AVX1-NEXT: vmulps %xmm0, %xmm8, %xmm0
1002 ; AVX1-NEXT: vshufps {{.*#+}} xmm8 = xmm3[1,1,1,1]
1003 ; AVX1-NEXT: vmulps %xmm5, %xmm8, %xmm5
1004 ; AVX1-NEXT: vaddps %xmm5, %xmm0, %xmm0
1005 ; AVX1-NEXT: vshufps {{.*#+}} xmm5 = xmm3[2,2,2,2]
1006 ; AVX1-NEXT: vmulps %xmm5, %xmm1, %xmm1
1007 ; AVX1-NEXT: vaddps %xmm1, %xmm0, %xmm0
1008 ; AVX1-NEXT: vshufps {{.*#+}} xmm1 = xmm3[3,3,3,3]
1009 ; AVX1-NEXT: vmulps %xmm1, %xmm4, %xmm1
1010 ; AVX1-NEXT: vaddps %xmm1, %xmm0, %xmm1
1011 ; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm0
1012 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm1
1015 ; AVX2-LABEL: test_mul4x4_f32:
1016 ; AVX2: # %bb.0: # %entry
1017 ; AVX2-NEXT: vextractf128 $1, %ymm0, %xmm5
1018 ; AVX2-NEXT: vextractf128 $1, %ymm1, %xmm4
1019 ; AVX2-NEXT: vbroadcastss %xmm2, %xmm6
1020 ; AVX2-NEXT: vmulps %xmm6, %xmm0, %xmm6
1021 ; AVX2-NEXT: vshufps {{.*#+}} xmm7 = xmm2[1,1,1,1]
1022 ; AVX2-NEXT: vmulps %xmm7, %xmm5, %xmm7
1023 ; AVX2-NEXT: vaddps %xmm7, %xmm6, %xmm6
1024 ; AVX2-NEXT: vshufps {{.*#+}} xmm7 = xmm2[2,2,2,2]
1025 ; AVX2-NEXT: vmulps %xmm7, %xmm1, %xmm7
1026 ; AVX2-NEXT: vaddps %xmm7, %xmm6, %xmm6
1027 ; AVX2-NEXT: vshufps {{.*#+}} xmm7 = xmm2[3,3,3,3]
1028 ; AVX2-NEXT: vmulps %xmm7, %xmm4, %xmm7
1029 ; AVX2-NEXT: vaddps %xmm7, %xmm6, %xmm6
1030 ; AVX2-NEXT: vextractf128 $1, %ymm2, %xmm2
1031 ; AVX2-NEXT: vbroadcastss %xmm2, %xmm7
1032 ; AVX2-NEXT: vmulps %xmm7, %xmm0, %xmm7
1033 ; AVX2-NEXT: vshufps {{.*#+}} xmm8 = xmm2[1,1,1,1]
1034 ; AVX2-NEXT: vmulps %xmm5, %xmm8, %xmm8
1035 ; AVX2-NEXT: vaddps %xmm7, %xmm8, %xmm7
1036 ; AVX2-NEXT: vshufps {{.*#+}} xmm8 = xmm2[2,2,2,2]
1037 ; AVX2-NEXT: vmulps %xmm1, %xmm8, %xmm8
1038 ; AVX2-NEXT: vaddps %xmm7, %xmm8, %xmm7
1039 ; AVX2-NEXT: vshufps {{.*#+}} xmm2 = xmm2[3,3,3,3]
1040 ; AVX2-NEXT: vmulps %xmm2, %xmm4, %xmm2
1041 ; AVX2-NEXT: vaddps %xmm2, %xmm7, %xmm2
1042 ; AVX2-NEXT: vbroadcastss %xmm3, %xmm7
1043 ; AVX2-NEXT: vmulps %xmm7, %xmm0, %xmm7
1044 ; AVX2-NEXT: vshufps {{.*#+}} xmm8 = xmm3[1,1,1,1]
1045 ; AVX2-NEXT: vmulps %xmm5, %xmm8, %xmm8
1046 ; AVX2-NEXT: vaddps %xmm7, %xmm8, %xmm7
1047 ; AVX2-NEXT: vshufps {{.*#+}} xmm8 = xmm3[2,2,2,2]
1048 ; AVX2-NEXT: vmulps %xmm1, %xmm8, %xmm8
1049 ; AVX2-NEXT: vaddps %xmm7, %xmm8, %xmm7
1050 ; AVX2-NEXT: vshufps {{.*#+}} xmm8 = xmm3[3,3,3,3]
1051 ; AVX2-NEXT: vmulps %xmm4, %xmm8, %xmm8
1052 ; AVX2-NEXT: vaddps %xmm7, %xmm8, %xmm7
1053 ; AVX2-NEXT: vextractf128 $1, %ymm3, %xmm3
1054 ; AVX2-NEXT: vbroadcastss %xmm3, %xmm8
1055 ; AVX2-NEXT: vmulps %xmm0, %xmm8, %xmm0
1056 ; AVX2-NEXT: vshufps {{.*#+}} xmm8 = xmm3[1,1,1,1]
1057 ; AVX2-NEXT: vmulps %xmm5, %xmm8, %xmm5
1058 ; AVX2-NEXT: vaddps %xmm5, %xmm0, %xmm0
1059 ; AVX2-NEXT: vshufps {{.*#+}} xmm5 = xmm3[2,2,2,2]
1060 ; AVX2-NEXT: vmulps %xmm5, %xmm1, %xmm1
1061 ; AVX2-NEXT: vaddps %xmm1, %xmm0, %xmm0
1062 ; AVX2-NEXT: vshufps {{.*#+}} xmm1 = xmm3[3,3,3,3]
1063 ; AVX2-NEXT: vmulps %xmm1, %xmm4, %xmm1
1064 ; AVX2-NEXT: vaddps %xmm1, %xmm0, %xmm1
1065 ; AVX2-NEXT: vinsertf128 $1, %xmm2, %ymm6, %ymm0
1066 ; AVX2-NEXT: vinsertf128 $1, %xmm1, %ymm7, %ymm1
1069 ; AVX512F-LABEL: test_mul4x4_f32:
1070 ; AVX512F: # %bb.0: # %entry
1071 ; AVX512F-NEXT: vextractf128 $1, %ymm0, %xmm4
1072 ; AVX512F-NEXT: vextractf32x4 $2, %zmm0, %xmm3
1073 ; AVX512F-NEXT: vextractf32x4 $3, %zmm0, %xmm2
1074 ; AVX512F-NEXT: vbroadcastss %xmm1, %xmm5
1075 ; AVX512F-NEXT: vmulps %xmm5, %xmm0, %xmm5
1076 ; AVX512F-NEXT: vshufps {{.*#+}} xmm6 = xmm1[1,1,1,1]
1077 ; AVX512F-NEXT: vmulps %xmm6, %xmm4, %xmm6
1078 ; AVX512F-NEXT: vaddps %xmm6, %xmm5, %xmm5
1079 ; AVX512F-NEXT: vshufps {{.*#+}} xmm6 = xmm1[2,2,2,2]
1080 ; AVX512F-NEXT: vmulps %xmm6, %xmm3, %xmm6
1081 ; AVX512F-NEXT: vaddps %xmm6, %xmm5, %xmm5
1082 ; AVX512F-NEXT: vshufps {{.*#+}} xmm6 = xmm1[3,3,3,3]
1083 ; AVX512F-NEXT: vmulps %xmm6, %xmm2, %xmm6
1084 ; AVX512F-NEXT: vaddps %xmm6, %xmm5, %xmm5
1085 ; AVX512F-NEXT: vextractf128 $1, %ymm1, %xmm6
1086 ; AVX512F-NEXT: vbroadcastss %xmm6, %xmm7
1087 ; AVX512F-NEXT: vmulps %xmm7, %xmm0, %xmm7
1088 ; AVX512F-NEXT: vshufps {{.*#+}} xmm8 = xmm6[1,1,1,1]
1089 ; AVX512F-NEXT: vmulps %xmm4, %xmm8, %xmm8
1090 ; AVX512F-NEXT: vaddps %xmm7, %xmm8, %xmm7
1091 ; AVX512F-NEXT: vshufps {{.*#+}} xmm8 = xmm6[2,2,2,2]
1092 ; AVX512F-NEXT: vmulps %xmm3, %xmm8, %xmm8
1093 ; AVX512F-NEXT: vaddps %xmm7, %xmm8, %xmm7
1094 ; AVX512F-NEXT: vshufps {{.*#+}} xmm6 = xmm6[3,3,3,3]
1095 ; AVX512F-NEXT: vmulps %xmm6, %xmm2, %xmm6
1096 ; AVX512F-NEXT: vaddps %xmm6, %xmm7, %xmm6
1097 ; AVX512F-NEXT: vextractf32x4 $2, %zmm1, %xmm7
1098 ; AVX512F-NEXT: vbroadcastss %xmm7, %xmm8
1099 ; AVX512F-NEXT: vmulps %xmm0, %xmm8, %xmm8
1100 ; AVX512F-NEXT: vshufps {{.*#+}} xmm9 = xmm7[1,1,1,1]
1101 ; AVX512F-NEXT: vmulps %xmm4, %xmm9, %xmm9
1102 ; AVX512F-NEXT: vaddps %xmm9, %xmm8, %xmm8
1103 ; AVX512F-NEXT: vshufps {{.*#+}} xmm9 = xmm7[2,2,2,2]
1104 ; AVX512F-NEXT: vmulps %xmm3, %xmm9, %xmm9
1105 ; AVX512F-NEXT: vaddps %xmm9, %xmm8, %xmm8
1106 ; AVX512F-NEXT: vshufps {{.*#+}} xmm7 = xmm7[3,3,3,3]
1107 ; AVX512F-NEXT: vmulps %xmm7, %xmm2, %xmm7
1108 ; AVX512F-NEXT: vaddps %xmm7, %xmm8, %xmm7
1109 ; AVX512F-NEXT: vextractf32x4 $3, %zmm1, %xmm1
1110 ; AVX512F-NEXT: vbroadcastss %xmm1, %xmm8
1111 ; AVX512F-NEXT: vmulps %xmm0, %xmm8, %xmm0
1112 ; AVX512F-NEXT: vshufps {{.*#+}} xmm8 = xmm1[1,1,1,1]
1113 ; AVX512F-NEXT: vmulps %xmm4, %xmm8, %xmm4
1114 ; AVX512F-NEXT: vaddps %xmm4, %xmm0, %xmm0
1115 ; AVX512F-NEXT: vshufps {{.*#+}} xmm4 = xmm1[2,2,2,2]
1116 ; AVX512F-NEXT: vmulps %xmm4, %xmm3, %xmm3
1117 ; AVX512F-NEXT: vaddps %xmm3, %xmm0, %xmm0
1118 ; AVX512F-NEXT: vshufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
1119 ; AVX512F-NEXT: vmulps %xmm1, %xmm2, %xmm1
1120 ; AVX512F-NEXT: vaddps %xmm1, %xmm0, %xmm0
1121 ; AVX512F-NEXT: vinsertf128 $1, %xmm0, %ymm7, %ymm0
1122 ; AVX512F-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm1
1123 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
1124 ; AVX512F-NEXT: retq
1126 ; AVX512VL-LABEL: test_mul4x4_f32:
1127 ; AVX512VL: # %bb.0: # %entry
1128 ; AVX512VL-NEXT: vextractf128 $1, %ymm0, %xmm2
1129 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm0, %xmm3
1130 ; AVX512VL-NEXT: vextractf32x4 $3, %zmm0, %xmm4
1131 ; AVX512VL-NEXT: vbroadcastss %xmm1, %xmm5
1132 ; AVX512VL-NEXT: vmulps %xmm5, %xmm0, %xmm5
1133 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm6 = xmm1[1,1,1,1]
1134 ; AVX512VL-NEXT: vmulps %xmm6, %xmm2, %xmm6
1135 ; AVX512VL-NEXT: vaddps %xmm6, %xmm5, %xmm5
1136 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm6 = xmm1[2,2,2,2]
1137 ; AVX512VL-NEXT: vmulps %xmm6, %xmm3, %xmm6
1138 ; AVX512VL-NEXT: vaddps %xmm6, %xmm5, %xmm5
1139 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm6 = xmm1[3,3,3,3]
1140 ; AVX512VL-NEXT: vmulps %xmm6, %xmm4, %xmm6
1141 ; AVX512VL-NEXT: vaddps %xmm6, %xmm5, %xmm5
1142 ; AVX512VL-NEXT: vextractf128 $1, %ymm1, %xmm6
1143 ; AVX512VL-NEXT: vbroadcastss %xmm6, %xmm7
1144 ; AVX512VL-NEXT: vmulps %xmm7, %xmm0, %xmm7
1145 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm8 = xmm6[1,1,1,1]
1146 ; AVX512VL-NEXT: vmulps %xmm2, %xmm8, %xmm8
1147 ; AVX512VL-NEXT: vaddps %xmm7, %xmm8, %xmm7
1148 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm8 = xmm6[2,2,2,2]
1149 ; AVX512VL-NEXT: vmulps %xmm3, %xmm8, %xmm8
1150 ; AVX512VL-NEXT: vaddps %xmm7, %xmm8, %xmm7
1151 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm6 = xmm6[3,3,3,3]
1152 ; AVX512VL-NEXT: vmulps %xmm6, %xmm4, %xmm6
1153 ; AVX512VL-NEXT: vaddps %xmm6, %xmm7, %xmm6
1154 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm1, %xmm7
1155 ; AVX512VL-NEXT: vbroadcastss %xmm7, %xmm8
1156 ; AVX512VL-NEXT: vmulps %xmm0, %xmm8, %xmm8
1157 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm9 = xmm7[1,1,1,1]
1158 ; AVX512VL-NEXT: vmulps %xmm2, %xmm9, %xmm9
1159 ; AVX512VL-NEXT: vaddps %xmm9, %xmm8, %xmm8
1160 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm9 = xmm7[2,2,2,2]
1161 ; AVX512VL-NEXT: vmulps %xmm3, %xmm9, %xmm9
1162 ; AVX512VL-NEXT: vaddps %xmm9, %xmm8, %xmm8
1163 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm7 = xmm7[3,3,3,3]
1164 ; AVX512VL-NEXT: vmulps %xmm7, %xmm4, %xmm7
1165 ; AVX512VL-NEXT: vaddps %xmm7, %xmm8, %xmm7
1166 ; AVX512VL-NEXT: vextractf32x4 $3, %zmm1, %xmm1
1167 ; AVX512VL-NEXT: vbroadcastss %xmm1, %xmm8
1168 ; AVX512VL-NEXT: vmulps %xmm0, %xmm8, %xmm0
1169 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm8 = xmm1[1,1,1,1]
1170 ; AVX512VL-NEXT: vmulps %xmm2, %xmm8, %xmm2
1171 ; AVX512VL-NEXT: vaddps %xmm2, %xmm0, %xmm0
1172 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm2 = xmm1[2,2,2,2]
1173 ; AVX512VL-NEXT: vmulps %xmm2, %xmm3, %xmm2
1174 ; AVX512VL-NEXT: vaddps %xmm2, %xmm0, %xmm0
1175 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
1176 ; AVX512VL-NEXT: vmulps %xmm1, %xmm4, %xmm1
1177 ; AVX512VL-NEXT: vaddps %xmm1, %xmm0, %xmm0
1178 ; AVX512VL-NEXT: vinsertf128 $1, %xmm0, %ymm7, %ymm0
1179 ; AVX512VL-NEXT: vinsertf128 $1, %xmm6, %ymm5, %ymm1
1180 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm0, %zmm1, %zmm0
1181 ; AVX512VL-NEXT: retq
1182 entry:
1183 %split = shufflevector <16 x float> %a0, <16 x float> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
1184 %split1 = shufflevector <16 x float> %a0, <16 x float> poison, <4 x i32> <i32 4, i32 5, i32 6, i32 7>
1185 %split2 = shufflevector <16 x float> %a0, <16 x float> poison, <4 x i32> <i32 8, i32 9, i32 10, i32 11>
1186 %split3 = shufflevector <16 x float> %a0, <16 x float> poison, <4 x i32> <i32 12, i32 13, i32 14, i32 15>
1187 %splat.splat = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> zeroinitializer
1188 %0 = fmul <4 x float> %split, %splat.splat
1189 %splat.splat10 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 1, i32 1, i32 1, i32 1>
1190 %1 = fmul <4 x float> %split1, %splat.splat10
1191 %2 = fadd <4 x float> %0, %1
1192 %splat.splat13 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 2, i32 2, i32 2, i32 2>
1193 %3 = fmul <4 x float> %split2, %splat.splat13
1194 %4 = fadd <4 x float> %2, %3
1195 %splat.splat16 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 3, i32 3, i32 3, i32 3>
1196 %5 = fmul <4 x float> %split3, %splat.splat16
1197 %6 = fadd <4 x float> %4, %5
1198 %splat.splat19 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 4, i32 4, i32 4, i32 4>
1199 %7 = fmul <4 x float> %split, %splat.splat19
1200 %splat.splat22 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 5, i32 5, i32 5, i32 5>
1201 %8 = fmul <4 x float> %split1, %splat.splat22
1202 %9 = fadd <4 x float> %7, %8
1203 %splat.splat25 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 6, i32 6, i32 6, i32 6>
1204 %10 = fmul <4 x float> %split2, %splat.splat25
1205 %11 = fadd <4 x float> %9, %10
1206 %splat.splat28 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 7, i32 7, i32 7, i32 7>
1207 %12 = fmul <4 x float> %split3, %splat.splat28
1208 %13 = fadd <4 x float> %11, %12
1209 %splat.splat31 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 8, i32 8, i32 8, i32 8>
1210 %14 = fmul <4 x float> %split, %splat.splat31
1211 %splat.splat34 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 9, i32 9, i32 9, i32 9>
1212 %15 = fmul <4 x float> %split1, %splat.splat34
1213 %16 = fadd <4 x float> %14, %15
1214 %splat.splat37 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 10, i32 10, i32 10, i32 10>
1215 %17 = fmul <4 x float> %split2, %splat.splat37
1216 %18 = fadd <4 x float> %16, %17
1217 %splat.splat40 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 11, i32 11, i32 11, i32 11>
1218 %19 = fmul <4 x float> %split3, %splat.splat40
1219 %20 = fadd <4 x float> %18, %19
1220 %splat.splat43 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 12, i32 12, i32 12, i32 12>
1221 %21 = fmul <4 x float> %split, %splat.splat43
1222 %splat.splat46 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 13, i32 13, i32 13, i32 13>
1223 %22 = fmul <4 x float> %split1, %splat.splat46
1224 %23 = fadd <4 x float> %21, %22
1225 %splat.splat49 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 14, i32 14, i32 14, i32 14>
1226 %24 = fmul <4 x float> %split2, %splat.splat49
1227 %25 = fadd <4 x float> %23, %24
1228 %splat.splat52 = shufflevector <16 x float> %a1, <16 x float> undef, <4 x i32> <i32 15, i32 15, i32 15, i32 15>
1229 %26 = fmul <4 x float> %split3, %splat.splat52
1230 %27 = fadd <4 x float> %25, %26
1231 %28 = shufflevector <4 x float> %6, <4 x float> %13, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
1232 %29 = shufflevector <4 x float> %20, <4 x float> %27, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
1233 %30 = shufflevector <8 x float> %28, <8 x float> %29, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1234 ret <16 x float> %30
1235 }
1237 define <16 x double> @test_mul4x4_f64(<16 x double> %a0, <16 x double> %a1) nounwind {
1238 ; SSE-LABEL: test_mul4x4_f64:
1239 ; SSE: # %bb.0: # %entry
1240 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1241 ; SSE-NEXT: movapd %xmm5, %xmm6
1242 ; SSE-NEXT: movapd %xmm4, %xmm5
1243 ; SSE-NEXT: movq %rdi, %rax
1244 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm11
1245 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
1246 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
1247 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm8
1248 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm10
1249 ; SSE-NEXT: movapd %xmm10, %xmm13
1250 ; SSE-NEXT: unpcklpd {{.*#+}} xmm13 = xmm13[0],xmm10[0]
1251 ; SSE-NEXT: movapd %xmm1, %xmm14
1252 ; SSE-NEXT: mulpd %xmm13, %xmm14
1253 ; SSE-NEXT: mulpd %xmm0, %xmm13
1254 ; SSE-NEXT: unpckhpd {{.*#+}} xmm10 = xmm10[1,1]
1255 ; SSE-NEXT: movapd %xmm3, %xmm15
1256 ; SSE-NEXT: mulpd %xmm10, %xmm15
1257 ; SSE-NEXT: addpd %xmm14, %xmm15
1258 ; SSE-NEXT: mulpd %xmm2, %xmm10
1259 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1260 ; SSE-NEXT: addpd %xmm13, %xmm10
1261 ; SSE-NEXT: movapd %xmm8, %xmm13
1262 ; SSE-NEXT: unpcklpd {{.*#+}} xmm13 = xmm13[0],xmm8[0]
1263 ; SSE-NEXT: movapd %xmm4, %xmm14
1264 ; SSE-NEXT: mulpd %xmm13, %xmm14
1265 ; SSE-NEXT: addpd %xmm10, %xmm14
1266 ; SSE-NEXT: movapd %xmm6, %xmm4
1267 ; SSE-NEXT: mulpd %xmm6, %xmm13
1268 ; SSE-NEXT: addpd %xmm15, %xmm13
1269 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1,1]
1270 ; SSE-NEXT: movapd %xmm7, %xmm10
1271 ; SSE-NEXT: mulpd %xmm8, %xmm10
1272 ; SSE-NEXT: addpd %xmm13, %xmm10
1273 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1274 ; SSE-NEXT: mulpd %xmm6, %xmm8
1275 ; SSE-NEXT: addpd %xmm14, %xmm8
1276 ; SSE-NEXT: movapd %xmm12, %xmm13
1277 ; SSE-NEXT: unpcklpd {{.*#+}} xmm13 = xmm13[0],xmm12[0]
1278 ; SSE-NEXT: movapd %xmm1, %xmm14
1279 ; SSE-NEXT: mulpd %xmm13, %xmm14
1280 ; SSE-NEXT: mulpd %xmm0, %xmm13
1281 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1,1]
1282 ; SSE-NEXT: movapd %xmm3, %xmm15
1283 ; SSE-NEXT: mulpd %xmm12, %xmm15
1284 ; SSE-NEXT: addpd %xmm14, %xmm15
1285 ; SSE-NEXT: mulpd %xmm2, %xmm12
1286 ; SSE-NEXT: addpd %xmm13, %xmm12
1287 ; SSE-NEXT: movapd %xmm9, %xmm13
1288 ; SSE-NEXT: unpcklpd {{.*#+}} xmm13 = xmm13[0],xmm9[0]
1289 ; SSE-NEXT: movapd %xmm5, %xmm14
1290 ; SSE-NEXT: mulpd %xmm13, %xmm14
1291 ; SSE-NEXT: addpd %xmm12, %xmm14
1292 ; SSE-NEXT: mulpd %xmm4, %xmm13
1293 ; SSE-NEXT: movapd %xmm4, %xmm2
1294 ; SSE-NEXT: addpd %xmm15, %xmm13
1295 ; SSE-NEXT: unpckhpd {{.*#+}} xmm9 = xmm9[1,1]
1296 ; SSE-NEXT: movapd %xmm7, %xmm12
1297 ; SSE-NEXT: mulpd %xmm9, %xmm12
1298 ; SSE-NEXT: addpd %xmm13, %xmm12
1299 ; SSE-NEXT: mulpd %xmm6, %xmm9
1300 ; SSE-NEXT: addpd %xmm14, %xmm9
1301 ; SSE-NEXT: movapd %xmm11, %xmm14
1302 ; SSE-NEXT: unpcklpd {{.*#+}} xmm14 = xmm14[0],xmm11[0]
1303 ; SSE-NEXT: movapd %xmm1, %xmm13
1304 ; SSE-NEXT: mulpd %xmm14, %xmm13
1305 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1,1]
1306 ; SSE-NEXT: movapd %xmm3, %xmm15
1307 ; SSE-NEXT: mulpd %xmm11, %xmm15
1308 ; SSE-NEXT: addpd %xmm13, %xmm15
1309 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm13
1310 ; SSE-NEXT: mulpd %xmm0, %xmm14
1311 ; SSE-NEXT: movapd %xmm0, %xmm6
1312 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
1313 ; SSE-NEXT: mulpd %xmm0, %xmm11
1314 ; SSE-NEXT: addpd %xmm14, %xmm11
1315 ; SSE-NEXT: movapd %xmm13, %xmm14
1316 ; SSE-NEXT: unpcklpd {{.*#+}} xmm14 = xmm14[0],xmm13[0]
1317 ; SSE-NEXT: movapd %xmm5, %xmm4
1318 ; SSE-NEXT: mulpd %xmm14, %xmm4
1319 ; SSE-NEXT: addpd %xmm11, %xmm4
1320 ; SSE-NEXT: mulpd %xmm2, %xmm14
1321 ; SSE-NEXT: addpd %xmm15, %xmm14
1322 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1,1]
1323 ; SSE-NEXT: movapd %xmm7, %xmm11
1324 ; SSE-NEXT: mulpd %xmm13, %xmm11
1325 ; SSE-NEXT: addpd %xmm14, %xmm11
1326 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
1327 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Reload
1328 ; SSE-NEXT: mulpd %xmm15, %xmm13
1329 ; SSE-NEXT: addpd %xmm4, %xmm13
1330 ; SSE-NEXT: movapd %xmm14, %xmm4
1331 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm14[0]
1332 ; SSE-NEXT: mulpd %xmm4, %xmm1
1333 ; SSE-NEXT: mulpd %xmm6, %xmm4
1334 ; SSE-NEXT: unpckhpd {{.*#+}} xmm14 = xmm14[1,1]
1335 ; SSE-NEXT: mulpd %xmm14, %xmm3
1336 ; SSE-NEXT: addpd %xmm1, %xmm3
1337 ; SSE-NEXT: mulpd %xmm0, %xmm14
1338 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
1339 ; SSE-NEXT: addpd %xmm4, %xmm14
1340 ; SSE-NEXT: movapd %xmm0, %xmm1
1341 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
1342 ; SSE-NEXT: mulpd %xmm1, %xmm5
1343 ; SSE-NEXT: addpd %xmm14, %xmm5
1344 ; SSE-NEXT: mulpd %xmm2, %xmm1
1345 ; SSE-NEXT: addpd %xmm3, %xmm1
1346 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
1347 ; SSE-NEXT: mulpd %xmm0, %xmm7
1348 ; SSE-NEXT: addpd %xmm1, %xmm7
1349 ; SSE-NEXT: mulpd %xmm15, %xmm0
1350 ; SSE-NEXT: addpd %xmm5, %xmm0
1351 ; SSE-NEXT: movapd %xmm7, 112(%rdi)
1352 ; SSE-NEXT: movapd %xmm0, 96(%rdi)
1353 ; SSE-NEXT: movapd %xmm11, 80(%rdi)
1354 ; SSE-NEXT: movapd %xmm13, 64(%rdi)
1355 ; SSE-NEXT: movapd %xmm12, 48(%rdi)
1356 ; SSE-NEXT: movapd %xmm9, 32(%rdi)
1357 ; SSE-NEXT: movapd %xmm10, 16(%rdi)
1358 ; SSE-NEXT: movapd %xmm8, (%rdi)
1359 ; SSE-NEXT: retq
1361 ; AVX1-LABEL: test_mul4x4_f64:
1362 ; AVX1: # %bb.0: # %entry
1363 ; AVX1-NEXT: vmovddup {{.*#+}} xmm8 = xmm4[0,0]
1364 ; AVX1-NEXT: vinsertf128 $1, %xmm8, %ymm8, %ymm8
1365 ; AVX1-NEXT: vmulpd %ymm0, %ymm8, %ymm8
1366 ; AVX1-NEXT: vshufpd {{.*#+}} xmm9 = xmm4[1,1]
1367 ; AVX1-NEXT: vinsertf128 $1, %xmm9, %ymm9, %ymm9
1368 ; AVX1-NEXT: vmulpd %ymm1, %ymm9, %ymm9
1369 ; AVX1-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1370 ; AVX1-NEXT: vperm2f128 {{.*#+}} ymm4 = ymm4[2,3,2,3]
1371 ; AVX1-NEXT: vmovddup {{.*#+}} ymm9 = ymm4[0,0,2,2]
1372 ; AVX1-NEXT: vmulpd %ymm2, %ymm9, %ymm9
1373 ; AVX1-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1374 ; AVX1-NEXT: vshufpd {{.*#+}} ymm4 = ymm4[1,1,3,3]
1375 ; AVX1-NEXT: vmulpd %ymm4, %ymm3, %ymm4
1376 ; AVX1-NEXT: vaddpd %ymm4, %ymm8, %ymm4
1377 ; AVX1-NEXT: vmovddup {{.*#+}} xmm8 = xmm5[0,0]
1378 ; AVX1-NEXT: vinsertf128 $1, %xmm8, %ymm8, %ymm8
1379 ; AVX1-NEXT: vmulpd %ymm0, %ymm8, %ymm8
1380 ; AVX1-NEXT: vshufpd {{.*#+}} xmm9 = xmm5[1,1]
1381 ; AVX1-NEXT: vinsertf128 $1, %xmm9, %ymm9, %ymm9
1382 ; AVX1-NEXT: vmulpd %ymm1, %ymm9, %ymm9
1383 ; AVX1-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1384 ; AVX1-NEXT: vperm2f128 {{.*#+}} ymm5 = ymm5[2,3,2,3]
1385 ; AVX1-NEXT: vmovddup {{.*#+}} ymm9 = ymm5[0,0,2,2]
1386 ; AVX1-NEXT: vmulpd %ymm2, %ymm9, %ymm9
1387 ; AVX1-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1388 ; AVX1-NEXT: vshufpd {{.*#+}} ymm5 = ymm5[1,1,3,3]
1389 ; AVX1-NEXT: vmulpd %ymm5, %ymm3, %ymm5
1390 ; AVX1-NEXT: vaddpd %ymm5, %ymm8, %ymm5
1391 ; AVX1-NEXT: vmovddup {{.*#+}} xmm8 = xmm6[0,0]
1392 ; AVX1-NEXT: vinsertf128 $1, %xmm8, %ymm8, %ymm8
1393 ; AVX1-NEXT: vmulpd %ymm0, %ymm8, %ymm8
1394 ; AVX1-NEXT: vshufpd {{.*#+}} xmm9 = xmm6[1,1]
1395 ; AVX1-NEXT: vinsertf128 $1, %xmm9, %ymm9, %ymm9
1396 ; AVX1-NEXT: vmulpd %ymm1, %ymm9, %ymm9
1397 ; AVX1-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1398 ; AVX1-NEXT: vperm2f128 {{.*#+}} ymm6 = ymm6[2,3,2,3]
1399 ; AVX1-NEXT: vmovddup {{.*#+}} ymm9 = ymm6[0,0,2,2]
1400 ; AVX1-NEXT: vmulpd %ymm2, %ymm9, %ymm9
1401 ; AVX1-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1402 ; AVX1-NEXT: vshufpd {{.*#+}} ymm6 = ymm6[1,1,3,3]
1403 ; AVX1-NEXT: vmulpd %ymm6, %ymm3, %ymm6
1404 ; AVX1-NEXT: vaddpd %ymm6, %ymm8, %ymm6
1405 ; AVX1-NEXT: vmovddup {{.*#+}} xmm8 = xmm7[0,0]
1406 ; AVX1-NEXT: vinsertf128 $1, %xmm8, %ymm8, %ymm8
1407 ; AVX1-NEXT: vmulpd %ymm0, %ymm8, %ymm0
1408 ; AVX1-NEXT: vshufpd {{.*#+}} xmm8 = xmm7[1,1]
1409 ; AVX1-NEXT: vinsertf128 $1, %xmm8, %ymm8, %ymm8
1410 ; AVX1-NEXT: vmulpd %ymm1, %ymm8, %ymm1
1411 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
1412 ; AVX1-NEXT: vperm2f128 {{.*#+}} ymm1 = ymm7[2,3,2,3]
1413 ; AVX1-NEXT: vmovddup {{.*#+}} ymm7 = ymm1[0,0,2,2]
1414 ; AVX1-NEXT: vmulpd %ymm7, %ymm2, %ymm2
1415 ; AVX1-NEXT: vaddpd %ymm2, %ymm0, %ymm0
1416 ; AVX1-NEXT: vshufpd {{.*#+}} ymm1 = ymm1[1,1,3,3]
1417 ; AVX1-NEXT: vmulpd %ymm1, %ymm3, %ymm1
1418 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm3
1419 ; AVX1-NEXT: vmovapd %ymm4, %ymm0
1420 ; AVX1-NEXT: vmovapd %ymm5, %ymm1
1421 ; AVX1-NEXT: vmovapd %ymm6, %ymm2
1422 ; AVX1-NEXT: retq
1424 ; AVX2-LABEL: test_mul4x4_f64:
1425 ; AVX2: # %bb.0: # %entry
1426 ; AVX2-NEXT: vbroadcastsd %xmm4, %ymm8
1427 ; AVX2-NEXT: vmulpd %ymm0, %ymm8, %ymm8
1428 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm4[1,1,1,1]
1429 ; AVX2-NEXT: vmulpd %ymm1, %ymm9, %ymm9
1430 ; AVX2-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1431 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm4[2,2,2,2]
1432 ; AVX2-NEXT: vmulpd %ymm2, %ymm9, %ymm9
1433 ; AVX2-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1434 ; AVX2-NEXT: vpermpd {{.*#+}} ymm4 = ymm4[3,3,3,3]
1435 ; AVX2-NEXT: vmulpd %ymm4, %ymm3, %ymm4
1436 ; AVX2-NEXT: vaddpd %ymm4, %ymm8, %ymm4
1437 ; AVX2-NEXT: vbroadcastsd %xmm5, %ymm8
1438 ; AVX2-NEXT: vmulpd %ymm0, %ymm8, %ymm8
1439 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm5[1,1,1,1]
1440 ; AVX2-NEXT: vmulpd %ymm1, %ymm9, %ymm9
1441 ; AVX2-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1442 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm5[2,2,2,2]
1443 ; AVX2-NEXT: vmulpd %ymm2, %ymm9, %ymm9
1444 ; AVX2-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1445 ; AVX2-NEXT: vpermpd {{.*#+}} ymm5 = ymm5[3,3,3,3]
1446 ; AVX2-NEXT: vmulpd %ymm5, %ymm3, %ymm5
1447 ; AVX2-NEXT: vaddpd %ymm5, %ymm8, %ymm5
1448 ; AVX2-NEXT: vbroadcastsd %xmm6, %ymm8
1449 ; AVX2-NEXT: vmulpd %ymm0, %ymm8, %ymm8
1450 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm6[1,1,1,1]
1451 ; AVX2-NEXT: vmulpd %ymm1, %ymm9, %ymm9
1452 ; AVX2-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1453 ; AVX2-NEXT: vpermpd {{.*#+}} ymm9 = ymm6[2,2,2,2]
1454 ; AVX2-NEXT: vmulpd %ymm2, %ymm9, %ymm9
1455 ; AVX2-NEXT: vaddpd %ymm9, %ymm8, %ymm8
1456 ; AVX2-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[3,3,3,3]
1457 ; AVX2-NEXT: vmulpd %ymm6, %ymm3, %ymm6
1458 ; AVX2-NEXT: vaddpd %ymm6, %ymm8, %ymm6
1459 ; AVX2-NEXT: vbroadcastsd %xmm7, %ymm8
1460 ; AVX2-NEXT: vmulpd %ymm0, %ymm8, %ymm0
1461 ; AVX2-NEXT: vpermpd {{.*#+}} ymm8 = ymm7[1,1,1,1]
1462 ; AVX2-NEXT: vmulpd %ymm1, %ymm8, %ymm1
1463 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
1464 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm7[2,2,2,2]
1465 ; AVX2-NEXT: vmulpd %ymm1, %ymm2, %ymm1
1466 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
1467 ; AVX2-NEXT: vpermpd {{.*#+}} ymm1 = ymm7[3,3,3,3]
1468 ; AVX2-NEXT: vmulpd %ymm1, %ymm3, %ymm1
1469 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm3
1470 ; AVX2-NEXT: vmovapd %ymm4, %ymm0
1471 ; AVX2-NEXT: vmovapd %ymm5, %ymm1
1472 ; AVX2-NEXT: vmovapd %ymm6, %ymm2
1473 ; AVX2-NEXT: retq
1475 ; AVX512F-LABEL: test_mul4x4_f64:
1476 ; AVX512F: # %bb.0: # %entry
1477 ; AVX512F-NEXT: vextractf64x4 $1, %zmm0, %ymm5
1478 ; AVX512F-NEXT: vextractf64x4 $1, %zmm1, %ymm4
1479 ; AVX512F-NEXT: vbroadcastsd %xmm2, %ymm6
1480 ; AVX512F-NEXT: vmulpd %ymm6, %ymm0, %ymm6
1481 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm7 = ymm2[1,1,1,1]
1482 ; AVX512F-NEXT: vmulpd %ymm7, %ymm5, %ymm7
1483 ; AVX512F-NEXT: vaddpd %ymm7, %ymm6, %ymm6
1484 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm7 = ymm2[2,2,2,2]
1485 ; AVX512F-NEXT: vmulpd %ymm7, %ymm1, %ymm7
1486 ; AVX512F-NEXT: vaddpd %ymm7, %ymm6, %ymm6
1487 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm7 = ymm2[3,3,3,3]
1488 ; AVX512F-NEXT: vmulpd %ymm7, %ymm4, %ymm7
1489 ; AVX512F-NEXT: vaddpd %ymm7, %ymm6, %ymm6
1490 ; AVX512F-NEXT: vextractf64x4 $1, %zmm2, %ymm2
1491 ; AVX512F-NEXT: vbroadcastsd %xmm2, %ymm7
1492 ; AVX512F-NEXT: vmulpd %ymm7, %ymm0, %ymm7
1493 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[1,1,1,1]
1494 ; AVX512F-NEXT: vmulpd %ymm5, %ymm8, %ymm8
1495 ; AVX512F-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1496 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[2,2,2,2]
1497 ; AVX512F-NEXT: vmulpd %ymm1, %ymm8, %ymm8
1498 ; AVX512F-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1499 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[3,3,3,3]
1500 ; AVX512F-NEXT: vmulpd %ymm2, %ymm4, %ymm2
1501 ; AVX512F-NEXT: vaddpd %ymm2, %ymm7, %ymm2
1502 ; AVX512F-NEXT: vbroadcastsd %xmm3, %ymm7
1503 ; AVX512F-NEXT: vmulpd %ymm7, %ymm0, %ymm7
1504 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[1,1,1,1]
1505 ; AVX512F-NEXT: vmulpd %ymm5, %ymm8, %ymm8
1506 ; AVX512F-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1507 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[2,2,2,2]
1508 ; AVX512F-NEXT: vmulpd %ymm1, %ymm8, %ymm8
1509 ; AVX512F-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1510 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[3,3,3,3]
1511 ; AVX512F-NEXT: vmulpd %ymm4, %ymm8, %ymm8
1512 ; AVX512F-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1513 ; AVX512F-NEXT: vextractf64x4 $1, %zmm3, %ymm3
1514 ; AVX512F-NEXT: vbroadcastsd %xmm3, %ymm8
1515 ; AVX512F-NEXT: vmulpd %ymm0, %ymm8, %ymm0
1516 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[1,1,1,1]
1517 ; AVX512F-NEXT: vmulpd %ymm5, %ymm8, %ymm5
1518 ; AVX512F-NEXT: vaddpd %ymm5, %ymm0, %ymm0
1519 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm5 = ymm3[2,2,2,2]
1520 ; AVX512F-NEXT: vmulpd %ymm5, %ymm1, %ymm1
1521 ; AVX512F-NEXT: vaddpd %ymm1, %ymm0, %ymm0
1522 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm1 = ymm3[3,3,3,3]
1523 ; AVX512F-NEXT: vmulpd %ymm1, %ymm4, %ymm1
1524 ; AVX512F-NEXT: vaddpd %ymm1, %ymm0, %ymm1
1525 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm2, %zmm6, %zmm0
1526 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm1, %zmm7, %zmm1
1527 ; AVX512F-NEXT: retq
1529 ; AVX512VL-LABEL: test_mul4x4_f64:
1530 ; AVX512VL: # %bb.0: # %entry
1531 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm0, %ymm4
1532 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm1, %ymm5
1533 ; AVX512VL-NEXT: vbroadcastsd %xmm2, %ymm6
1534 ; AVX512VL-NEXT: vmulpd %ymm6, %ymm0, %ymm6
1535 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm7 = ymm2[1,1,1,1]
1536 ; AVX512VL-NEXT: vmulpd %ymm7, %ymm4, %ymm7
1537 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm6, %ymm6
1538 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm7 = ymm2[2,2,2,2]
1539 ; AVX512VL-NEXT: vmulpd %ymm7, %ymm1, %ymm7
1540 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm6, %ymm6
1541 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm7 = ymm2[3,3,3,3]
1542 ; AVX512VL-NEXT: vmulpd %ymm7, %ymm5, %ymm7
1543 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm6, %ymm6
1544 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm2, %ymm2
1545 ; AVX512VL-NEXT: vbroadcastsd %xmm2, %ymm7
1546 ; AVX512VL-NEXT: vmulpd %ymm7, %ymm0, %ymm7
1547 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[1,1,1,1]
1548 ; AVX512VL-NEXT: vmulpd %ymm4, %ymm8, %ymm8
1549 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1550 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm8 = ymm2[2,2,2,2]
1551 ; AVX512VL-NEXT: vmulpd %ymm1, %ymm8, %ymm8
1552 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1553 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm2 = ymm2[3,3,3,3]
1554 ; AVX512VL-NEXT: vmulpd %ymm2, %ymm5, %ymm2
1555 ; AVX512VL-NEXT: vaddpd %ymm2, %ymm7, %ymm2
1556 ; AVX512VL-NEXT: vbroadcastsd %xmm3, %ymm7
1557 ; AVX512VL-NEXT: vmulpd %ymm7, %ymm0, %ymm7
1558 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[1,1,1,1]
1559 ; AVX512VL-NEXT: vmulpd %ymm4, %ymm8, %ymm8
1560 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1561 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[2,2,2,2]
1562 ; AVX512VL-NEXT: vmulpd %ymm1, %ymm8, %ymm8
1563 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1564 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[3,3,3,3]
1565 ; AVX512VL-NEXT: vmulpd %ymm5, %ymm8, %ymm8
1566 ; AVX512VL-NEXT: vaddpd %ymm7, %ymm8, %ymm7
1567 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm3, %ymm3
1568 ; AVX512VL-NEXT: vbroadcastsd %xmm3, %ymm8
1569 ; AVX512VL-NEXT: vmulpd %ymm0, %ymm8, %ymm0
1570 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm8 = ymm3[1,1,1,1]
1571 ; AVX512VL-NEXT: vmulpd %ymm4, %ymm8, %ymm4
1572 ; AVX512VL-NEXT: vaddpd %ymm4, %ymm0, %ymm0
1573 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm4 = ymm3[2,2,2,2]
1574 ; AVX512VL-NEXT: vmulpd %ymm4, %ymm1, %ymm1
1575 ; AVX512VL-NEXT: vaddpd %ymm1, %ymm0, %ymm0
1576 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm1 = ymm3[3,3,3,3]
1577 ; AVX512VL-NEXT: vmulpd %ymm1, %ymm5, %ymm1
1578 ; AVX512VL-NEXT: vaddpd %ymm1, %ymm0, %ymm1
1579 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm2, %zmm6, %zmm0
1580 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm1, %zmm7, %zmm1
1581 ; AVX512VL-NEXT: retq
1582 entry:
1583 %split = shufflevector <16 x double> %a0, <16 x double> poison, <4 x i32> <i32 0, i32 1, i32 2, i32 3>
1584 %split1 = shufflevector <16 x double> %a0, <16 x double> poison, <4 x i32> <i32 4, i32 5, i32 6, i32 7>
1585 %split2 = shufflevector <16 x double> %a0, <16 x double> poison, <4 x i32> <i32 8, i32 9, i32 10, i32 11>
1586 %split3 = shufflevector <16 x double> %a0, <16 x double> poison, <4 x i32> <i32 12, i32 13, i32 14, i32 15>
1587 %splat.splat = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> zeroinitializer
1588 %0 = fmul <4 x double> %split, %splat.splat
1589 %splat.splat10 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 1, i32 1, i32 1, i32 1>
1590 %1 = fmul <4 x double> %split1, %splat.splat10
1591 %2 = fadd <4 x double> %0, %1
1592 %splat.splat13 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 2, i32 2, i32 2, i32 2>
1593 %3 = fmul <4 x double> %split2, %splat.splat13
1594 %4 = fadd <4 x double> %2, %3
1595 %splat.splat16 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 3, i32 3, i32 3, i32 3>
1596 %5 = fmul <4 x double> %split3, %splat.splat16
1597 %6 = fadd <4 x double> %4, %5
1598 %splat.splat19 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 4, i32 4, i32 4, i32 4>
1599 %7 = fmul <4 x double> %split, %splat.splat19
1600 %splat.splat22 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 5, i32 5, i32 5, i32 5>
1601 %8 = fmul <4 x double> %split1, %splat.splat22
1602 %9 = fadd <4 x double> %7, %8
1603 %splat.splat25 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 6, i32 6, i32 6, i32 6>
1604 %10 = fmul <4 x double> %split2, %splat.splat25
1605 %11 = fadd <4 x double> %9, %10
1606 %splat.splat28 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 7, i32 7, i32 7, i32 7>
1607 %12 = fmul <4 x double> %split3, %splat.splat28
1608 %13 = fadd <4 x double> %11, %12
1609 %splat.splat31 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 8, i32 8, i32 8, i32 8>
1610 %14 = fmul <4 x double> %split, %splat.splat31
1611 %splat.splat34 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 9, i32 9, i32 9, i32 9>
1612 %15 = fmul <4 x double> %split1, %splat.splat34
1613 %16 = fadd <4 x double> %14, %15
1614 %splat.splat37 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 10, i32 10, i32 10, i32 10>
1615 %17 = fmul <4 x double> %split2, %splat.splat37
1616 %18 = fadd <4 x double> %16, %17
1617 %splat.splat40 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 11, i32 11, i32 11, i32 11>
1618 %19 = fmul <4 x double> %split3, %splat.splat40
1619 %20 = fadd <4 x double> %18, %19
1620 %splat.splat43 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 12, i32 12, i32 12, i32 12>
1621 %21 = fmul <4 x double> %split, %splat.splat43
1622 %splat.splat46 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 13, i32 13, i32 13, i32 13>
1623 %22 = fmul <4 x double> %split1, %splat.splat46
1624 %23 = fadd <4 x double> %21, %22
1625 %splat.splat49 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 14, i32 14, i32 14, i32 14>
1626 %24 = fmul <4 x double> %split2, %splat.splat49
1627 %25 = fadd <4 x double> %23, %24
1628 %splat.splat52 = shufflevector <16 x double> %a1, <16 x double> undef, <4 x i32> <i32 15, i32 15, i32 15, i32 15>
1629 %26 = fmul <4 x double> %split3, %splat.splat52
1630 %27 = fadd <4 x double> %25, %26
1631 %28 = shufflevector <4 x double> %6, <4 x double> %13, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
1632 %29 = shufflevector <4 x double> %20, <4 x double> %27, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
1633 %30 = shufflevector <8 x double> %28, <8 x double> %29, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
1634 ret <16 x double> %30
1635 }
1637 define <64 x float> @test_mul8x8_f32(<64 x float> %a0, <64 x float> %a1) nounwind {
1638 ; SSE-LABEL: test_mul8x8_f32:
1639 ; SSE: # %bb.0: # %entry
1640 ; SSE-NEXT: subq $120, %rsp
1641 ; SSE-NEXT: movaps %xmm5, %xmm11
1642 ; SSE-NEXT: movaps %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1643 ; SSE-NEXT: movaps %xmm1, %xmm9
1644 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1645 ; SSE-NEXT: movq %rdi, %rax
1646 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm8
1647 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm13
1648 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm14
1649 ; SSE-NEXT: movaps %xmm14, %xmm15
1650 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[0,0],xmm14[0,0]
1651 ; SSE-NEXT: movaps %xmm1, %xmm5
1652 ; SSE-NEXT: mulps %xmm15, %xmm5
1653 ; SSE-NEXT: mulps %xmm0, %xmm15
1654 ; SSE-NEXT: movaps %xmm14, %xmm0
1655 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm14[1,1]
1656 ; SSE-NEXT: movaps %xmm3, %xmm10
1657 ; SSE-NEXT: movaps %xmm3, %xmm12
1658 ; SSE-NEXT: mulps %xmm0, %xmm10
1659 ; SSE-NEXT: addps %xmm5, %xmm10
1660 ; SSE-NEXT: mulps %xmm2, %xmm0
1661 ; SSE-NEXT: addps %xmm15, %xmm0
1662 ; SSE-NEXT: movaps %xmm14, %xmm1
1663 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm14[2,2]
1664 ; SSE-NEXT: movaps %xmm4, %xmm2
1665 ; SSE-NEXT: movaps %xmm4, %xmm15
1666 ; SSE-NEXT: mulps %xmm1, %xmm2
1667 ; SSE-NEXT: addps %xmm0, %xmm2
1668 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm5
1669 ; SSE-NEXT: mulps %xmm11, %xmm1
1670 ; SSE-NEXT: addps %xmm10, %xmm1
1671 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[3,3,3,3]
1672 ; SSE-NEXT: movaps %xmm7, %xmm3
1673 ; SSE-NEXT: mulps %xmm14, %xmm3
1674 ; SSE-NEXT: addps %xmm1, %xmm3
1675 ; SSE-NEXT: mulps %xmm6, %xmm14
1676 ; SSE-NEXT: addps %xmm2, %xmm14
1677 ; SSE-NEXT: movaps %xmm5, %xmm1
1678 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm5[0,0]
1679 ; SSE-NEXT: movaps %xmm13, %xmm2
1680 ; SSE-NEXT: mulps %xmm1, %xmm2
1681 ; SSE-NEXT: addps %xmm14, %xmm2
1682 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
1683 ; SSE-NEXT: addps %xmm3, %xmm1
1684 ; SSE-NEXT: movaps %xmm5, %xmm0
1685 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm5[1,1]
1686 ; SSE-NEXT: movaps %xmm8, %xmm3
1687 ; SSE-NEXT: mulps %xmm0, %xmm3
1688 ; SSE-NEXT: addps %xmm1, %xmm3
1689 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm0
1690 ; SSE-NEXT: addps %xmm2, %xmm0
1691 ; SSE-NEXT: movaps %xmm5, %xmm1
1692 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm5[2,2]
1693 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1694 ; SSE-NEXT: mulps %xmm1, %xmm2
1695 ; SSE-NEXT: addps %xmm0, %xmm2
1696 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
1697 ; SSE-NEXT: addps %xmm3, %xmm1
1698 ; SSE-NEXT: shufps {{.*#+}} xmm5 = xmm5[3,3,3,3]
1699 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1700 ; SSE-NEXT: mulps %xmm5, %xmm0
1701 ; SSE-NEXT: addps %xmm1, %xmm0
1702 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1703 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm5
1704 ; SSE-NEXT: addps %xmm2, %xmm5
1705 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1706 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1707 ; SSE-NEXT: movaps %xmm0, %xmm1
1708 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[0,0]
1709 ; SSE-NEXT: movaps %xmm9, %xmm2
1710 ; SSE-NEXT: mulps %xmm1, %xmm2
1711 ; SSE-NEXT: movaps %xmm0, %xmm3
1712 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm0[1,1]
1713 ; SSE-NEXT: movaps %xmm12, %xmm4
1714 ; SSE-NEXT: mulps %xmm3, %xmm4
1715 ; SSE-NEXT: addps %xmm2, %xmm4
1716 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1717 ; SSE-NEXT: mulps %xmm5, %xmm1
1718 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
1719 ; SSE-NEXT: mulps %xmm13, %xmm3
1720 ; SSE-NEXT: addps %xmm1, %xmm3
1721 ; SSE-NEXT: movaps %xmm0, %xmm1
1722 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,2]
1723 ; SSE-NEXT: movaps %xmm15, %xmm2
1724 ; SSE-NEXT: mulps %xmm1, %xmm2
1725 ; SSE-NEXT: addps %xmm3, %xmm2
1726 ; SSE-NEXT: movaps %xmm11, %xmm8
1727 ; SSE-NEXT: mulps %xmm11, %xmm1
1728 ; SSE-NEXT: addps %xmm4, %xmm1
1729 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
1730 ; SSE-NEXT: movaps %xmm7, %xmm3
1731 ; SSE-NEXT: mulps %xmm0, %xmm3
1732 ; SSE-NEXT: addps %xmm1, %xmm3
1733 ; SSE-NEXT: movaps %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1734 ; SSE-NEXT: mulps %xmm6, %xmm0
1735 ; SSE-NEXT: addps %xmm2, %xmm0
1736 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm4
1737 ; SSE-NEXT: movaps %xmm4, %xmm1
1738 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm4[0,0]
1739 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm14
1740 ; SSE-NEXT: movaps %xmm14, %xmm2
1741 ; SSE-NEXT: mulps %xmm1, %xmm2
1742 ; SSE-NEXT: addps %xmm0, %xmm2
1743 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
1744 ; SSE-NEXT: addps %xmm3, %xmm1
1745 ; SSE-NEXT: movaps %xmm4, %xmm0
1746 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
1747 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm11
1748 ; SSE-NEXT: movaps %xmm11, %xmm3
1749 ; SSE-NEXT: mulps %xmm0, %xmm3
1750 ; SSE-NEXT: addps %xmm1, %xmm3
1751 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm1
1752 ; SSE-NEXT: mulps %xmm1, %xmm0
1753 ; SSE-NEXT: addps %xmm2, %xmm0
1754 ; SSE-NEXT: movaps %xmm4, %xmm1
1755 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm4[2,2]
1756 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1757 ; SSE-NEXT: mulps %xmm1, %xmm2
1758 ; SSE-NEXT: addps %xmm0, %xmm2
1759 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
1760 ; SSE-NEXT: addps %xmm3, %xmm1
1761 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,3,3,3]
1762 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1763 ; SSE-NEXT: mulps %xmm4, %xmm0
1764 ; SSE-NEXT: addps %xmm1, %xmm0
1765 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1766 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm4
1767 ; SSE-NEXT: addps %xmm2, %xmm4
1768 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1769 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1770 ; SSE-NEXT: movaps %xmm0, %xmm1
1771 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[0,0]
1772 ; SSE-NEXT: movaps %xmm9, %xmm2
1773 ; SSE-NEXT: mulps %xmm1, %xmm2
1774 ; SSE-NEXT: movaps %xmm0, %xmm3
1775 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm0[1,1]
1776 ; SSE-NEXT: movaps %xmm12, %xmm4
1777 ; SSE-NEXT: mulps %xmm3, %xmm4
1778 ; SSE-NEXT: addps %xmm2, %xmm4
1779 ; SSE-NEXT: mulps %xmm5, %xmm1
1780 ; SSE-NEXT: movaps %xmm5, %xmm10
1781 ; SSE-NEXT: mulps %xmm13, %xmm3
1782 ; SSE-NEXT: addps %xmm1, %xmm3
1783 ; SSE-NEXT: movaps %xmm0, %xmm1
1784 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,2]
1785 ; SSE-NEXT: movaps %xmm15, %xmm2
1786 ; SSE-NEXT: movaps %xmm15, %xmm5
1787 ; SSE-NEXT: mulps %xmm1, %xmm2
1788 ; SSE-NEXT: addps %xmm3, %xmm2
1789 ; SSE-NEXT: mulps %xmm8, %xmm1
1790 ; SSE-NEXT: addps %xmm4, %xmm1
1791 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
1792 ; SSE-NEXT: movaps %xmm7, %xmm3
1793 ; SSE-NEXT: mulps %xmm0, %xmm3
1794 ; SSE-NEXT: addps %xmm1, %xmm3
1795 ; SSE-NEXT: mulps %xmm6, %xmm0
1796 ; SSE-NEXT: addps %xmm2, %xmm0
1797 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm4
1798 ; SSE-NEXT: movaps %xmm4, %xmm1
1799 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm4[0,0]
1800 ; SSE-NEXT: movaps %xmm14, %xmm2
1801 ; SSE-NEXT: mulps %xmm1, %xmm2
1802 ; SSE-NEXT: addps %xmm0, %xmm2
1803 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm14
1804 ; SSE-NEXT: mulps %xmm14, %xmm1
1805 ; SSE-NEXT: addps %xmm3, %xmm1
1806 ; SSE-NEXT: movaps %xmm4, %xmm0
1807 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
1808 ; SSE-NEXT: movaps %xmm11, %xmm3
1809 ; SSE-NEXT: mulps %xmm0, %xmm3
1810 ; SSE-NEXT: addps %xmm1, %xmm3
1811 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm0
1812 ; SSE-NEXT: addps %xmm2, %xmm0
1813 ; SSE-NEXT: movaps %xmm4, %xmm1
1814 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm4[2,2]
1815 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1816 ; SSE-NEXT: mulps %xmm1, %xmm2
1817 ; SSE-NEXT: addps %xmm0, %xmm2
1818 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm11
1819 ; SSE-NEXT: mulps %xmm11, %xmm1
1820 ; SSE-NEXT: addps %xmm3, %xmm1
1821 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,3,3,3]
1822 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1823 ; SSE-NEXT: mulps %xmm4, %xmm0
1824 ; SSE-NEXT: addps %xmm1, %xmm0
1825 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1826 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1827 ; SSE-NEXT: mulps %xmm0, %xmm4
1828 ; SSE-NEXT: addps %xmm2, %xmm4
1829 ; SSE-NEXT: movaps %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1830 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1831 ; SSE-NEXT: movaps %xmm0, %xmm1
1832 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[0,0]
1833 ; SSE-NEXT: movaps %xmm9, %xmm2
1834 ; SSE-NEXT: mulps %xmm1, %xmm2
1835 ; SSE-NEXT: movaps %xmm0, %xmm3
1836 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm0[1,1]
1837 ; SSE-NEXT: movaps %xmm12, %xmm4
1838 ; SSE-NEXT: mulps %xmm3, %xmm4
1839 ; SSE-NEXT: addps %xmm2, %xmm4
1840 ; SSE-NEXT: movaps %xmm10, %xmm15
1841 ; SSE-NEXT: mulps %xmm10, %xmm1
1842 ; SSE-NEXT: mulps %xmm13, %xmm3
1843 ; SSE-NEXT: addps %xmm1, %xmm3
1844 ; SSE-NEXT: movaps %xmm0, %xmm1
1845 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,2]
1846 ; SSE-NEXT: movaps %xmm5, %xmm2
1847 ; SSE-NEXT: mulps %xmm1, %xmm2
1848 ; SSE-NEXT: addps %xmm3, %xmm2
1849 ; SSE-NEXT: mulps %xmm8, %xmm1
1850 ; SSE-NEXT: addps %xmm4, %xmm1
1851 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
1852 ; SSE-NEXT: movaps %xmm7, %xmm4
1853 ; SSE-NEXT: movaps %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1854 ; SSE-NEXT: movaps %xmm7, %xmm3
1855 ; SSE-NEXT: mulps %xmm0, %xmm3
1856 ; SSE-NEXT: addps %xmm1, %xmm3
1857 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
1858 ; SSE-NEXT: mulps %xmm6, %xmm0
1859 ; SSE-NEXT: addps %xmm2, %xmm0
1860 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm10
1861 ; SSE-NEXT: movaps %xmm10, %xmm1
1862 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm10[0,0]
1863 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1864 ; SSE-NEXT: mulps %xmm1, %xmm2
1865 ; SSE-NEXT: addps %xmm0, %xmm2
1866 ; SSE-NEXT: mulps %xmm14, %xmm1
1867 ; SSE-NEXT: addps %xmm3, %xmm1
1868 ; SSE-NEXT: movaps %xmm10, %xmm0
1869 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm10[1,1]
1870 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm3
1871 ; SSE-NEXT: mulps %xmm0, %xmm3
1872 ; SSE-NEXT: addps %xmm1, %xmm3
1873 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm0
1874 ; SSE-NEXT: addps %xmm2, %xmm0
1875 ; SSE-NEXT: movaps %xmm10, %xmm1
1876 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm10[2,2]
1877 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1878 ; SSE-NEXT: mulps %xmm1, %xmm2
1879 ; SSE-NEXT: addps %xmm0, %xmm2
1880 ; SSE-NEXT: mulps %xmm11, %xmm1
1881 ; SSE-NEXT: addps %xmm3, %xmm1
1882 ; SSE-NEXT: shufps {{.*#+}} xmm10 = xmm10[3,3,3,3]
1883 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm11
1884 ; SSE-NEXT: movaps %xmm11, %xmm0
1885 ; SSE-NEXT: mulps %xmm10, %xmm0
1886 ; SSE-NEXT: addps %xmm1, %xmm0
1887 ; SSE-NEXT: movaps %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1888 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm10
1889 ; SSE-NEXT: addps %xmm2, %xmm10
1890 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1891 ; SSE-NEXT: movaps %xmm0, %xmm1
1892 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[0,0]
1893 ; SSE-NEXT: movaps %xmm9, %xmm2
1894 ; SSE-NEXT: movaps %xmm9, %xmm14
1895 ; SSE-NEXT: mulps %xmm1, %xmm2
1896 ; SSE-NEXT: movaps %xmm0, %xmm3
1897 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[1,1],xmm0[1,1]
1898 ; SSE-NEXT: movaps %xmm12, %xmm7
1899 ; SSE-NEXT: mulps %xmm3, %xmm7
1900 ; SSE-NEXT: addps %xmm2, %xmm7
1901 ; SSE-NEXT: mulps %xmm15, %xmm1
1902 ; SSE-NEXT: mulps %xmm13, %xmm3
1903 ; SSE-NEXT: addps %xmm1, %xmm3
1904 ; SSE-NEXT: movaps %xmm0, %xmm1
1905 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,2]
1906 ; SSE-NEXT: movaps %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1907 ; SSE-NEXT: movaps %xmm5, %xmm2
1908 ; SSE-NEXT: mulps %xmm1, %xmm2
1909 ; SSE-NEXT: addps %xmm3, %xmm2
1910 ; SSE-NEXT: movaps %xmm8, %xmm9
1911 ; SSE-NEXT: mulps %xmm8, %xmm1
1912 ; SSE-NEXT: addps %xmm7, %xmm1
1913 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
1914 ; SSE-NEXT: movaps %xmm4, %xmm7
1915 ; SSE-NEXT: mulps %xmm0, %xmm7
1916 ; SSE-NEXT: addps %xmm1, %xmm7
1917 ; SSE-NEXT: movaps %xmm6, %xmm3
1918 ; SSE-NEXT: mulps %xmm6, %xmm0
1919 ; SSE-NEXT: addps %xmm2, %xmm0
1920 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm4
1921 ; SSE-NEXT: movaps %xmm4, %xmm1
1922 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm4[0,0]
1923 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1924 ; SSE-NEXT: mulps %xmm1, %xmm2
1925 ; SSE-NEXT: addps %xmm0, %xmm2
1926 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
1927 ; SSE-NEXT: addps %xmm7, %xmm1
1928 ; SSE-NEXT: movaps %xmm4, %xmm0
1929 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
1930 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm7
1931 ; SSE-NEXT: mulps %xmm0, %xmm7
1932 ; SSE-NEXT: addps %xmm1, %xmm7
1933 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm1
1934 ; SSE-NEXT: mulps %xmm1, %xmm0
1935 ; SSE-NEXT: addps %xmm2, %xmm0
1936 ; SSE-NEXT: movaps %xmm4, %xmm1
1937 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm4[2,2]
1938 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm2
1939 ; SSE-NEXT: mulps %xmm1, %xmm2
1940 ; SSE-NEXT: addps %xmm0, %xmm2
1941 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
1942 ; SSE-NEXT: addps %xmm7, %xmm1
1943 ; SSE-NEXT: shufps {{.*#+}} xmm4 = xmm4[3,3,3,3]
1944 ; SSE-NEXT: movaps %xmm11, %xmm0
1945 ; SSE-NEXT: mulps %xmm4, %xmm0
1946 ; SSE-NEXT: addps %xmm1, %xmm0
1947 ; SSE-NEXT: movaps %xmm0, (%rsp) # 16-byte Spill
1948 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm4
1949 ; SSE-NEXT: addps %xmm2, %xmm4
1950 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1951 ; SSE-NEXT: movaps %xmm0, %xmm1
1952 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[0,0]
1953 ; SSE-NEXT: movaps %xmm14, %xmm6
1954 ; SSE-NEXT: movaps %xmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1955 ; SSE-NEXT: movaps %xmm14, %xmm2
1956 ; SSE-NEXT: mulps %xmm1, %xmm2
1957 ; SSE-NEXT: movaps %xmm0, %xmm14
1958 ; SSE-NEXT: shufps {{.*#+}} xmm14 = xmm14[1,1],xmm0[1,1]
1959 ; SSE-NEXT: movaps %xmm12, %xmm15
1960 ; SSE-NEXT: movaps %xmm12, %xmm13
1961 ; SSE-NEXT: movaps %xmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
1962 ; SSE-NEXT: mulps %xmm14, %xmm15
1963 ; SSE-NEXT: addps %xmm2, %xmm15
1964 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
1965 ; SSE-NEXT: mulps %xmm8, %xmm1
1966 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Reload
1967 ; SSE-NEXT: mulps %xmm7, %xmm14
1968 ; SSE-NEXT: addps %xmm1, %xmm14
1969 ; SSE-NEXT: movaps %xmm0, %xmm1
1970 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,2]
1971 ; SSE-NEXT: movaps %xmm5, %xmm2
1972 ; SSE-NEXT: mulps %xmm1, %xmm2
1973 ; SSE-NEXT: addps %xmm14, %xmm2
1974 ; SSE-NEXT: mulps %xmm9, %xmm1
1975 ; SSE-NEXT: movaps %xmm9, %xmm11
1976 ; SSE-NEXT: addps %xmm15, %xmm1
1977 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
1978 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
1979 ; SSE-NEXT: movaps %xmm5, %xmm14
1980 ; SSE-NEXT: mulps %xmm0, %xmm14
1981 ; SSE-NEXT: addps %xmm1, %xmm14
1982 ; SSE-NEXT: mulps %xmm3, %xmm0
1983 ; SSE-NEXT: movaps %xmm3, %xmm12
1984 ; SSE-NEXT: addps %xmm2, %xmm0
1985 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm3
1986 ; SSE-NEXT: movaps %xmm3, %xmm1
1987 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm3[0,0]
1988 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm15
1989 ; SSE-NEXT: mulps %xmm1, %xmm15
1990 ; SSE-NEXT: addps %xmm0, %xmm15
1991 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
1992 ; SSE-NEXT: mulps %xmm0, %xmm1
1993 ; SSE-NEXT: addps %xmm14, %xmm1
1994 ; SSE-NEXT: movaps %xmm3, %xmm0
1995 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm3[1,1]
1996 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm14
1997 ; SSE-NEXT: mulps %xmm0, %xmm14
1998 ; SSE-NEXT: addps %xmm1, %xmm14
1999 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm0
2000 ; SSE-NEXT: addps %xmm15, %xmm0
2001 ; SSE-NEXT: movaps %xmm3, %xmm1
2002 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm3[2,2]
2003 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm15
2004 ; SSE-NEXT: mulps %xmm1, %xmm15
2005 ; SSE-NEXT: addps %xmm0, %xmm15
2006 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
2007 ; SSE-NEXT: addps %xmm14, %xmm1
2008 ; SSE-NEXT: shufps {{.*#+}} xmm3 = xmm3[3,3,3,3]
2009 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm14
2010 ; SSE-NEXT: mulps %xmm3, %xmm14
2011 ; SSE-NEXT: addps %xmm1, %xmm14
2012 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm3
2013 ; SSE-NEXT: addps %xmm15, %xmm3
2014 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
2015 ; SSE-NEXT: movaps %xmm0, %xmm1
2016 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[0,0],xmm0[0,0]
2017 ; SSE-NEXT: mulps %xmm1, %xmm6
2018 ; SSE-NEXT: movaps %xmm0, %xmm15
2019 ; SSE-NEXT: shufps {{.*#+}} xmm15 = xmm15[1,1],xmm0[1,1]
2020 ; SSE-NEXT: mulps %xmm15, %xmm13
2021 ; SSE-NEXT: addps %xmm6, %xmm13
2022 ; SSE-NEXT: mulps %xmm8, %xmm1
2023 ; SSE-NEXT: mulps %xmm7, %xmm15
2024 ; SSE-NEXT: addps %xmm1, %xmm15
2025 ; SSE-NEXT: movaps %xmm0, %xmm1
2026 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[2,2],xmm0[2,2]
2027 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
2028 ; SSE-NEXT: movaps %xmm6, %xmm2
2029 ; SSE-NEXT: mulps %xmm1, %xmm2
2030 ; SSE-NEXT: addps %xmm15, %xmm2
2031 ; SSE-NEXT: mulps %xmm9, %xmm1
2032 ; SSE-NEXT: addps %xmm13, %xmm1
2033 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
2034 ; SSE-NEXT: movaps %xmm5, %xmm9
2035 ; SSE-NEXT: mulps %xmm0, %xmm9
2036 ; SSE-NEXT: addps %xmm1, %xmm9
2037 ; SSE-NEXT: mulps %xmm12, %xmm0
2038 ; SSE-NEXT: movaps %xmm12, %xmm5
2039 ; SSE-NEXT: addps %xmm2, %xmm0
2040 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm1
2041 ; SSE-NEXT: movaps %xmm1, %xmm2
2042 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,0],xmm1[0,0]
2043 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm15
2044 ; SSE-NEXT: mulps %xmm2, %xmm15
2045 ; SSE-NEXT: addps %xmm0, %xmm15
2046 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
2047 ; SSE-NEXT: mulps %xmm0, %xmm2
2048 ; SSE-NEXT: addps %xmm9, %xmm2
2049 ; SSE-NEXT: movaps %xmm1, %xmm0
2050 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm1[1,1]
2051 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm9
2052 ; SSE-NEXT: mulps %xmm0, %xmm9
2053 ; SSE-NEXT: addps %xmm2, %xmm9
2054 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm0
2055 ; SSE-NEXT: addps %xmm15, %xmm0
2056 ; SSE-NEXT: movaps %xmm1, %xmm2
2057 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,2],xmm1[2,2]
2058 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm13
2059 ; SSE-NEXT: mulps %xmm2, %xmm13
2060 ; SSE-NEXT: addps %xmm0, %xmm13
2061 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm2
2062 ; SSE-NEXT: addps %xmm9, %xmm2
2063 ; SSE-NEXT: shufps {{.*#+}} xmm1 = xmm1[3,3,3,3]
2064 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm15
2065 ; SSE-NEXT: mulps %xmm1, %xmm15
2066 ; SSE-NEXT: addps %xmm2, %xmm15
2067 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm1
2068 ; SSE-NEXT: addps %xmm13, %xmm1
2069 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
2070 ; SSE-NEXT: movaps %xmm0, %xmm2
2071 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,0],xmm0[0,0]
2072 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
2073 ; SSE-NEXT: mulps %xmm2, %xmm13
2074 ; SSE-NEXT: mulps %xmm8, %xmm2
2075 ; SSE-NEXT: movaps %xmm0, %xmm9
2076 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[1,1],xmm0[1,1]
2077 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
2078 ; SSE-NEXT: mulps %xmm9, %xmm8
2079 ; SSE-NEXT: addps %xmm13, %xmm8
2080 ; SSE-NEXT: mulps %xmm7, %xmm9
2081 ; SSE-NEXT: addps %xmm2, %xmm9
2082 ; SSE-NEXT: movaps %xmm0, %xmm2
2083 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,2],xmm0[2,2]
2084 ; SSE-NEXT: mulps %xmm2, %xmm6
2085 ; SSE-NEXT: addps %xmm9, %xmm6
2086 ; SSE-NEXT: mulps %xmm11, %xmm2
2087 ; SSE-NEXT: addps %xmm8, %xmm2
2088 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[3,3,3,3]
2089 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
2090 ; SSE-NEXT: mulps %xmm0, %xmm9
2091 ; SSE-NEXT: addps %xmm2, %xmm9
2092 ; SSE-NEXT: movaps %xmm9, %xmm12
2093 ; SSE-NEXT: mulps %xmm5, %xmm0
2094 ; SSE-NEXT: addps %xmm6, %xmm0
2095 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm9
2096 ; SSE-NEXT: movaps %xmm9, %xmm2
2097 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[0,0],xmm9[0,0]
2098 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm13
2099 ; SSE-NEXT: mulps %xmm2, %xmm13
2100 ; SSE-NEXT: addps %xmm0, %xmm13
2101 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm2
2102 ; SSE-NEXT: addps %xmm12, %xmm2
2103 ; SSE-NEXT: movaps %xmm9, %xmm0
2104 ; SSE-NEXT: shufps {{.*#+}} xmm0 = xmm0[1,1],xmm9[1,1]
2105 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm12
2106 ; SSE-NEXT: mulps %xmm0, %xmm12
2107 ; SSE-NEXT: addps %xmm2, %xmm12
2108 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm0
2109 ; SSE-NEXT: addps %xmm13, %xmm0
2110 ; SSE-NEXT: movaps %xmm9, %xmm2
2111 ; SSE-NEXT: shufps {{.*#+}} xmm2 = xmm2[2,2],xmm9[2,2]
2112 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm5
2113 ; SSE-NEXT: mulps %xmm2, %xmm5
2114 ; SSE-NEXT: addps %xmm0, %xmm5
2115 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm2
2116 ; SSE-NEXT: addps %xmm12, %xmm2
2117 ; SSE-NEXT: shufps {{.*#+}} xmm9 = xmm9[3,3,3,3]
2118 ; SSE-NEXT: movaps {{[0-9]+}}(%rsp), %xmm0
2119 ; SSE-NEXT: mulps %xmm9, %xmm0
2120 ; SSE-NEXT: addps %xmm2, %xmm0
2121 ; SSE-NEXT: mulps {{[0-9]+}}(%rsp), %xmm9
2122 ; SSE-NEXT: addps %xmm5, %xmm9
2123 ; SSE-NEXT: movaps %xmm0, 240(%rdi)
2124 ; SSE-NEXT: movaps %xmm9, 224(%rdi)
2125 ; SSE-NEXT: movaps %xmm15, 208(%rdi)
2126 ; SSE-NEXT: movaps %xmm1, 192(%rdi)
2127 ; SSE-NEXT: movaps %xmm14, 176(%rdi)
2128 ; SSE-NEXT: movaps %xmm3, 160(%rdi)
2129 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
2130 ; SSE-NEXT: movaps %xmm0, 144(%rdi)
2131 ; SSE-NEXT: movaps %xmm4, 128(%rdi)
2132 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2133 ; SSE-NEXT: movaps %xmm0, 112(%rdi)
2134 ; SSE-NEXT: movaps %xmm10, 96(%rdi)
2135 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2136 ; SSE-NEXT: movaps %xmm0, 80(%rdi)
2137 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2138 ; SSE-NEXT: movaps %xmm0, 64(%rdi)
2139 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2140 ; SSE-NEXT: movaps %xmm0, 48(%rdi)
2141 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2142 ; SSE-NEXT: movaps %xmm0, 32(%rdi)
2143 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2144 ; SSE-NEXT: movaps %xmm0, 16(%rdi)
2145 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
2146 ; SSE-NEXT: movaps %xmm0, (%rdi)
2147 ; SSE-NEXT: addq $120, %rsp
2148 ; SSE-NEXT: retq
2150 ; AVX1-LABEL: test_mul8x8_f32:
2151 ; AVX1: # %bb.0: # %entry
2152 ; AVX1-NEXT: pushq %rbp
2153 ; AVX1-NEXT: movq %rsp, %rbp
2154 ; AVX1-NEXT: andq $-32, %rsp
2155 ; AVX1-NEXT: subq $32, %rsp
2156 ; AVX1-NEXT: movq %rdi, %rax
2157 ; AVX1-NEXT: vbroadcastss 16(%rbp), %ymm8
2158 ; AVX1-NEXT: vmulps %ymm0, %ymm8, %ymm8
2159 ; AVX1-NEXT: vbroadcastss 20(%rbp), %ymm9
2160 ; AVX1-NEXT: vmulps %ymm1, %ymm9, %ymm9
2161 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2162 ; AVX1-NEXT: vbroadcastss 24(%rbp), %ymm9
2163 ; AVX1-NEXT: vmulps %ymm2, %ymm9, %ymm9
2164 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2165 ; AVX1-NEXT: vbroadcastss 28(%rbp), %ymm9
2166 ; AVX1-NEXT: vmulps %ymm3, %ymm9, %ymm9
2167 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2168 ; AVX1-NEXT: vbroadcastss 32(%rbp), %ymm9
2169 ; AVX1-NEXT: vmulps %ymm4, %ymm9, %ymm9
2170 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2171 ; AVX1-NEXT: vbroadcastss 36(%rbp), %ymm9
2172 ; AVX1-NEXT: vmulps %ymm5, %ymm9, %ymm9
2173 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2174 ; AVX1-NEXT: vbroadcastss 40(%rbp), %ymm9
2175 ; AVX1-NEXT: vmulps %ymm6, %ymm9, %ymm9
2176 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2177 ; AVX1-NEXT: vbroadcastss 44(%rbp), %ymm9
2178 ; AVX1-NEXT: vmulps %ymm7, %ymm9, %ymm9
2179 ; AVX1-NEXT: vaddps %ymm9, %ymm8, %ymm8
2180 ; AVX1-NEXT: vbroadcastss 48(%rbp), %ymm9
2181 ; AVX1-NEXT: vmulps %ymm0, %ymm9, %ymm9
2182 ; AVX1-NEXT: vbroadcastss 52(%rbp), %ymm10
2183 ; AVX1-NEXT: vmulps %ymm1, %ymm10, %ymm10
2184 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2185 ; AVX1-NEXT: vbroadcastss 56(%rbp), %ymm10
2186 ; AVX1-NEXT: vmulps %ymm2, %ymm10, %ymm10
2187 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2188 ; AVX1-NEXT: vbroadcastss 60(%rbp), %ymm10
2189 ; AVX1-NEXT: vmulps %ymm3, %ymm10, %ymm10
2190 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2191 ; AVX1-NEXT: vbroadcastss 64(%rbp), %ymm10
2192 ; AVX1-NEXT: vmulps %ymm4, %ymm10, %ymm10
2193 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2194 ; AVX1-NEXT: vbroadcastss 68(%rbp), %ymm10
2195 ; AVX1-NEXT: vmulps %ymm5, %ymm10, %ymm10
2196 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2197 ; AVX1-NEXT: vbroadcastss 72(%rbp), %ymm10
2198 ; AVX1-NEXT: vmulps %ymm6, %ymm10, %ymm10
2199 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2200 ; AVX1-NEXT: vbroadcastss 76(%rbp), %ymm10
2201 ; AVX1-NEXT: vmulps %ymm7, %ymm10, %ymm10
2202 ; AVX1-NEXT: vaddps %ymm10, %ymm9, %ymm9
2203 ; AVX1-NEXT: vbroadcastss 80(%rbp), %ymm10
2204 ; AVX1-NEXT: vmulps %ymm0, %ymm10, %ymm10
2205 ; AVX1-NEXT: vbroadcastss 84(%rbp), %ymm11
2206 ; AVX1-NEXT: vmulps %ymm1, %ymm11, %ymm11
2207 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2208 ; AVX1-NEXT: vbroadcastss 88(%rbp), %ymm11
2209 ; AVX1-NEXT: vmulps %ymm2, %ymm11, %ymm11
2210 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2211 ; AVX1-NEXT: vbroadcastss 92(%rbp), %ymm11
2212 ; AVX1-NEXT: vmulps %ymm3, %ymm11, %ymm11
2213 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2214 ; AVX1-NEXT: vbroadcastss 96(%rbp), %ymm11
2215 ; AVX1-NEXT: vmulps %ymm4, %ymm11, %ymm11
2216 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2217 ; AVX1-NEXT: vbroadcastss 100(%rbp), %ymm11
2218 ; AVX1-NEXT: vmulps %ymm5, %ymm11, %ymm11
2219 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2220 ; AVX1-NEXT: vbroadcastss 104(%rbp), %ymm11
2221 ; AVX1-NEXT: vmulps %ymm6, %ymm11, %ymm11
2222 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2223 ; AVX1-NEXT: vbroadcastss 108(%rbp), %ymm11
2224 ; AVX1-NEXT: vmulps %ymm7, %ymm11, %ymm11
2225 ; AVX1-NEXT: vaddps %ymm11, %ymm10, %ymm10
2226 ; AVX1-NEXT: vbroadcastss 112(%rbp), %ymm11
2227 ; AVX1-NEXT: vmulps %ymm0, %ymm11, %ymm11
2228 ; AVX1-NEXT: vbroadcastss 116(%rbp), %ymm12
2229 ; AVX1-NEXT: vmulps %ymm1, %ymm12, %ymm12
2230 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2231 ; AVX1-NEXT: vbroadcastss 120(%rbp), %ymm12
2232 ; AVX1-NEXT: vmulps %ymm2, %ymm12, %ymm12
2233 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2234 ; AVX1-NEXT: vbroadcastss 124(%rbp), %ymm12
2235 ; AVX1-NEXT: vmulps %ymm3, %ymm12, %ymm12
2236 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2237 ; AVX1-NEXT: vbroadcastss 128(%rbp), %ymm12
2238 ; AVX1-NEXT: vmulps %ymm4, %ymm12, %ymm12
2239 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2240 ; AVX1-NEXT: vbroadcastss 132(%rbp), %ymm12
2241 ; AVX1-NEXT: vmulps %ymm5, %ymm12, %ymm12
2242 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2243 ; AVX1-NEXT: vbroadcastss 136(%rbp), %ymm12
2244 ; AVX1-NEXT: vmulps %ymm6, %ymm12, %ymm12
2245 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2246 ; AVX1-NEXT: vbroadcastss 140(%rbp), %ymm12
2247 ; AVX1-NEXT: vmulps %ymm7, %ymm12, %ymm12
2248 ; AVX1-NEXT: vaddps %ymm12, %ymm11, %ymm11
2249 ; AVX1-NEXT: vbroadcastss 144(%rbp), %ymm12
2250 ; AVX1-NEXT: vmulps %ymm0, %ymm12, %ymm12
2251 ; AVX1-NEXT: vbroadcastss 148(%rbp), %ymm13
2252 ; AVX1-NEXT: vmulps %ymm1, %ymm13, %ymm13
2253 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2254 ; AVX1-NEXT: vbroadcastss 152(%rbp), %ymm13
2255 ; AVX1-NEXT: vmulps %ymm2, %ymm13, %ymm13
2256 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2257 ; AVX1-NEXT: vbroadcastss 156(%rbp), %ymm13
2258 ; AVX1-NEXT: vmulps %ymm3, %ymm13, %ymm13
2259 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2260 ; AVX1-NEXT: vbroadcastss 160(%rbp), %ymm13
2261 ; AVX1-NEXT: vmulps %ymm4, %ymm13, %ymm13
2262 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2263 ; AVX1-NEXT: vbroadcastss 164(%rbp), %ymm13
2264 ; AVX1-NEXT: vmulps %ymm5, %ymm13, %ymm13
2265 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2266 ; AVX1-NEXT: vbroadcastss 168(%rbp), %ymm13
2267 ; AVX1-NEXT: vmulps %ymm6, %ymm13, %ymm13
2268 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2269 ; AVX1-NEXT: vbroadcastss 172(%rbp), %ymm13
2270 ; AVX1-NEXT: vmulps %ymm7, %ymm13, %ymm13
2271 ; AVX1-NEXT: vaddps %ymm13, %ymm12, %ymm12
2272 ; AVX1-NEXT: vbroadcastss 176(%rbp), %ymm13
2273 ; AVX1-NEXT: vmulps %ymm0, %ymm13, %ymm13
2274 ; AVX1-NEXT: vbroadcastss 180(%rbp), %ymm14
2275 ; AVX1-NEXT: vmulps %ymm1, %ymm14, %ymm14
2276 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2277 ; AVX1-NEXT: vbroadcastss 184(%rbp), %ymm14
2278 ; AVX1-NEXT: vmulps %ymm2, %ymm14, %ymm14
2279 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2280 ; AVX1-NEXT: vbroadcastss 188(%rbp), %ymm14
2281 ; AVX1-NEXT: vmulps %ymm3, %ymm14, %ymm14
2282 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2283 ; AVX1-NEXT: vbroadcastss 192(%rbp), %ymm14
2284 ; AVX1-NEXT: vmulps %ymm4, %ymm14, %ymm14
2285 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2286 ; AVX1-NEXT: vbroadcastss 196(%rbp), %ymm14
2287 ; AVX1-NEXT: vmulps %ymm5, %ymm14, %ymm14
2288 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2289 ; AVX1-NEXT: vbroadcastss 200(%rbp), %ymm14
2290 ; AVX1-NEXT: vmulps %ymm6, %ymm14, %ymm14
2291 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2292 ; AVX1-NEXT: vbroadcastss 204(%rbp), %ymm14
2293 ; AVX1-NEXT: vmulps %ymm7, %ymm14, %ymm14
2294 ; AVX1-NEXT: vaddps %ymm14, %ymm13, %ymm13
2295 ; AVX1-NEXT: vbroadcastss 208(%rbp), %ymm14
2296 ; AVX1-NEXT: vmulps %ymm0, %ymm14, %ymm14
2297 ; AVX1-NEXT: vbroadcastss 212(%rbp), %ymm15
2298 ; AVX1-NEXT: vmulps %ymm1, %ymm15, %ymm15
2299 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2300 ; AVX1-NEXT: vbroadcastss 216(%rbp), %ymm15
2301 ; AVX1-NEXT: vmulps %ymm2, %ymm15, %ymm15
2302 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2303 ; AVX1-NEXT: vbroadcastss 220(%rbp), %ymm15
2304 ; AVX1-NEXT: vmulps %ymm3, %ymm15, %ymm15
2305 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2306 ; AVX1-NEXT: vbroadcastss 224(%rbp), %ymm15
2307 ; AVX1-NEXT: vmulps %ymm4, %ymm15, %ymm15
2308 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2309 ; AVX1-NEXT: vbroadcastss 228(%rbp), %ymm15
2310 ; AVX1-NEXT: vmulps %ymm5, %ymm15, %ymm15
2311 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2312 ; AVX1-NEXT: vbroadcastss 232(%rbp), %ymm15
2313 ; AVX1-NEXT: vmulps %ymm6, %ymm15, %ymm15
2314 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2315 ; AVX1-NEXT: vbroadcastss 236(%rbp), %ymm15
2316 ; AVX1-NEXT: vmulps %ymm7, %ymm15, %ymm15
2317 ; AVX1-NEXT: vaddps %ymm15, %ymm14, %ymm14
2318 ; AVX1-NEXT: vbroadcastss 240(%rbp), %ymm15
2319 ; AVX1-NEXT: vmulps %ymm0, %ymm15, %ymm0
2320 ; AVX1-NEXT: vbroadcastss 244(%rbp), %ymm15
2321 ; AVX1-NEXT: vmulps %ymm1, %ymm15, %ymm1
2322 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2323 ; AVX1-NEXT: vbroadcastss 248(%rbp), %ymm1
2324 ; AVX1-NEXT: vmulps %ymm1, %ymm2, %ymm1
2325 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2326 ; AVX1-NEXT: vbroadcastss 252(%rbp), %ymm1
2327 ; AVX1-NEXT: vmulps %ymm1, %ymm3, %ymm1
2328 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2329 ; AVX1-NEXT: vbroadcastss 256(%rbp), %ymm1
2330 ; AVX1-NEXT: vmulps %ymm1, %ymm4, %ymm1
2331 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2332 ; AVX1-NEXT: vbroadcastss 260(%rbp), %ymm1
2333 ; AVX1-NEXT: vmulps %ymm1, %ymm5, %ymm1
2334 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2335 ; AVX1-NEXT: vbroadcastss 264(%rbp), %ymm1
2336 ; AVX1-NEXT: vmulps %ymm1, %ymm6, %ymm1
2337 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2338 ; AVX1-NEXT: vbroadcastss 268(%rbp), %ymm1
2339 ; AVX1-NEXT: vmulps %ymm1, %ymm7, %ymm1
2340 ; AVX1-NEXT: vaddps %ymm1, %ymm0, %ymm0
2341 ; AVX1-NEXT: vmovaps %ymm0, 224(%rdi)
2342 ; AVX1-NEXT: vmovaps %ymm14, 192(%rdi)
2343 ; AVX1-NEXT: vmovaps %ymm13, 160(%rdi)
2344 ; AVX1-NEXT: vmovaps %ymm12, 128(%rdi)
2345 ; AVX1-NEXT: vmovaps %ymm11, 96(%rdi)
2346 ; AVX1-NEXT: vmovaps %ymm10, 64(%rdi)
2347 ; AVX1-NEXT: vmovaps %ymm9, 32(%rdi)
2348 ; AVX1-NEXT: vmovaps %ymm8, (%rdi)
2349 ; AVX1-NEXT: movq %rbp, %rsp
2350 ; AVX1-NEXT: popq %rbp
2351 ; AVX1-NEXT: vzeroupper
2352 ; AVX1-NEXT: retq
2354 ; AVX2-LABEL: test_mul8x8_f32:
2355 ; AVX2: # %bb.0: # %entry
2356 ; AVX2-NEXT: pushq %rbp
2357 ; AVX2-NEXT: movq %rsp, %rbp
2358 ; AVX2-NEXT: andq $-32, %rsp
2359 ; AVX2-NEXT: subq $32, %rsp
2360 ; AVX2-NEXT: movq %rdi, %rax
2361 ; AVX2-NEXT: vbroadcastss 16(%rbp), %ymm8
2362 ; AVX2-NEXT: vmulps %ymm0, %ymm8, %ymm8
2363 ; AVX2-NEXT: vbroadcastss 20(%rbp), %ymm9
2364 ; AVX2-NEXT: vmulps %ymm1, %ymm9, %ymm9
2365 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2366 ; AVX2-NEXT: vbroadcastss 24(%rbp), %ymm9
2367 ; AVX2-NEXT: vmulps %ymm2, %ymm9, %ymm9
2368 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2369 ; AVX2-NEXT: vbroadcastss 28(%rbp), %ymm9
2370 ; AVX2-NEXT: vmulps %ymm3, %ymm9, %ymm9
2371 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2372 ; AVX2-NEXT: vbroadcastss 32(%rbp), %ymm9
2373 ; AVX2-NEXT: vmulps %ymm4, %ymm9, %ymm9
2374 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2375 ; AVX2-NEXT: vbroadcastss 36(%rbp), %ymm9
2376 ; AVX2-NEXT: vmulps %ymm5, %ymm9, %ymm9
2377 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2378 ; AVX2-NEXT: vbroadcastss 40(%rbp), %ymm9
2379 ; AVX2-NEXT: vmulps %ymm6, %ymm9, %ymm9
2380 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2381 ; AVX2-NEXT: vbroadcastss 44(%rbp), %ymm9
2382 ; AVX2-NEXT: vmulps %ymm7, %ymm9, %ymm9
2383 ; AVX2-NEXT: vaddps %ymm9, %ymm8, %ymm8
2384 ; AVX2-NEXT: vbroadcastss 48(%rbp), %ymm9
2385 ; AVX2-NEXT: vmulps %ymm0, %ymm9, %ymm9
2386 ; AVX2-NEXT: vbroadcastss 52(%rbp), %ymm10
2387 ; AVX2-NEXT: vmulps %ymm1, %ymm10, %ymm10
2388 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2389 ; AVX2-NEXT: vbroadcastss 56(%rbp), %ymm10
2390 ; AVX2-NEXT: vmulps %ymm2, %ymm10, %ymm10
2391 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2392 ; AVX2-NEXT: vbroadcastss 60(%rbp), %ymm10
2393 ; AVX2-NEXT: vmulps %ymm3, %ymm10, %ymm10
2394 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2395 ; AVX2-NEXT: vbroadcastss 64(%rbp), %ymm10
2396 ; AVX2-NEXT: vmulps %ymm4, %ymm10, %ymm10
2397 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2398 ; AVX2-NEXT: vbroadcastss 68(%rbp), %ymm10
2399 ; AVX2-NEXT: vmulps %ymm5, %ymm10, %ymm10
2400 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2401 ; AVX2-NEXT: vbroadcastss 72(%rbp), %ymm10
2402 ; AVX2-NEXT: vmulps %ymm6, %ymm10, %ymm10
2403 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2404 ; AVX2-NEXT: vbroadcastss 76(%rbp), %ymm10
2405 ; AVX2-NEXT: vmulps %ymm7, %ymm10, %ymm10
2406 ; AVX2-NEXT: vaddps %ymm10, %ymm9, %ymm9
2407 ; AVX2-NEXT: vbroadcastss 80(%rbp), %ymm10
2408 ; AVX2-NEXT: vmulps %ymm0, %ymm10, %ymm10
2409 ; AVX2-NEXT: vbroadcastss 84(%rbp), %ymm11
2410 ; AVX2-NEXT: vmulps %ymm1, %ymm11, %ymm11
2411 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2412 ; AVX2-NEXT: vbroadcastss 88(%rbp), %ymm11
2413 ; AVX2-NEXT: vmulps %ymm2, %ymm11, %ymm11
2414 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2415 ; AVX2-NEXT: vbroadcastss 92(%rbp), %ymm11
2416 ; AVX2-NEXT: vmulps %ymm3, %ymm11, %ymm11
2417 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2418 ; AVX2-NEXT: vbroadcastss 96(%rbp), %ymm11
2419 ; AVX2-NEXT: vmulps %ymm4, %ymm11, %ymm11
2420 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2421 ; AVX2-NEXT: vbroadcastss 100(%rbp), %ymm11
2422 ; AVX2-NEXT: vmulps %ymm5, %ymm11, %ymm11
2423 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2424 ; AVX2-NEXT: vbroadcastss 104(%rbp), %ymm11
2425 ; AVX2-NEXT: vmulps %ymm6, %ymm11, %ymm11
2426 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2427 ; AVX2-NEXT: vbroadcastss 108(%rbp), %ymm11
2428 ; AVX2-NEXT: vmulps %ymm7, %ymm11, %ymm11
2429 ; AVX2-NEXT: vaddps %ymm11, %ymm10, %ymm10
2430 ; AVX2-NEXT: vbroadcastss 112(%rbp), %ymm11
2431 ; AVX2-NEXT: vmulps %ymm0, %ymm11, %ymm11
2432 ; AVX2-NEXT: vbroadcastss 116(%rbp), %ymm12
2433 ; AVX2-NEXT: vmulps %ymm1, %ymm12, %ymm12
2434 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2435 ; AVX2-NEXT: vbroadcastss 120(%rbp), %ymm12
2436 ; AVX2-NEXT: vmulps %ymm2, %ymm12, %ymm12
2437 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2438 ; AVX2-NEXT: vbroadcastss 124(%rbp), %ymm12
2439 ; AVX2-NEXT: vmulps %ymm3, %ymm12, %ymm12
2440 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2441 ; AVX2-NEXT: vbroadcastss 128(%rbp), %ymm12
2442 ; AVX2-NEXT: vmulps %ymm4, %ymm12, %ymm12
2443 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2444 ; AVX2-NEXT: vbroadcastss 132(%rbp), %ymm12
2445 ; AVX2-NEXT: vmulps %ymm5, %ymm12, %ymm12
2446 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2447 ; AVX2-NEXT: vbroadcastss 136(%rbp), %ymm12
2448 ; AVX2-NEXT: vmulps %ymm6, %ymm12, %ymm12
2449 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2450 ; AVX2-NEXT: vbroadcastss 140(%rbp), %ymm12
2451 ; AVX2-NEXT: vmulps %ymm7, %ymm12, %ymm12
2452 ; AVX2-NEXT: vaddps %ymm12, %ymm11, %ymm11
2453 ; AVX2-NEXT: vbroadcastss 144(%rbp), %ymm12
2454 ; AVX2-NEXT: vmulps %ymm0, %ymm12, %ymm12
2455 ; AVX2-NEXT: vbroadcastss 148(%rbp), %ymm13
2456 ; AVX2-NEXT: vmulps %ymm1, %ymm13, %ymm13
2457 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2458 ; AVX2-NEXT: vbroadcastss 152(%rbp), %ymm13
2459 ; AVX2-NEXT: vmulps %ymm2, %ymm13, %ymm13
2460 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2461 ; AVX2-NEXT: vbroadcastss 156(%rbp), %ymm13
2462 ; AVX2-NEXT: vmulps %ymm3, %ymm13, %ymm13
2463 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2464 ; AVX2-NEXT: vbroadcastss 160(%rbp), %ymm13
2465 ; AVX2-NEXT: vmulps %ymm4, %ymm13, %ymm13
2466 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2467 ; AVX2-NEXT: vbroadcastss 164(%rbp), %ymm13
2468 ; AVX2-NEXT: vmulps %ymm5, %ymm13, %ymm13
2469 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2470 ; AVX2-NEXT: vbroadcastss 168(%rbp), %ymm13
2471 ; AVX2-NEXT: vmulps %ymm6, %ymm13, %ymm13
2472 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2473 ; AVX2-NEXT: vbroadcastss 172(%rbp), %ymm13
2474 ; AVX2-NEXT: vmulps %ymm7, %ymm13, %ymm13
2475 ; AVX2-NEXT: vaddps %ymm13, %ymm12, %ymm12
2476 ; AVX2-NEXT: vbroadcastss 176(%rbp), %ymm13
2477 ; AVX2-NEXT: vmulps %ymm0, %ymm13, %ymm13
2478 ; AVX2-NEXT: vbroadcastss 180(%rbp), %ymm14
2479 ; AVX2-NEXT: vmulps %ymm1, %ymm14, %ymm14
2480 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2481 ; AVX2-NEXT: vbroadcastss 184(%rbp), %ymm14
2482 ; AVX2-NEXT: vmulps %ymm2, %ymm14, %ymm14
2483 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2484 ; AVX2-NEXT: vbroadcastss 188(%rbp), %ymm14
2485 ; AVX2-NEXT: vmulps %ymm3, %ymm14, %ymm14
2486 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2487 ; AVX2-NEXT: vbroadcastss 192(%rbp), %ymm14
2488 ; AVX2-NEXT: vmulps %ymm4, %ymm14, %ymm14
2489 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2490 ; AVX2-NEXT: vbroadcastss 196(%rbp), %ymm14
2491 ; AVX2-NEXT: vmulps %ymm5, %ymm14, %ymm14
2492 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2493 ; AVX2-NEXT: vbroadcastss 200(%rbp), %ymm14
2494 ; AVX2-NEXT: vmulps %ymm6, %ymm14, %ymm14
2495 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2496 ; AVX2-NEXT: vbroadcastss 204(%rbp), %ymm14
2497 ; AVX2-NEXT: vmulps %ymm7, %ymm14, %ymm14
2498 ; AVX2-NEXT: vaddps %ymm14, %ymm13, %ymm13
2499 ; AVX2-NEXT: vbroadcastss 208(%rbp), %ymm14
2500 ; AVX2-NEXT: vmulps %ymm0, %ymm14, %ymm14
2501 ; AVX2-NEXT: vbroadcastss 212(%rbp), %ymm15
2502 ; AVX2-NEXT: vmulps %ymm1, %ymm15, %ymm15
2503 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2504 ; AVX2-NEXT: vbroadcastss 216(%rbp), %ymm15
2505 ; AVX2-NEXT: vmulps %ymm2, %ymm15, %ymm15
2506 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2507 ; AVX2-NEXT: vbroadcastss 220(%rbp), %ymm15
2508 ; AVX2-NEXT: vmulps %ymm3, %ymm15, %ymm15
2509 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2510 ; AVX2-NEXT: vbroadcastss 224(%rbp), %ymm15
2511 ; AVX2-NEXT: vmulps %ymm4, %ymm15, %ymm15
2512 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2513 ; AVX2-NEXT: vbroadcastss 228(%rbp), %ymm15
2514 ; AVX2-NEXT: vmulps %ymm5, %ymm15, %ymm15
2515 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2516 ; AVX2-NEXT: vbroadcastss 232(%rbp), %ymm15
2517 ; AVX2-NEXT: vmulps %ymm6, %ymm15, %ymm15
2518 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2519 ; AVX2-NEXT: vbroadcastss 236(%rbp), %ymm15
2520 ; AVX2-NEXT: vmulps %ymm7, %ymm15, %ymm15
2521 ; AVX2-NEXT: vaddps %ymm15, %ymm14, %ymm14
2522 ; AVX2-NEXT: vbroadcastss 240(%rbp), %ymm15
2523 ; AVX2-NEXT: vmulps %ymm0, %ymm15, %ymm0
2524 ; AVX2-NEXT: vbroadcastss 244(%rbp), %ymm15
2525 ; AVX2-NEXT: vmulps %ymm1, %ymm15, %ymm1
2526 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2527 ; AVX2-NEXT: vbroadcastss 248(%rbp), %ymm1
2528 ; AVX2-NEXT: vmulps %ymm1, %ymm2, %ymm1
2529 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2530 ; AVX2-NEXT: vbroadcastss 252(%rbp), %ymm1
2531 ; AVX2-NEXT: vmulps %ymm1, %ymm3, %ymm1
2532 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2533 ; AVX2-NEXT: vbroadcastss 256(%rbp), %ymm1
2534 ; AVX2-NEXT: vmulps %ymm1, %ymm4, %ymm1
2535 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2536 ; AVX2-NEXT: vbroadcastss 260(%rbp), %ymm1
2537 ; AVX2-NEXT: vmulps %ymm1, %ymm5, %ymm1
2538 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2539 ; AVX2-NEXT: vbroadcastss 264(%rbp), %ymm1
2540 ; AVX2-NEXT: vmulps %ymm1, %ymm6, %ymm1
2541 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2542 ; AVX2-NEXT: vbroadcastss 268(%rbp), %ymm1
2543 ; AVX2-NEXT: vmulps %ymm1, %ymm7, %ymm1
2544 ; AVX2-NEXT: vaddps %ymm1, %ymm0, %ymm0
2545 ; AVX2-NEXT: vmovaps %ymm0, 224(%rdi)
2546 ; AVX2-NEXT: vmovaps %ymm14, 192(%rdi)
2547 ; AVX2-NEXT: vmovaps %ymm13, 160(%rdi)
2548 ; AVX2-NEXT: vmovaps %ymm12, 128(%rdi)
2549 ; AVX2-NEXT: vmovaps %ymm11, 96(%rdi)
2550 ; AVX2-NEXT: vmovaps %ymm10, 64(%rdi)
2551 ; AVX2-NEXT: vmovaps %ymm9, 32(%rdi)
2552 ; AVX2-NEXT: vmovaps %ymm8, (%rdi)
2553 ; AVX2-NEXT: movq %rbp, %rsp
2554 ; AVX2-NEXT: popq %rbp
2555 ; AVX2-NEXT: vzeroupper
2556 ; AVX2-NEXT: retq
2557 ;
2558 ; AVX512F-LABEL: test_mul8x8_f32:
2559 ; AVX512F: # %bb.0: # %entry
2560 ; AVX512F-NEXT: vextractf64x4 $1, %zmm0, %ymm11
2561 ; AVX512F-NEXT: vextractf64x4 $1, %zmm1, %ymm10
2562 ; AVX512F-NEXT: vextractf64x4 $1, %zmm2, %ymm9
2563 ; AVX512F-NEXT: vextractf64x4 $1, %zmm3, %ymm8
2564 ; AVX512F-NEXT: vbroadcastss %xmm4, %ymm12
2565 ; AVX512F-NEXT: vmulps %ymm0, %ymm12, %ymm12
2566 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm13 = xmm4[1,1,3,3]
2567 ; AVX512F-NEXT: vbroadcastsd %xmm13, %ymm13
2568 ; AVX512F-NEXT: vmulps %ymm13, %ymm11, %ymm13
2569 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2570 ; AVX512F-NEXT: vshufps {{.*#+}} xmm13 = xmm4[2,2,2,2]
2571 ; AVX512F-NEXT: vbroadcastsd %xmm13, %ymm13
2572 ; AVX512F-NEXT: vmulps %ymm1, %ymm13, %ymm13
2573 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2574 ; AVX512F-NEXT: vshufps {{.*#+}} xmm13 = xmm4[3,3,3,3]
2575 ; AVX512F-NEXT: vbroadcastsd %xmm13, %ymm13
2576 ; AVX512F-NEXT: vmulps %ymm13, %ymm10, %ymm13
2577 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2578 ; AVX512F-NEXT: vextractf128 $1, %ymm4, %xmm13
2579 ; AVX512F-NEXT: vbroadcastss %xmm13, %ymm13
2580 ; AVX512F-NEXT: vmulps %ymm2, %ymm13, %ymm13
2581 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2582 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm13 = ymm4[1,1,3,3,5,5,7,7]
2583 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2584 ; AVX512F-NEXT: vmulps %ymm13, %ymm9, %ymm13
2585 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2586 ; AVX512F-NEXT: vshufps {{.*#+}} ymm13 = ymm4[2,2,2,2,6,6,6,6]
2587 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2588 ; AVX512F-NEXT: vmulps %ymm3, %ymm13, %ymm13
2589 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2590 ; AVX512F-NEXT: vshufps {{.*#+}} ymm13 = ymm4[3,3,3,3,7,7,7,7]
2591 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2592 ; AVX512F-NEXT: vmulps %ymm13, %ymm8, %ymm13
2593 ; AVX512F-NEXT: vaddps %ymm13, %ymm12, %ymm12
2594 ; AVX512F-NEXT: vmovups %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
2595 ; AVX512F-NEXT: vextractf64x4 $1, %zmm4, %ymm13
2596 ; AVX512F-NEXT: vextractf32x4 $2, %zmm4, %xmm14
2597 ; AVX512F-NEXT: vbroadcastss %xmm14, %ymm14
2598 ; AVX512F-NEXT: vmulps %ymm0, %ymm14, %ymm14
2599 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm15 = xmm13[1,1,3,3]
2600 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2601 ; AVX512F-NEXT: vmulps %ymm15, %ymm11, %ymm15
2602 ; AVX512F-NEXT: vaddps %ymm15, %ymm14, %ymm14
2603 ; AVX512F-NEXT: vshufps {{.*#+}} xmm15 = xmm13[2,2,2,2]
2604 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2605 ; AVX512F-NEXT: vmulps %ymm1, %ymm15, %ymm15
2606 ; AVX512F-NEXT: vaddps %ymm15, %ymm14, %ymm14
2607 ; AVX512F-NEXT: vshufps {{.*#+}} xmm15 = xmm13[3,3,3,3]
2608 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2609 ; AVX512F-NEXT: vmulps %ymm15, %ymm10, %ymm15
2610 ; AVX512F-NEXT: vaddps %ymm15, %ymm14, %ymm14
2611 ; AVX512F-NEXT: vextractf32x4 $3, %zmm4, %xmm4
2612 ; AVX512F-NEXT: vbroadcastss %xmm4, %ymm4
2613 ; AVX512F-NEXT: vmulps %ymm4, %ymm2, %ymm4
2614 ; AVX512F-NEXT: vaddps %ymm4, %ymm14, %ymm4
2615 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm14 = ymm13[1,1,3,3,5,5,7,7]
2616 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2617 ; AVX512F-NEXT: vmulps %ymm14, %ymm9, %ymm14
2618 ; AVX512F-NEXT: vaddps %ymm4, %ymm14, %ymm4
2619 ; AVX512F-NEXT: vshufps {{.*#+}} ymm14 = ymm13[2,2,2,2,6,6,6,6]
2620 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2621 ; AVX512F-NEXT: vmulps %ymm3, %ymm14, %ymm14
2622 ; AVX512F-NEXT: vaddps %ymm4, %ymm14, %ymm4
2623 ; AVX512F-NEXT: vshufps {{.*#+}} ymm13 = ymm13[3,3,3,3,7,7,7,7]
2624 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2625 ; AVX512F-NEXT: vmulps %ymm13, %ymm8, %ymm13
2626 ; AVX512F-NEXT: vaddps %ymm4, %ymm13, %ymm4
2627 ; AVX512F-NEXT: vmovups %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
2628 ; AVX512F-NEXT: vbroadcastss %xmm5, %ymm13
2629 ; AVX512F-NEXT: vmulps %ymm0, %ymm13, %ymm13
2630 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm14 = xmm5[1,1,3,3]
2631 ; AVX512F-NEXT: vbroadcastsd %xmm14, %ymm14
2632 ; AVX512F-NEXT: vmulps %ymm14, %ymm11, %ymm14
2633 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2634 ; AVX512F-NEXT: vshufps {{.*#+}} xmm14 = xmm5[2,2,2,2]
2635 ; AVX512F-NEXT: vbroadcastsd %xmm14, %ymm14
2636 ; AVX512F-NEXT: vmulps %ymm1, %ymm14, %ymm14
2637 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2638 ; AVX512F-NEXT: vshufps {{.*#+}} xmm14 = xmm5[3,3,3,3]
2639 ; AVX512F-NEXT: vbroadcastsd %xmm14, %ymm14
2640 ; AVX512F-NEXT: vmulps %ymm14, %ymm10, %ymm14
2641 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2642 ; AVX512F-NEXT: vextractf128 $1, %ymm5, %xmm14
2643 ; AVX512F-NEXT: vbroadcastss %xmm14, %ymm14
2644 ; AVX512F-NEXT: vmulps %ymm2, %ymm14, %ymm14
2645 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2646 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm14 = ymm5[1,1,3,3,5,5,7,7]
2647 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2648 ; AVX512F-NEXT: vmulps %ymm14, %ymm9, %ymm14
2649 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2650 ; AVX512F-NEXT: vshufps {{.*#+}} ymm14 = ymm5[2,2,2,2,6,6,6,6]
2651 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2652 ; AVX512F-NEXT: vmulps %ymm3, %ymm14, %ymm14
2653 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2654 ; AVX512F-NEXT: vshufps {{.*#+}} ymm14 = ymm5[3,3,3,3,7,7,7,7]
2655 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2656 ; AVX512F-NEXT: vmulps %ymm14, %ymm8, %ymm14
2657 ; AVX512F-NEXT: vaddps %ymm14, %ymm13, %ymm13
2658 ; AVX512F-NEXT: vextractf64x4 $1, %zmm5, %ymm14
2659 ; AVX512F-NEXT: vextractf32x4 $2, %zmm5, %xmm15
2660 ; AVX512F-NEXT: vbroadcastss %xmm15, %ymm15
2661 ; AVX512F-NEXT: vmulps %ymm0, %ymm15, %ymm15
2662 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm12 = xmm14[1,1,3,3]
2663 ; AVX512F-NEXT: vbroadcastsd %xmm12, %ymm12
2664 ; AVX512F-NEXT: vmulps %ymm12, %ymm11, %ymm12
2665 ; AVX512F-NEXT: vaddps %ymm12, %ymm15, %ymm12
2666 ; AVX512F-NEXT: vshufps {{.*#+}} xmm15 = xmm14[2,2,2,2]
2667 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2668 ; AVX512F-NEXT: vmulps %ymm1, %ymm15, %ymm15
2669 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2670 ; AVX512F-NEXT: vshufps {{.*#+}} xmm15 = xmm14[3,3,3,3]
2671 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2672 ; AVX512F-NEXT: vmulps %ymm15, %ymm10, %ymm15
2673 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2674 ; AVX512F-NEXT: vextractf32x4 $3, %zmm5, %xmm5
2675 ; AVX512F-NEXT: vbroadcastss %xmm5, %ymm5
2676 ; AVX512F-NEXT: vmulps %ymm5, %ymm2, %ymm5
2677 ; AVX512F-NEXT: vaddps %ymm5, %ymm12, %ymm5
2678 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm12 = ymm14[1,1,3,3,5,5,7,7]
2679 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,2,2,2]
2680 ; AVX512F-NEXT: vmulps %ymm12, %ymm9, %ymm12
2681 ; AVX512F-NEXT: vaddps %ymm5, %ymm12, %ymm5
2682 ; AVX512F-NEXT: vshufps {{.*#+}} ymm12 = ymm14[2,2,2,2,6,6,6,6]
2683 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,2,2,2]
2684 ; AVX512F-NEXT: vmulps %ymm3, %ymm12, %ymm12
2685 ; AVX512F-NEXT: vaddps %ymm5, %ymm12, %ymm5
2686 ; AVX512F-NEXT: vshufps {{.*#+}} ymm12 = ymm14[3,3,3,3,7,7,7,7]
2687 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm12 = ymm12[2,2,2,2]
2688 ; AVX512F-NEXT: vmulps %ymm12, %ymm8, %ymm12
2689 ; AVX512F-NEXT: vaddps %ymm5, %ymm12, %ymm5
2690 ; AVX512F-NEXT: vbroadcastss %xmm6, %ymm12
2691 ; AVX512F-NEXT: vmulps %ymm0, %ymm12, %ymm12
2692 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm14 = xmm6[1,1,3,3]
2693 ; AVX512F-NEXT: vbroadcastsd %xmm14, %ymm14
2694 ; AVX512F-NEXT: vmulps %ymm14, %ymm11, %ymm14
2695 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm12
2696 ; AVX512F-NEXT: vshufps {{.*#+}} xmm14 = xmm6[2,2,2,2]
2697 ; AVX512F-NEXT: vbroadcastsd %xmm14, %ymm14
2698 ; AVX512F-NEXT: vmulps %ymm1, %ymm14, %ymm14
2699 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm12
2700 ; AVX512F-NEXT: vshufps {{.*#+}} xmm14 = xmm6[3,3,3,3]
2701 ; AVX512F-NEXT: vbroadcastsd %xmm14, %ymm14
2702 ; AVX512F-NEXT: vmulps %ymm14, %ymm10, %ymm14
2703 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm12
2704 ; AVX512F-NEXT: vextractf128 $1, %ymm6, %xmm14
2705 ; AVX512F-NEXT: vbroadcastss %xmm14, %ymm14
2706 ; AVX512F-NEXT: vmulps %ymm2, %ymm14, %ymm14
2707 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm12
2708 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm14 = ymm6[1,1,3,3,5,5,7,7]
2709 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2710 ; AVX512F-NEXT: vmulps %ymm14, %ymm9, %ymm14
2711 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm12
2712 ; AVX512F-NEXT: vshufps {{.*#+}} ymm14 = ymm6[2,2,2,2,6,6,6,6]
2713 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2714 ; AVX512F-NEXT: vmulps %ymm3, %ymm14, %ymm14
2715 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm12
2716 ; AVX512F-NEXT: vshufps {{.*#+}} ymm14 = ymm6[3,3,3,3,7,7,7,7]
2717 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2718 ; AVX512F-NEXT: vmulps %ymm14, %ymm8, %ymm14
2719 ; AVX512F-NEXT: vaddps %ymm14, %ymm12, %ymm14
2720 ; AVX512F-NEXT: vextractf32x4 $2, %zmm6, %xmm12
2721 ; AVX512F-NEXT: vbroadcastss %xmm12, %ymm12
2722 ; AVX512F-NEXT: vmulps %ymm0, %ymm12, %ymm12
2723 ; AVX512F-NEXT: vextractf64x4 $1, %zmm6, %ymm15
2724 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm4 = xmm15[1,1,3,3]
2725 ; AVX512F-NEXT: vbroadcastsd %xmm4, %ymm4
2726 ; AVX512F-NEXT: vmulps %ymm4, %ymm11, %ymm4
2727 ; AVX512F-NEXT: vaddps %ymm4, %ymm12, %ymm4
2728 ; AVX512F-NEXT: vshufps {{.*#+}} xmm12 = xmm15[2,2,2,2]
2729 ; AVX512F-NEXT: vbroadcastsd %xmm12, %ymm12
2730 ; AVX512F-NEXT: vmulps %ymm1, %ymm12, %ymm12
2731 ; AVX512F-NEXT: vaddps %ymm4, %ymm12, %ymm4
2732 ; AVX512F-NEXT: vshufps {{.*#+}} xmm12 = xmm15[3,3,3,3]
2733 ; AVX512F-NEXT: vbroadcastsd %xmm12, %ymm12
2734 ; AVX512F-NEXT: vmulps %ymm12, %ymm10, %ymm12
2735 ; AVX512F-NEXT: vaddps %ymm4, %ymm12, %ymm4
2736 ; AVX512F-NEXT: vextractf32x4 $3, %zmm6, %xmm6
2737 ; AVX512F-NEXT: vbroadcastss %xmm6, %ymm6
2738 ; AVX512F-NEXT: vmulps %ymm6, %ymm2, %ymm6
2739 ; AVX512F-NEXT: vaddps %ymm6, %ymm4, %ymm4
2740 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm6 = ymm15[1,1,3,3,5,5,7,7]
2741 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,2,2,2]
2742 ; AVX512F-NEXT: vmulps %ymm6, %ymm9, %ymm6
2743 ; AVX512F-NEXT: vaddps %ymm6, %ymm4, %ymm4
2744 ; AVX512F-NEXT: vshufps {{.*#+}} ymm6 = ymm15[2,2,2,2,6,6,6,6]
2745 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,2,2,2]
2746 ; AVX512F-NEXT: vmulps %ymm6, %ymm3, %ymm6
2747 ; AVX512F-NEXT: vaddps %ymm6, %ymm4, %ymm4
2748 ; AVX512F-NEXT: vshufps {{.*#+}} ymm6 = ymm15[3,3,3,3,7,7,7,7]
2749 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm6 = ymm6[2,2,2,2]
2750 ; AVX512F-NEXT: vmulps %ymm6, %ymm8, %ymm6
2751 ; AVX512F-NEXT: vaddps %ymm6, %ymm4, %ymm6
2752 ; AVX512F-NEXT: vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
2753 ; AVX512F-NEXT: vinsertf64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm4 # 32-byte Folded Reload
2754 ; AVX512F-NEXT: vbroadcastss %xmm7, %ymm12
2755 ; AVX512F-NEXT: vmulps %ymm0, %ymm12, %ymm12
2756 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm15 = xmm7[1,1,3,3]
2757 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2758 ; AVX512F-NEXT: vmulps %ymm15, %ymm11, %ymm15
2759 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2760 ; AVX512F-NEXT: vshufps {{.*#+}} xmm15 = xmm7[2,2,2,2]
2761 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2762 ; AVX512F-NEXT: vmulps %ymm1, %ymm15, %ymm15
2763 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2764 ; AVX512F-NEXT: vshufps {{.*#+}} xmm15 = xmm7[3,3,3,3]
2765 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2766 ; AVX512F-NEXT: vmulps %ymm15, %ymm10, %ymm15
2767 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2768 ; AVX512F-NEXT: vextractf128 $1, %ymm7, %xmm15
2769 ; AVX512F-NEXT: vbroadcastss %xmm15, %ymm15
2770 ; AVX512F-NEXT: vmulps %ymm2, %ymm15, %ymm15
2771 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2772 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm15 = ymm7[1,1,3,3,5,5,7,7]
2773 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2774 ; AVX512F-NEXT: vmulps %ymm15, %ymm9, %ymm15
2775 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2776 ; AVX512F-NEXT: vshufps {{.*#+}} ymm15 = ymm7[2,2,2,2,6,6,6,6]
2777 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2778 ; AVX512F-NEXT: vmulps %ymm3, %ymm15, %ymm15
2779 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2780 ; AVX512F-NEXT: vshufps {{.*#+}} ymm15 = ymm7[3,3,3,3,7,7,7,7]
2781 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2782 ; AVX512F-NEXT: vmulps %ymm15, %ymm8, %ymm15
2783 ; AVX512F-NEXT: vaddps %ymm15, %ymm12, %ymm12
2784 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm5, %zmm13, %zmm5
2785 ; AVX512F-NEXT: vextractf64x4 $1, %zmm7, %ymm13
2786 ; AVX512F-NEXT: vextractf32x4 $2, %zmm7, %xmm15
2787 ; AVX512F-NEXT: vbroadcastss %xmm15, %ymm15
2788 ; AVX512F-NEXT: vmulps %ymm0, %ymm15, %ymm0
2789 ; AVX512F-NEXT: vmovshdup {{.*#+}} xmm15 = xmm13[1,1,3,3]
2790 ; AVX512F-NEXT: vbroadcastsd %xmm15, %ymm15
2791 ; AVX512F-NEXT: vmulps %ymm15, %ymm11, %ymm11
2792 ; AVX512F-NEXT: vaddps %ymm0, %ymm11, %ymm0
2793 ; AVX512F-NEXT: vshufps {{.*#+}} xmm11 = xmm13[2,2,2,2]
2794 ; AVX512F-NEXT: vbroadcastsd %xmm11, %ymm11
2795 ; AVX512F-NEXT: vmulps %ymm1, %ymm11, %ymm1
2796 ; AVX512F-NEXT: vaddps %ymm1, %ymm0, %ymm0
2797 ; AVX512F-NEXT: vshufps {{.*#+}} xmm1 = xmm13[3,3,3,3]
2798 ; AVX512F-NEXT: vbroadcastsd %xmm1, %ymm1
2799 ; AVX512F-NEXT: vmulps %ymm1, %ymm10, %ymm1
2800 ; AVX512F-NEXT: vaddps %ymm1, %ymm0, %ymm0
2801 ; AVX512F-NEXT: vextractf32x4 $3, %zmm7, %xmm1
2802 ; AVX512F-NEXT: vbroadcastss %xmm1, %ymm1
2803 ; AVX512F-NEXT: vmulps %ymm1, %ymm2, %ymm1
2804 ; AVX512F-NEXT: vaddps %ymm1, %ymm0, %ymm0
2805 ; AVX512F-NEXT: vmovshdup {{.*#+}} ymm1 = ymm13[1,1,3,3,5,5,7,7]
2806 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,2,2,2]
2807 ; AVX512F-NEXT: vmulps %ymm1, %ymm9, %ymm1
2808 ; AVX512F-NEXT: vaddps %ymm1, %ymm0, %ymm0
2809 ; AVX512F-NEXT: vshufps {{.*#+}} ymm1 = ymm13[2,2,2,2,6,6,6,6]
2810 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,2,2,2]
2811 ; AVX512F-NEXT: vmulps %ymm1, %ymm3, %ymm1
2812 ; AVX512F-NEXT: vaddps %ymm1, %ymm0, %ymm0
2813 ; AVX512F-NEXT: vshufps {{.*#+}} ymm1 = ymm13[3,3,3,3,7,7,7,7]
2814 ; AVX512F-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,2,2,2]
2815 ; AVX512F-NEXT: vmulps %ymm1, %ymm8, %ymm1
2816 ; AVX512F-NEXT: vaddps %ymm1, %ymm0, %ymm0
2817 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm6, %zmm14, %zmm2
2818 ; AVX512F-NEXT: vinsertf64x4 $1, %ymm0, %zmm12, %zmm3
2819 ; AVX512F-NEXT: vmovaps %zmm4, %zmm0
2820 ; AVX512F-NEXT: vmovaps %zmm5, %zmm1
2821 ; AVX512F-NEXT: retq
2822 ;
2823 ; AVX512VL-LABEL: test_mul8x8_f32:
2824 ; AVX512VL: # %bb.0: # %entry
2825 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm0, %ymm11
2826 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm1, %ymm10
2827 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm2, %ymm9
2828 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm3, %ymm8
2829 ; AVX512VL-NEXT: vbroadcastss %xmm4, %ymm12
2830 ; AVX512VL-NEXT: vmulps %ymm0, %ymm12, %ymm12
2831 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm13 = xmm4[1,1,3,3]
2832 ; AVX512VL-NEXT: vbroadcastsd %xmm13, %ymm13
2833 ; AVX512VL-NEXT: vmulps %ymm13, %ymm11, %ymm13
2834 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2835 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm13 = xmm4[2,2,2,2]
2836 ; AVX512VL-NEXT: vbroadcastsd %xmm13, %ymm13
2837 ; AVX512VL-NEXT: vmulps %ymm1, %ymm13, %ymm13
2838 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2839 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm13 = xmm4[3,3,3,3]
2840 ; AVX512VL-NEXT: vbroadcastsd %xmm13, %ymm13
2841 ; AVX512VL-NEXT: vmulps %ymm13, %ymm10, %ymm13
2842 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2843 ; AVX512VL-NEXT: vextractf128 $1, %ymm4, %xmm13
2844 ; AVX512VL-NEXT: vbroadcastss %xmm13, %ymm13
2845 ; AVX512VL-NEXT: vmulps %ymm2, %ymm13, %ymm13
2846 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2847 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm13 = ymm4[1,1,3,3,5,5,7,7]
2848 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2849 ; AVX512VL-NEXT: vmulps %ymm13, %ymm9, %ymm13
2850 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2851 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm13 = ymm4[2,2,2,2,6,6,6,6]
2852 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2853 ; AVX512VL-NEXT: vmulps %ymm3, %ymm13, %ymm13
2854 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2855 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm13 = ymm4[3,3,3,3,7,7,7,7]
2856 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2857 ; AVX512VL-NEXT: vmulps %ymm13, %ymm8, %ymm13
2858 ; AVX512VL-NEXT: vaddps %ymm13, %ymm12, %ymm12
2859 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm4, %ymm13
2860 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm4, %xmm14
2861 ; AVX512VL-NEXT: vbroadcastss %xmm14, %ymm14
2862 ; AVX512VL-NEXT: vmulps %ymm0, %ymm14, %ymm14
2863 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm15 = xmm13[1,1,3,3]
2864 ; AVX512VL-NEXT: vbroadcastsd %xmm15, %ymm15
2865 ; AVX512VL-NEXT: vmulps %ymm15, %ymm11, %ymm15
2866 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2867 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm15 = xmm13[2,2,2,2]
2868 ; AVX512VL-NEXT: vbroadcastsd %xmm15, %ymm15
2869 ; AVX512VL-NEXT: vmulps %ymm1, %ymm15, %ymm15
2870 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2871 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm15 = xmm13[3,3,3,3]
2872 ; AVX512VL-NEXT: vbroadcastsd %xmm15, %ymm15
2873 ; AVX512VL-NEXT: vmulps %ymm15, %ymm10, %ymm15
2874 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2875 ; AVX512VL-NEXT: vextractf32x4 $3, %zmm4, %xmm4
2876 ; AVX512VL-NEXT: vbroadcastss %xmm4, %ymm4
2877 ; AVX512VL-NEXT: vmulps %ymm4, %ymm2, %ymm4
2878 ; AVX512VL-NEXT: vaddps %ymm4, %ymm14, %ymm4
2879 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm14 = ymm13[1,1,3,3,5,5,7,7]
2880 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2881 ; AVX512VL-NEXT: vmulps %ymm14, %ymm9, %ymm14
2882 ; AVX512VL-NEXT: vaddps %ymm4, %ymm14, %ymm4
2883 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm14 = ymm13[2,2,2,2,6,6,6,6]
2884 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2885 ; AVX512VL-NEXT: vmulps %ymm3, %ymm14, %ymm14
2886 ; AVX512VL-NEXT: vaddps %ymm4, %ymm14, %ymm4
2887 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm13 = ymm13[3,3,3,3,7,7,7,7]
2888 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm13 = ymm13[2,2,2,2]
2889 ; AVX512VL-NEXT: vmulps %ymm13, %ymm8, %ymm13
2890 ; AVX512VL-NEXT: vaddps %ymm4, %ymm13, %ymm4
2891 ; AVX512VL-NEXT: vbroadcastss %xmm5, %ymm13
2892 ; AVX512VL-NEXT: vmulps %ymm0, %ymm13, %ymm13
2893 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm14 = xmm5[1,1,3,3]
2894 ; AVX512VL-NEXT: vbroadcastsd %xmm14, %ymm14
2895 ; AVX512VL-NEXT: vmulps %ymm14, %ymm11, %ymm14
2896 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2897 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm14 = xmm5[2,2,2,2]
2898 ; AVX512VL-NEXT: vbroadcastsd %xmm14, %ymm14
2899 ; AVX512VL-NEXT: vmulps %ymm1, %ymm14, %ymm14
2900 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2901 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm14 = xmm5[3,3,3,3]
2902 ; AVX512VL-NEXT: vbroadcastsd %xmm14, %ymm14
2903 ; AVX512VL-NEXT: vmulps %ymm14, %ymm10, %ymm14
2904 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2905 ; AVX512VL-NEXT: vextractf128 $1, %ymm5, %xmm14
2906 ; AVX512VL-NEXT: vbroadcastss %xmm14, %ymm14
2907 ; AVX512VL-NEXT: vmulps %ymm2, %ymm14, %ymm14
2908 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2909 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm14 = ymm5[1,1,3,3,5,5,7,7]
2910 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2911 ; AVX512VL-NEXT: vmulps %ymm14, %ymm9, %ymm14
2912 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2913 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm14 = ymm5[2,2,2,2,6,6,6,6]
2914 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2915 ; AVX512VL-NEXT: vmulps %ymm3, %ymm14, %ymm14
2916 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2917 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm14 = ymm5[3,3,3,3,7,7,7,7]
2918 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2919 ; AVX512VL-NEXT: vmulps %ymm14, %ymm8, %ymm14
2920 ; AVX512VL-NEXT: vaddps %ymm14, %ymm13, %ymm13
2921 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm5, %ymm14
2922 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm5, %xmm15
2923 ; AVX512VL-NEXT: vbroadcastss %xmm15, %ymm15
2924 ; AVX512VL-NEXT: vmulps %ymm0, %ymm15, %ymm15
2925 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm16 = xmm14[1,1,3,3]
2926 ; AVX512VL-NEXT: vbroadcastsd %xmm16, %ymm16
2927 ; AVX512VL-NEXT: vmulps %ymm16, %ymm11, %ymm16
2928 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
2929 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm16 = xmm14[2,2,2,2]
2930 ; AVX512VL-NEXT: vbroadcastsd %xmm16, %ymm16
2931 ; AVX512VL-NEXT: vmulps %ymm16, %ymm1, %ymm16
2932 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
2933 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm16 = xmm14[3,3,3,3]
2934 ; AVX512VL-NEXT: vbroadcastsd %xmm16, %ymm16
2935 ; AVX512VL-NEXT: vmulps %ymm16, %ymm10, %ymm16
2936 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
2937 ; AVX512VL-NEXT: vextractf32x4 $3, %zmm5, %xmm5
2938 ; AVX512VL-NEXT: vbroadcastss %xmm5, %ymm5
2939 ; AVX512VL-NEXT: vmulps %ymm5, %ymm2, %ymm5
2940 ; AVX512VL-NEXT: vaddps %ymm5, %ymm15, %ymm5
2941 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm15 = ymm14[1,1,3,3,5,5,7,7]
2942 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2943 ; AVX512VL-NEXT: vmulps %ymm15, %ymm9, %ymm15
2944 ; AVX512VL-NEXT: vaddps %ymm5, %ymm15, %ymm5
2945 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm15 = ymm14[2,2,2,2,6,6,6,6]
2946 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2947 ; AVX512VL-NEXT: vmulps %ymm3, %ymm15, %ymm15
2948 ; AVX512VL-NEXT: vaddps %ymm5, %ymm15, %ymm5
2949 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm14 = ymm14[3,3,3,3,7,7,7,7]
2950 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm14 = ymm14[2,2,2,2]
2951 ; AVX512VL-NEXT: vmulps %ymm14, %ymm8, %ymm14
2952 ; AVX512VL-NEXT: vaddps %ymm5, %ymm14, %ymm5
2953 ; AVX512VL-NEXT: vbroadcastss %xmm6, %ymm14
2954 ; AVX512VL-NEXT: vmulps %ymm0, %ymm14, %ymm14
2955 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm15 = xmm6[1,1,3,3]
2956 ; AVX512VL-NEXT: vbroadcastsd %xmm15, %ymm15
2957 ; AVX512VL-NEXT: vmulps %ymm15, %ymm11, %ymm15
2958 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2959 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm15 = xmm6[2,2,2,2]
2960 ; AVX512VL-NEXT: vbroadcastsd %xmm15, %ymm15
2961 ; AVX512VL-NEXT: vmulps %ymm1, %ymm15, %ymm15
2962 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2963 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm15 = xmm6[3,3,3,3]
2964 ; AVX512VL-NEXT: vbroadcastsd %xmm15, %ymm15
2965 ; AVX512VL-NEXT: vmulps %ymm15, %ymm10, %ymm15
2966 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2967 ; AVX512VL-NEXT: vextractf128 $1, %ymm6, %xmm15
2968 ; AVX512VL-NEXT: vbroadcastss %xmm15, %ymm15
2969 ; AVX512VL-NEXT: vmulps %ymm2, %ymm15, %ymm15
2970 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2971 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm15 = ymm6[1,1,3,3,5,5,7,7]
2972 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2973 ; AVX512VL-NEXT: vmulps %ymm15, %ymm9, %ymm15
2974 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2975 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm15 = ymm6[2,2,2,2,6,6,6,6]
2976 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2977 ; AVX512VL-NEXT: vmulps %ymm3, %ymm15, %ymm15
2978 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2979 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm15 = ymm6[3,3,3,3,7,7,7,7]
2980 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
2981 ; AVX512VL-NEXT: vmulps %ymm15, %ymm8, %ymm15
2982 ; AVX512VL-NEXT: vaddps %ymm15, %ymm14, %ymm14
2983 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm6, %ymm15
2984 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm6, %xmm16
2985 ; AVX512VL-NEXT: vbroadcastss %xmm16, %ymm16
2986 ; AVX512VL-NEXT: vmulps %ymm16, %ymm0, %ymm16
2987 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm17 = xmm15[1,1,3,3]
2988 ; AVX512VL-NEXT: vbroadcastsd %xmm17, %ymm17
2989 ; AVX512VL-NEXT: vmulps %ymm17, %ymm11, %ymm17
2990 ; AVX512VL-NEXT: vaddps %ymm17, %ymm16, %ymm16
2991 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm17 = xmm15[2,2,2,2]
2992 ; AVX512VL-NEXT: vbroadcastsd %xmm17, %ymm17
2993 ; AVX512VL-NEXT: vmulps %ymm17, %ymm1, %ymm17
2994 ; AVX512VL-NEXT: vaddps %ymm17, %ymm16, %ymm16
2995 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm17 = xmm15[3,3,3,3]
2996 ; AVX512VL-NEXT: vbroadcastsd %xmm17, %ymm17
2997 ; AVX512VL-NEXT: vmulps %ymm17, %ymm10, %ymm17
2998 ; AVX512VL-NEXT: vaddps %ymm17, %ymm16, %ymm16
2999 ; AVX512VL-NEXT: vextractf32x4 $3, %zmm6, %xmm6
3000 ; AVX512VL-NEXT: vbroadcastss %xmm6, %ymm6
3001 ; AVX512VL-NEXT: vmulps %ymm6, %ymm2, %ymm6
3002 ; AVX512VL-NEXT: vaddps %ymm6, %ymm16, %ymm6
3003 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm16 = ymm15[1,1,3,3,5,5,7,7]
3004 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm16 = ymm16[2,2,2,2]
3005 ; AVX512VL-NEXT: vmulps %ymm16, %ymm9, %ymm16
3006 ; AVX512VL-NEXT: vaddps %ymm16, %ymm6, %ymm6
3007 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm16 = ymm15[2,2,2,2,6,6,6,6]
3008 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm16 = ymm16[2,2,2,2]
3009 ; AVX512VL-NEXT: vmulps %ymm16, %ymm3, %ymm16
3010 ; AVX512VL-NEXT: vaddps %ymm16, %ymm6, %ymm6
3011 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm15 = ymm15[3,3,3,3,7,7,7,7]
3012 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm15 = ymm15[2,2,2,2]
3013 ; AVX512VL-NEXT: vmulps %ymm15, %ymm8, %ymm15
3014 ; AVX512VL-NEXT: vaddps %ymm6, %ymm15, %ymm6
3015 ; AVX512VL-NEXT: vbroadcastss %xmm7, %ymm15
3016 ; AVX512VL-NEXT: vmulps %ymm0, %ymm15, %ymm15
3017 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm16 = xmm7[1,1,3,3]
3018 ; AVX512VL-NEXT: vbroadcastsd %xmm16, %ymm16
3019 ; AVX512VL-NEXT: vmulps %ymm16, %ymm11, %ymm16
3020 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3021 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm16 = xmm7[2,2,2,2]
3022 ; AVX512VL-NEXT: vbroadcastsd %xmm16, %ymm16
3023 ; AVX512VL-NEXT: vmulps %ymm16, %ymm1, %ymm16
3024 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3025 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm16 = xmm7[3,3,3,3]
3026 ; AVX512VL-NEXT: vbroadcastsd %xmm16, %ymm16
3027 ; AVX512VL-NEXT: vmulps %ymm16, %ymm10, %ymm16
3028 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3029 ; AVX512VL-NEXT: vextractf32x4 $1, %ymm7, %xmm16
3030 ; AVX512VL-NEXT: vbroadcastss %xmm16, %ymm16
3031 ; AVX512VL-NEXT: vmulps %ymm16, %ymm2, %ymm16
3032 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3033 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm16 = ymm7[1,1,3,3,5,5,7,7]
3034 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm16 = ymm16[2,2,2,2]
3035 ; AVX512VL-NEXT: vmulps %ymm16, %ymm9, %ymm16
3036 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3037 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm16 = ymm7[2,2,2,2,6,6,6,6]
3038 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm16 = ymm16[2,2,2,2]
3039 ; AVX512VL-NEXT: vmulps %ymm16, %ymm3, %ymm16
3040 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3041 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm16 = ymm7[3,3,3,3,7,7,7,7]
3042 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm16 = ymm16[2,2,2,2]
3043 ; AVX512VL-NEXT: vmulps %ymm16, %ymm8, %ymm16
3044 ; AVX512VL-NEXT: vaddps %ymm16, %ymm15, %ymm15
3045 ; AVX512VL-NEXT: vextractf64x4 $1, %zmm7, %ymm16
3046 ; AVX512VL-NEXT: vextractf32x4 $2, %zmm7, %xmm17
3047 ; AVX512VL-NEXT: vbroadcastss %xmm17, %ymm17
3048 ; AVX512VL-NEXT: vmulps %ymm17, %ymm0, %ymm0
3049 ; AVX512VL-NEXT: vmovshdup {{.*#+}} xmm17 = xmm16[1,1,3,3]
3050 ; AVX512VL-NEXT: vbroadcastsd %xmm17, %ymm17
3051 ; AVX512VL-NEXT: vmulps %ymm17, %ymm11, %ymm11
3052 ; AVX512VL-NEXT: vaddps %ymm0, %ymm11, %ymm0
3053 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm11 = xmm16[2,2,2,2]
3054 ; AVX512VL-NEXT: vbroadcastsd %xmm11, %ymm11
3055 ; AVX512VL-NEXT: vmulps %ymm1, %ymm11, %ymm1
3056 ; AVX512VL-NEXT: vaddps %ymm1, %ymm0, %ymm0
3057 ; AVX512VL-NEXT: vshufps {{.*#+}} xmm1 = xmm16[3,3,3,3]
3058 ; AVX512VL-NEXT: vbroadcastsd %xmm1, %ymm1
3059 ; AVX512VL-NEXT: vmulps %ymm1, %ymm10, %ymm1
3060 ; AVX512VL-NEXT: vaddps %ymm1, %ymm0, %ymm0
3061 ; AVX512VL-NEXT: vextractf32x4 $3, %zmm7, %xmm1
3062 ; AVX512VL-NEXT: vbroadcastss %xmm1, %ymm1
3063 ; AVX512VL-NEXT: vmulps %ymm1, %ymm2, %ymm1
3064 ; AVX512VL-NEXT: vaddps %ymm1, %ymm0, %ymm0
3065 ; AVX512VL-NEXT: vmovshdup {{.*#+}} ymm1 = ymm16[1,1,3,3,5,5,7,7]
3066 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,2,2,2]
3067 ; AVX512VL-NEXT: vmulps %ymm1, %ymm9, %ymm1
3068 ; AVX512VL-NEXT: vaddps %ymm1, %ymm0, %ymm0
3069 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm1 = ymm16[2,2,2,2,6,6,6,6]
3070 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,2,2,2]
3071 ; AVX512VL-NEXT: vmulps %ymm1, %ymm3, %ymm1
3072 ; AVX512VL-NEXT: vaddps %ymm1, %ymm0, %ymm0
3073 ; AVX512VL-NEXT: vshufps {{.*#+}} ymm1 = ymm16[3,3,3,3,7,7,7,7]
3074 ; AVX512VL-NEXT: vpermpd {{.*#+}} ymm1 = ymm1[2,2,2,2]
3075 ; AVX512VL-NEXT: vmulps %ymm1, %ymm8, %ymm1
3076 ; AVX512VL-NEXT: vaddps %ymm1, %ymm0, %ymm3
3077 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm4, %zmm12, %zmm0
3078 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm5, %zmm13, %zmm1
3079 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm6, %zmm14, %zmm2
3080 ; AVX512VL-NEXT: vinsertf64x4 $1, %ymm3, %zmm15, %zmm3
3081 ; AVX512VL-NEXT: retq
3082 entry:
3083 %split = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
3084 %split1 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
3085 %split2 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
3086 %split3 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
3087 %split4 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39>
3088 %split5 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
3089 %split6 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55>
3090 %split7 = shufflevector <64 x float> %a0, <64 x float> poison, <8 x i32> <i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3091 %splat.splat = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> zeroinitializer
3092 %0 = fmul <8 x float> %split, %splat.splat
3093 %splat.splat18 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1>
3094 %1 = fmul <8 x float> %split1, %splat.splat18
3095 %2 = fadd <8 x float> %0, %1
3096 %splat.splat21 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2>
3097 %3 = fmul <8 x float> %split2, %splat.splat21
3098 %4 = fadd <8 x float> %2, %3
3099 %splat.splat24 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
3100 %5 = fmul <8 x float> %split3, %splat.splat24
3101 %6 = fadd <8 x float> %4, %5
3102 %splat.splat27 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4>
3103 %7 = fmul <8 x float> %split4, %splat.splat27
3104 %8 = fadd <8 x float> %6, %7
3105 %splat.splat30 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5>
3106 %9 = fmul <8 x float> %split5, %splat.splat30
3107 %10 = fadd <8 x float> %8, %9
3108 %splat.splat33 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6>
3109 %11 = fmul <8 x float> %split6, %splat.splat33
3110 %12 = fadd <8 x float> %10, %11
3111 %splat.splat36 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
3112 %13 = fmul <8 x float> %split7, %splat.splat36
3113 %14 = fadd <8 x float> %12, %13
3114 %splat.splat39 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8>
3115 %15 = fmul <8 x float> %split, %splat.splat39
3116 %splat.splat42 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9>
3117 %16 = fmul <8 x float> %split1, %splat.splat42
3118 %17 = fadd <8 x float> %15, %16
3119 %splat.splat45 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10>
3120 %18 = fmul <8 x float> %split2, %splat.splat45
3121 %19 = fadd <8 x float> %17, %18
3122 %splat.splat48 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11>
3123 %20 = fmul <8 x float> %split3, %splat.splat48
3124 %21 = fadd <8 x float> %19, %20
3125 %splat.splat51 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12>
3126 %22 = fmul <8 x float> %split4, %splat.splat51
3127 %23 = fadd <8 x float> %21, %22
3128 %splat.splat54 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13>
3129 %24 = fmul <8 x float> %split5, %splat.splat54
3130 %25 = fadd <8 x float> %23, %24
3131 %splat.splat57 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14>
3132 %26 = fmul <8 x float> %split6, %splat.splat57
3133 %27 = fadd <8 x float> %25, %26
3134 %splat.splat60 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
3135 %28 = fmul <8 x float> %split7, %splat.splat60
3136 %29 = fadd <8 x float> %27, %28
3137 %splat.splat63 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
3138 %30 = fmul <8 x float> %split, %splat.splat63
3139 %splat.splat66 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17>
3140 %31 = fmul <8 x float> %split1, %splat.splat66
3141 %32 = fadd <8 x float> %30, %31
3142 %splat.splat69 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18>
3143 %33 = fmul <8 x float> %split2, %splat.splat69
3144 %34 = fadd <8 x float> %32, %33
3145 %splat.splat72 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19>
3146 %35 = fmul <8 x float> %split3, %splat.splat72
3147 %36 = fadd <8 x float> %34, %35
3148 %splat.splat75 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20>
3149 %37 = fmul <8 x float> %split4, %splat.splat75
3150 %38 = fadd <8 x float> %36, %37
3151 %splat.splat78 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21>
3152 %39 = fmul <8 x float> %split5, %splat.splat78
3153 %40 = fadd <8 x float> %38, %39
3154 %splat.splat81 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22>
3155 %41 = fmul <8 x float> %split6, %splat.splat81
3156 %42 = fadd <8 x float> %40, %41
3157 %splat.splat84 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23>
3158 %43 = fmul <8 x float> %split7, %splat.splat84
3159 %44 = fadd <8 x float> %42, %43
3160 %splat.splat87 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
3161 %45 = fmul <8 x float> %split, %splat.splat87
3162 %splat.splat90 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25>
3163 %46 = fmul <8 x float> %split1, %splat.splat90
3164 %47 = fadd <8 x float> %45, %46
3165 %splat.splat93 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26>
3166 %48 = fmul <8 x float> %split2, %splat.splat93
3167 %49 = fadd <8 x float> %47, %48
3168 %splat.splat96 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27>
3169 %50 = fmul <8 x float> %split3, %splat.splat96
3170 %51 = fadd <8 x float> %49, %50
3171 %splat.splat99 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28>
3172 %52 = fmul <8 x float> %split4, %splat.splat99
3173 %53 = fadd <8 x float> %51, %52
3174 %splat.splat102 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29>
3175 %54 = fmul <8 x float> %split5, %splat.splat102
3176 %55 = fadd <8 x float> %53, %54
3177 %splat.splat105 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30>
3178 %56 = fmul <8 x float> %split6, %splat.splat105
3179 %57 = fadd <8 x float> %55, %56
3180 %splat.splat108 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
3181 %58 = fmul <8 x float> %split7, %splat.splat108
3182 %59 = fadd <8 x float> %57, %58
3183 %splat.splat111 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32>
3184 %60 = fmul <8 x float> %split, %splat.splat111
3185 %splat.splat114 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33>
3186 %61 = fmul <8 x float> %split1, %splat.splat114
3187 %62 = fadd <8 x float> %60, %61
3188 %splat.splat117 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34>
3189 %63 = fmul <8 x float> %split2, %splat.splat117
3190 %64 = fadd <8 x float> %62, %63
3191 %splat.splat120 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35>
3192 %65 = fmul <8 x float> %split3, %splat.splat120
3193 %66 = fadd <8 x float> %64, %65
3194 %splat.splat123 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36>
3195 %67 = fmul <8 x float> %split4, %splat.splat123
3196 %68 = fadd <8 x float> %66, %67
3197 %splat.splat126 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37>
3198 %69 = fmul <8 x float> %split5, %splat.splat126
3199 %70 = fadd <8 x float> %68, %69
3200 %splat.splat129 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38>
3201 %71 = fmul <8 x float> %split6, %splat.splat129
3202 %72 = fadd <8 x float> %70, %71
3203 %splat.splat132 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39>
3204 %73 = fmul <8 x float> %split7, %splat.splat132
3205 %74 = fadd <8 x float> %72, %73
3206 %splat.splat135 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40>
3207 %75 = fmul <8 x float> %split, %splat.splat135
3208 %splat.splat138 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41>
3209 %76 = fmul <8 x float> %split1, %splat.splat138
3210 %77 = fadd <8 x float> %75, %76
3211 %splat.splat141 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42>
3212 %78 = fmul <8 x float> %split2, %splat.splat141
3213 %79 = fadd <8 x float> %77, %78
3214 %splat.splat144 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43>
3215 %80 = fmul <8 x float> %split3, %splat.splat144
3216 %81 = fadd <8 x float> %79, %80
3217 %splat.splat147 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44>
3218 %82 = fmul <8 x float> %split4, %splat.splat147
3219 %83 = fadd <8 x float> %81, %82
3220 %splat.splat150 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45>
3221 %84 = fmul <8 x float> %split5, %splat.splat150
3222 %85 = fadd <8 x float> %83, %84
3223 %splat.splat153 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46>
3224 %86 = fmul <8 x float> %split6, %splat.splat153
3225 %87 = fadd <8 x float> %85, %86
3226 %splat.splat156 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47>
3227 %88 = fmul <8 x float> %split7, %splat.splat156
3228 %89 = fadd <8 x float> %87, %88
3229 %splat.splat159 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48>
3230 %90 = fmul <8 x float> %split, %splat.splat159
3231 %splat.splat162 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49>
3232 %91 = fmul <8 x float> %split1, %splat.splat162
3233 %92 = fadd <8 x float> %90, %91
3234 %splat.splat165 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50>
3235 %93 = fmul <8 x float> %split2, %splat.splat165
3236 %94 = fadd <8 x float> %92, %93
3237 %splat.splat168 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51>
3238 %95 = fmul <8 x float> %split3, %splat.splat168
3239 %96 = fadd <8 x float> %94, %95
3240 %splat.splat171 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52>
3241 %97 = fmul <8 x float> %split4, %splat.splat171
3242 %98 = fadd <8 x float> %96, %97
3243 %splat.splat174 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53>
3244 %99 = fmul <8 x float> %split5, %splat.splat174
3245 %100 = fadd <8 x float> %98, %99
3246 %splat.splat177 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54>
3247 %101 = fmul <8 x float> %split6, %splat.splat177
3248 %102 = fadd <8 x float> %100, %101
3249 %splat.splat180 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55>
3250 %103 = fmul <8 x float> %split7, %splat.splat180
3251 %104 = fadd <8 x float> %102, %103
3252 %splat.splat183 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56>
3253 %105 = fmul <8 x float> %split, %splat.splat183
3254 %splat.splat186 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57>
3255 %106 = fmul <8 x float> %split1, %splat.splat186
3256 %107 = fadd <8 x float> %105, %106
3257 %splat.splat189 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58>
3258 %108 = fmul <8 x float> %split2, %splat.splat189
3259 %109 = fadd <8 x float> %107, %108
3260 %splat.splat192 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59>
3261 %110 = fmul <8 x float> %split3, %splat.splat192
3262 %111 = fadd <8 x float> %109, %110
3263 %splat.splat195 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60>
3264 %112 = fmul <8 x float> %split4, %splat.splat195
3265 %113 = fadd <8 x float> %111, %112
3266 %splat.splat198 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61>
3267 %114 = fmul <8 x float> %split5, %splat.splat198
3268 %115 = fadd <8 x float> %113, %114
3269 %splat.splat201 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62>
3270 %116 = fmul <8 x float> %split6, %splat.splat201
3271 %117 = fadd <8 x float> %115, %116
3272 %splat.splat204 = shufflevector <64 x float> %a1, <64 x float> undef, <8 x i32> <i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63>
3273 %118 = fmul <8 x float> %split7, %splat.splat204
3274 %119 = fadd <8 x float> %117, %118
3275 %120 = shufflevector <8 x float> %14, <8 x float> %29, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
3276 %121 = shufflevector <8 x float> %44, <8 x float> %59, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
3277 %122 = shufflevector <8 x float> %74, <8 x float> %89, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
3278 %123 = shufflevector <8 x float> %104, <8 x float> %119, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
3279 %124 = shufflevector <16 x float> %120, <16 x float> %121, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
3280 %125 = shufflevector <16 x float> %122, <16 x float> %123, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
3281 %126 = shufflevector <32 x float> %124, <32 x float> %125, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
3282 ret <64 x float> %126
3283 }
3285 define <64 x double> @test_mul8x8_f64(<64 x double> %a0, <64 x double> %a1) nounwind {
3286 ; SSE-LABEL: test_mul8x8_f64:
3287 ; SSE: # %bb.0: # %entry
3288 ; SSE-NEXT: subq $328, %rsp # imm = 0x148
3289 ; SSE-NEXT: movapd %xmm7, %xmm15
3290 ; SSE-NEXT: movapd %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3291 ; SSE-NEXT: movapd %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3292 ; SSE-NEXT: movapd %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3293 ; SSE-NEXT: movapd %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3294 ; SSE-NEXT: movapd %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3295 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3296 ; SSE-NEXT: movq %rdi, %rax
3297 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
3298 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm11
3299 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm13
3300 ; SSE-NEXT: movapd %xmm13, %xmm12
3301 ; SSE-NEXT: unpcklpd {{.*#+}} xmm12 = xmm12[0],xmm13[0]
3302 ; SSE-NEXT: movapd %xmm3, %xmm10
3303 ; SSE-NEXT: mulpd %xmm12, %xmm10
3304 ; SSE-NEXT: movapd %xmm2, %xmm8
3305 ; SSE-NEXT: mulpd %xmm12, %xmm8
3306 ; SSE-NEXT: movapd %xmm1, %xmm9
3307 ; SSE-NEXT: mulpd %xmm12, %xmm9
3308 ; SSE-NEXT: mulpd %xmm0, %xmm12
3309 ; SSE-NEXT: unpckhpd {{.*#+}} xmm13 = xmm13[1,1]
3310 ; SSE-NEXT: movapd %xmm7, %xmm2
3311 ; SSE-NEXT: mulpd %xmm13, %xmm2
3312 ; SSE-NEXT: addpd %xmm10, %xmm2
3313 ; SSE-NEXT: movapd %xmm6, %xmm7
3314 ; SSE-NEXT: movapd %xmm6, %xmm10
3315 ; SSE-NEXT: mulpd %xmm13, %xmm7
3316 ; SSE-NEXT: addpd %xmm8, %xmm7
3317 ; SSE-NEXT: movapd %xmm5, %xmm8
3318 ; SSE-NEXT: mulpd %xmm13, %xmm8
3319 ; SSE-NEXT: addpd %xmm9, %xmm8
3320 ; SSE-NEXT: mulpd %xmm4, %xmm13
3321 ; SSE-NEXT: addpd %xmm12, %xmm13
3322 ; SSE-NEXT: movapd %xmm11, %xmm6
3323 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm11[0]
3324 ; SSE-NEXT: movapd %xmm14, %xmm1
3325 ; SSE-NEXT: mulpd %xmm6, %xmm1
3326 ; SSE-NEXT: addpd %xmm13, %xmm1
3327 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3328 ; SSE-NEXT: mulpd %xmm6, %xmm3
3329 ; SSE-NEXT: addpd %xmm8, %xmm3
3330 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3331 ; SSE-NEXT: mulpd %xmm6, %xmm5
3332 ; SSE-NEXT: addpd %xmm7, %xmm5
3333 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3334 ; SSE-NEXT: addpd %xmm2, %xmm6
3335 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1,1]
3336 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3337 ; SSE-NEXT: mulpd %xmm11, %xmm2
3338 ; SSE-NEXT: addpd %xmm6, %xmm2
3339 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3340 ; SSE-NEXT: mulpd %xmm11, %xmm4
3341 ; SSE-NEXT: addpd %xmm5, %xmm4
3342 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3343 ; SSE-NEXT: mulpd %xmm11, %xmm5
3344 ; SSE-NEXT: addpd %xmm3, %xmm5
3345 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm11
3346 ; SSE-NEXT: addpd %xmm1, %xmm11
3347 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3348 ; SSE-NEXT: movapd %xmm1, %xmm6
3349 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm1[0]
3350 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3351 ; SSE-NEXT: mulpd %xmm6, %xmm3
3352 ; SSE-NEXT: addpd %xmm11, %xmm3
3353 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3354 ; SSE-NEXT: mulpd %xmm6, %xmm7
3355 ; SSE-NEXT: addpd %xmm5, %xmm7
3356 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3357 ; SSE-NEXT: mulpd %xmm6, %xmm5
3358 ; SSE-NEXT: addpd %xmm4, %xmm5
3359 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3360 ; SSE-NEXT: addpd %xmm2, %xmm6
3361 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3362 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3363 ; SSE-NEXT: mulpd %xmm1, %xmm0
3364 ; SSE-NEXT: addpd %xmm6, %xmm0
3365 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3366 ; SSE-NEXT: mulpd %xmm1, %xmm4
3367 ; SSE-NEXT: addpd %xmm5, %xmm4
3368 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3369 ; SSE-NEXT: mulpd %xmm1, %xmm5
3370 ; SSE-NEXT: addpd %xmm7, %xmm5
3371 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm1
3372 ; SSE-NEXT: addpd %xmm3, %xmm1
3373 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm6
3374 ; SSE-NEXT: movapd %xmm6, %xmm3
3375 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm6[0]
3376 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3377 ; SSE-NEXT: mulpd %xmm3, %xmm2
3378 ; SSE-NEXT: addpd %xmm1, %xmm2
3379 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3380 ; SSE-NEXT: mulpd %xmm3, %xmm1
3381 ; SSE-NEXT: addpd %xmm5, %xmm1
3382 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3383 ; SSE-NEXT: mulpd %xmm3, %xmm5
3384 ; SSE-NEXT: addpd %xmm4, %xmm5
3385 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm3
3386 ; SSE-NEXT: addpd %xmm0, %xmm3
3387 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1,1]
3388 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3389 ; SSE-NEXT: mulpd %xmm6, %xmm0
3390 ; SSE-NEXT: addpd %xmm3, %xmm0
3391 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3392 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3393 ; SSE-NEXT: mulpd %xmm6, %xmm0
3394 ; SSE-NEXT: addpd %xmm5, %xmm0
3395 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3396 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3397 ; SSE-NEXT: mulpd %xmm6, %xmm0
3398 ; SSE-NEXT: addpd %xmm1, %xmm0
3399 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3400 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3401 ; SSE-NEXT: addpd %xmm2, %xmm6
3402 ; SSE-NEXT: movapd %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3403 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3404 ; SSE-NEXT: movapd %xmm1, %xmm0
3405 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
3406 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3407 ; SSE-NEXT: movapd %xmm11, %xmm3
3408 ; SSE-NEXT: mulpd %xmm0, %xmm3
3409 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3410 ; SSE-NEXT: movapd %xmm15, %xmm8
3411 ; SSE-NEXT: movapd %xmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3412 ; SSE-NEXT: movapd %xmm15, %xmm2
3413 ; SSE-NEXT: mulpd %xmm1, %xmm2
3414 ; SSE-NEXT: addpd %xmm3, %xmm2
3415 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
3416 ; SSE-NEXT: movapd %xmm9, %xmm3
3417 ; SSE-NEXT: mulpd %xmm0, %xmm3
3418 ; SSE-NEXT: movapd %xmm10, %xmm15
3419 ; SSE-NEXT: movapd %xmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3420 ; SSE-NEXT: movapd %xmm10, %xmm4
3421 ; SSE-NEXT: mulpd %xmm1, %xmm4
3422 ; SSE-NEXT: addpd %xmm3, %xmm4
3423 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3424 ; SSE-NEXT: movapd %xmm13, %xmm3
3425 ; SSE-NEXT: mulpd %xmm0, %xmm3
3426 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3427 ; SSE-NEXT: movapd %xmm10, %xmm5
3428 ; SSE-NEXT: mulpd %xmm1, %xmm5
3429 ; SSE-NEXT: addpd %xmm3, %xmm5
3430 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
3431 ; SSE-NEXT: mulpd %xmm12, %xmm0
3432 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3433 ; SSE-NEXT: mulpd %xmm14, %xmm1
3434 ; SSE-NEXT: addpd %xmm0, %xmm1
3435 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3436 ; SSE-NEXT: movapd %xmm0, %xmm6
3437 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm0[0]
3438 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3439 ; SSE-NEXT: mulpd %xmm6, %xmm3
3440 ; SSE-NEXT: addpd %xmm1, %xmm3
3441 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3442 ; SSE-NEXT: mulpd %xmm6, %xmm1
3443 ; SSE-NEXT: addpd %xmm5, %xmm1
3444 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3445 ; SSE-NEXT: mulpd %xmm6, %xmm5
3446 ; SSE-NEXT: addpd %xmm4, %xmm5
3447 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3448 ; SSE-NEXT: addpd %xmm2, %xmm6
3449 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
3450 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3451 ; SSE-NEXT: mulpd %xmm0, %xmm2
3452 ; SSE-NEXT: addpd %xmm6, %xmm2
3453 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3454 ; SSE-NEXT: mulpd %xmm0, %xmm4
3455 ; SSE-NEXT: addpd %xmm5, %xmm4
3456 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3457 ; SSE-NEXT: mulpd %xmm0, %xmm5
3458 ; SSE-NEXT: addpd %xmm1, %xmm5
3459 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3460 ; SSE-NEXT: mulpd %xmm1, %xmm0
3461 ; SSE-NEXT: addpd %xmm3, %xmm0
3462 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3463 ; SSE-NEXT: movapd %xmm1, %xmm6
3464 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm1[0]
3465 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3466 ; SSE-NEXT: mulpd %xmm6, %xmm3
3467 ; SSE-NEXT: addpd %xmm0, %xmm3
3468 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3469 ; SSE-NEXT: mulpd %xmm6, %xmm7
3470 ; SSE-NEXT: addpd %xmm5, %xmm7
3471 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3472 ; SSE-NEXT: mulpd %xmm6, %xmm5
3473 ; SSE-NEXT: addpd %xmm4, %xmm5
3474 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3475 ; SSE-NEXT: addpd %xmm2, %xmm6
3476 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3477 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3478 ; SSE-NEXT: mulpd %xmm1, %xmm0
3479 ; SSE-NEXT: addpd %xmm6, %xmm0
3480 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3481 ; SSE-NEXT: mulpd %xmm1, %xmm4
3482 ; SSE-NEXT: addpd %xmm5, %xmm4
3483 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3484 ; SSE-NEXT: mulpd %xmm1, %xmm5
3485 ; SSE-NEXT: addpd %xmm7, %xmm5
3486 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3487 ; SSE-NEXT: mulpd %xmm2, %xmm1
3488 ; SSE-NEXT: addpd %xmm3, %xmm1
3489 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm6
3490 ; SSE-NEXT: movapd %xmm6, %xmm3
3491 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm6[0]
3492 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3493 ; SSE-NEXT: mulpd %xmm3, %xmm2
3494 ; SSE-NEXT: addpd %xmm1, %xmm2
3495 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3496 ; SSE-NEXT: mulpd %xmm3, %xmm1
3497 ; SSE-NEXT: addpd %xmm5, %xmm1
3498 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3499 ; SSE-NEXT: mulpd %xmm3, %xmm5
3500 ; SSE-NEXT: addpd %xmm4, %xmm5
3501 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm3
3502 ; SSE-NEXT: addpd %xmm0, %xmm3
3503 ; SSE-NEXT: unpckhpd {{.*#+}} xmm6 = xmm6[1,1]
3504 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3505 ; SSE-NEXT: mulpd %xmm6, %xmm0
3506 ; SSE-NEXT: addpd %xmm3, %xmm0
3507 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3508 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3509 ; SSE-NEXT: mulpd %xmm6, %xmm0
3510 ; SSE-NEXT: addpd %xmm5, %xmm0
3511 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3512 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3513 ; SSE-NEXT: mulpd %xmm6, %xmm0
3514 ; SSE-NEXT: addpd %xmm1, %xmm0
3515 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3516 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3517 ; SSE-NEXT: addpd %xmm2, %xmm6
3518 ; SSE-NEXT: movapd %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3519 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3520 ; SSE-NEXT: movapd %xmm1, %xmm0
3521 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
3522 ; SSE-NEXT: movapd %xmm11, %xmm3
3523 ; SSE-NEXT: mulpd %xmm0, %xmm3
3524 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3525 ; SSE-NEXT: movapd %xmm8, %xmm2
3526 ; SSE-NEXT: mulpd %xmm1, %xmm2
3527 ; SSE-NEXT: addpd %xmm3, %xmm2
3528 ; SSE-NEXT: movapd %xmm9, %xmm3
3529 ; SSE-NEXT: mulpd %xmm0, %xmm3
3530 ; SSE-NEXT: movapd %xmm15, %xmm4
3531 ; SSE-NEXT: mulpd %xmm1, %xmm4
3532 ; SSE-NEXT: addpd %xmm3, %xmm4
3533 ; SSE-NEXT: movapd %xmm13, %xmm8
3534 ; SSE-NEXT: movapd %xmm13, %xmm3
3535 ; SSE-NEXT: mulpd %xmm0, %xmm3
3536 ; SSE-NEXT: movapd %xmm10, %xmm5
3537 ; SSE-NEXT: movapd %xmm10, %xmm15
3538 ; SSE-NEXT: mulpd %xmm1, %xmm5
3539 ; SSE-NEXT: addpd %xmm3, %xmm5
3540 ; SSE-NEXT: movapd %xmm12, %xmm10
3541 ; SSE-NEXT: mulpd %xmm12, %xmm0
3542 ; SSE-NEXT: movapd %xmm14, %xmm9
3543 ; SSE-NEXT: mulpd %xmm14, %xmm1
3544 ; SSE-NEXT: addpd %xmm0, %xmm1
3545 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3546 ; SSE-NEXT: movapd %xmm0, %xmm6
3547 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm0[0]
3548 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3549 ; SSE-NEXT: mulpd %xmm6, %xmm3
3550 ; SSE-NEXT: addpd %xmm1, %xmm3
3551 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3552 ; SSE-NEXT: mulpd %xmm6, %xmm1
3553 ; SSE-NEXT: addpd %xmm5, %xmm1
3554 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3555 ; SSE-NEXT: mulpd %xmm6, %xmm5
3556 ; SSE-NEXT: addpd %xmm4, %xmm5
3557 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3558 ; SSE-NEXT: mulpd %xmm4, %xmm6
3559 ; SSE-NEXT: addpd %xmm2, %xmm6
3560 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
3561 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3562 ; SSE-NEXT: mulpd %xmm0, %xmm2
3563 ; SSE-NEXT: addpd %xmm6, %xmm2
3564 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3565 ; SSE-NEXT: mulpd %xmm0, %xmm4
3566 ; SSE-NEXT: addpd %xmm5, %xmm4
3567 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3568 ; SSE-NEXT: mulpd %xmm0, %xmm5
3569 ; SSE-NEXT: addpd %xmm1, %xmm5
3570 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm0
3571 ; SSE-NEXT: addpd %xmm3, %xmm0
3572 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3573 ; SSE-NEXT: movapd %xmm1, %xmm6
3574 ; SSE-NEXT: unpcklpd {{.*#+}} xmm6 = xmm6[0],xmm1[0]
3575 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3576 ; SSE-NEXT: mulpd %xmm6, %xmm3
3577 ; SSE-NEXT: addpd %xmm0, %xmm3
3578 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3579 ; SSE-NEXT: mulpd %xmm6, %xmm7
3580 ; SSE-NEXT: addpd %xmm5, %xmm7
3581 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3582 ; SSE-NEXT: mulpd %xmm6, %xmm5
3583 ; SSE-NEXT: addpd %xmm4, %xmm5
3584 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm6
3585 ; SSE-NEXT: addpd %xmm2, %xmm6
3586 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3587 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3588 ; SSE-NEXT: mulpd %xmm1, %xmm0
3589 ; SSE-NEXT: addpd %xmm6, %xmm0
3590 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3591 ; SSE-NEXT: mulpd %xmm1, %xmm4
3592 ; SSE-NEXT: addpd %xmm5, %xmm4
3593 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3594 ; SSE-NEXT: mulpd %xmm1, %xmm5
3595 ; SSE-NEXT: addpd %xmm7, %xmm5
3596 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3597 ; SSE-NEXT: mulpd %xmm2, %xmm1
3598 ; SSE-NEXT: addpd %xmm3, %xmm1
3599 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3600 ; SSE-NEXT: movapd %xmm7, %xmm3
3601 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm7[0]
3602 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3603 ; SSE-NEXT: mulpd %xmm3, %xmm2
3604 ; SSE-NEXT: addpd %xmm1, %xmm2
3605 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3606 ; SSE-NEXT: mulpd %xmm3, %xmm1
3607 ; SSE-NEXT: addpd %xmm5, %xmm1
3608 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3609 ; SSE-NEXT: mulpd %xmm3, %xmm5
3610 ; SSE-NEXT: addpd %xmm4, %xmm5
3611 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3612 ; SSE-NEXT: mulpd %xmm4, %xmm3
3613 ; SSE-NEXT: addpd %xmm0, %xmm3
3614 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1,1]
3615 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3616 ; SSE-NEXT: mulpd %xmm7, %xmm0
3617 ; SSE-NEXT: addpd %xmm3, %xmm0
3618 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3619 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3620 ; SSE-NEXT: mulpd %xmm7, %xmm0
3621 ; SSE-NEXT: addpd %xmm5, %xmm0
3622 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3623 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3624 ; SSE-NEXT: mulpd %xmm7, %xmm0
3625 ; SSE-NEXT: addpd %xmm1, %xmm0
3626 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3627 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3628 ; SSE-NEXT: mulpd %xmm0, %xmm7
3629 ; SSE-NEXT: addpd %xmm2, %xmm7
3630 ; SSE-NEXT: movapd %xmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3631 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3632 ; SSE-NEXT: movapd %xmm1, %xmm0
3633 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
3634 ; SSE-NEXT: movapd %xmm11, %xmm3
3635 ; SSE-NEXT: movapd %xmm11, %xmm12
3636 ; SSE-NEXT: mulpd %xmm0, %xmm3
3637 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3638 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3639 ; SSE-NEXT: movapd %xmm6, %xmm2
3640 ; SSE-NEXT: mulpd %xmm1, %xmm2
3641 ; SSE-NEXT: addpd %xmm3, %xmm2
3642 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Reload
3643 ; SSE-NEXT: movapd %xmm11, %xmm3
3644 ; SSE-NEXT: mulpd %xmm0, %xmm3
3645 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3646 ; SSE-NEXT: movapd %xmm13, %xmm4
3647 ; SSE-NEXT: mulpd %xmm1, %xmm4
3648 ; SSE-NEXT: addpd %xmm3, %xmm4
3649 ; SSE-NEXT: movapd %xmm8, %xmm3
3650 ; SSE-NEXT: movapd %xmm8, %xmm14
3651 ; SSE-NEXT: mulpd %xmm0, %xmm3
3652 ; SSE-NEXT: movapd %xmm15, %xmm8
3653 ; SSE-NEXT: movapd %xmm15, %xmm5
3654 ; SSE-NEXT: mulpd %xmm1, %xmm5
3655 ; SSE-NEXT: addpd %xmm3, %xmm5
3656 ; SSE-NEXT: mulpd %xmm10, %xmm0
3657 ; SSE-NEXT: mulpd %xmm9, %xmm1
3658 ; SSE-NEXT: movapd %xmm9, %xmm10
3659 ; SSE-NEXT: addpd %xmm0, %xmm1
3660 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3661 ; SSE-NEXT: movapd %xmm0, %xmm7
3662 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm0[0]
3663 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3664 ; SSE-NEXT: mulpd %xmm7, %xmm3
3665 ; SSE-NEXT: addpd %xmm1, %xmm3
3666 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3667 ; SSE-NEXT: mulpd %xmm7, %xmm1
3668 ; SSE-NEXT: addpd %xmm5, %xmm1
3669 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3670 ; SSE-NEXT: mulpd %xmm7, %xmm5
3671 ; SSE-NEXT: addpd %xmm4, %xmm5
3672 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm7
3673 ; SSE-NEXT: addpd %xmm2, %xmm7
3674 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
3675 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3676 ; SSE-NEXT: mulpd %xmm0, %xmm2
3677 ; SSE-NEXT: addpd %xmm7, %xmm2
3678 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3679 ; SSE-NEXT: mulpd %xmm0, %xmm4
3680 ; SSE-NEXT: addpd %xmm5, %xmm4
3681 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3682 ; SSE-NEXT: mulpd %xmm0, %xmm5
3683 ; SSE-NEXT: addpd %xmm1, %xmm5
3684 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3685 ; SSE-NEXT: mulpd %xmm1, %xmm0
3686 ; SSE-NEXT: addpd %xmm3, %xmm0
3687 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3688 ; SSE-NEXT: movapd %xmm1, %xmm7
3689 ; SSE-NEXT: unpcklpd {{.*#+}} xmm7 = xmm7[0],xmm1[0]
3690 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3691 ; SSE-NEXT: mulpd %xmm7, %xmm3
3692 ; SSE-NEXT: addpd %xmm0, %xmm3
3693 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3694 ; SSE-NEXT: mulpd %xmm7, %xmm9
3695 ; SSE-NEXT: addpd %xmm5, %xmm9
3696 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
3697 ; SSE-NEXT: mulpd %xmm7, %xmm5
3698 ; SSE-NEXT: addpd %xmm4, %xmm5
3699 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3700 ; SSE-NEXT: mulpd %xmm0, %xmm7
3701 ; SSE-NEXT: addpd %xmm2, %xmm7
3702 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3703 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3704 ; SSE-NEXT: mulpd %xmm1, %xmm0
3705 ; SSE-NEXT: addpd %xmm7, %xmm0
3706 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3707 ; SSE-NEXT: mulpd %xmm1, %xmm4
3708 ; SSE-NEXT: addpd %xmm5, %xmm4
3709 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3710 ; SSE-NEXT: mulpd %xmm1, %xmm7
3711 ; SSE-NEXT: addpd %xmm9, %xmm7
3712 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm1
3713 ; SSE-NEXT: addpd %xmm3, %xmm1
3714 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm15
3715 ; SSE-NEXT: movapd %xmm15, %xmm3
3716 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm15[0]
3717 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3718 ; SSE-NEXT: mulpd %xmm3, %xmm2
3719 ; SSE-NEXT: addpd %xmm1, %xmm2
3720 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3721 ; SSE-NEXT: mulpd %xmm3, %xmm1
3722 ; SSE-NEXT: addpd %xmm7, %xmm1
3723 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3724 ; SSE-NEXT: mulpd %xmm3, %xmm7
3725 ; SSE-NEXT: addpd %xmm4, %xmm7
3726 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm3
3727 ; SSE-NEXT: addpd %xmm0, %xmm3
3728 ; SSE-NEXT: unpckhpd {{.*#+}} xmm15 = xmm15[1,1]
3729 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3730 ; SSE-NEXT: mulpd %xmm15, %xmm0
3731 ; SSE-NEXT: addpd %xmm3, %xmm0
3732 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3733 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3734 ; SSE-NEXT: mulpd %xmm15, %xmm0
3735 ; SSE-NEXT: addpd %xmm7, %xmm0
3736 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3737 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3738 ; SSE-NEXT: mulpd %xmm15, %xmm0
3739 ; SSE-NEXT: addpd %xmm1, %xmm0
3740 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3741 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm15
3742 ; SSE-NEXT: addpd %xmm2, %xmm15
3743 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3744 ; SSE-NEXT: movapd %xmm1, %xmm0
3745 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
3746 ; SSE-NEXT: movapd %xmm12, %xmm3
3747 ; SSE-NEXT: mulpd %xmm0, %xmm3
3748 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3749 ; SSE-NEXT: movapd %xmm6, %xmm2
3750 ; SSE-NEXT: movapd %xmm6, %xmm12
3751 ; SSE-NEXT: mulpd %xmm1, %xmm2
3752 ; SSE-NEXT: addpd %xmm3, %xmm2
3753 ; SSE-NEXT: mulpd %xmm0, %xmm11
3754 ; SSE-NEXT: movapd %xmm13, %xmm6
3755 ; SSE-NEXT: movapd %xmm13, %xmm4
3756 ; SSE-NEXT: mulpd %xmm1, %xmm4
3757 ; SSE-NEXT: addpd %xmm11, %xmm4
3758 ; SSE-NEXT: mulpd %xmm0, %xmm14
3759 ; SSE-NEXT: movapd %xmm8, %xmm7
3760 ; SSE-NEXT: mulpd %xmm1, %xmm7
3761 ; SSE-NEXT: addpd %xmm14, %xmm7
3762 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Reload
3763 ; SSE-NEXT: mulpd %xmm8, %xmm0
3764 ; SSE-NEXT: movapd %xmm10, %xmm5
3765 ; SSE-NEXT: mulpd %xmm10, %xmm1
3766 ; SSE-NEXT: addpd %xmm0, %xmm1
3767 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3768 ; SSE-NEXT: movapd %xmm0, %xmm9
3769 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm0[0]
3770 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3771 ; SSE-NEXT: mulpd %xmm9, %xmm3
3772 ; SSE-NEXT: addpd %xmm1, %xmm3
3773 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3774 ; SSE-NEXT: mulpd %xmm9, %xmm1
3775 ; SSE-NEXT: addpd %xmm7, %xmm1
3776 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3777 ; SSE-NEXT: mulpd %xmm9, %xmm7
3778 ; SSE-NEXT: addpd %xmm4, %xmm7
3779 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm9
3780 ; SSE-NEXT: addpd %xmm2, %xmm9
3781 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
3782 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3783 ; SSE-NEXT: mulpd %xmm0, %xmm2
3784 ; SSE-NEXT: addpd %xmm9, %xmm2
3785 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
3786 ; SSE-NEXT: mulpd %xmm0, %xmm4
3787 ; SSE-NEXT: addpd %xmm7, %xmm4
3788 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3789 ; SSE-NEXT: mulpd %xmm0, %xmm7
3790 ; SSE-NEXT: addpd %xmm1, %xmm7
3791 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm0
3792 ; SSE-NEXT: addpd %xmm3, %xmm0
3793 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3794 ; SSE-NEXT: movapd %xmm1, %xmm9
3795 ; SSE-NEXT: unpcklpd {{.*#+}} xmm9 = xmm9[0],xmm1[0]
3796 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3797 ; SSE-NEXT: mulpd %xmm9, %xmm3
3798 ; SSE-NEXT: addpd %xmm0, %xmm3
3799 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm10
3800 ; SSE-NEXT: mulpd %xmm9, %xmm10
3801 ; SSE-NEXT: addpd %xmm7, %xmm10
3802 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3803 ; SSE-NEXT: mulpd %xmm9, %xmm7
3804 ; SSE-NEXT: addpd %xmm4, %xmm7
3805 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm9
3806 ; SSE-NEXT: addpd %xmm2, %xmm9
3807 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3808 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3809 ; SSE-NEXT: mulpd %xmm1, %xmm0
3810 ; SSE-NEXT: addpd %xmm9, %xmm0
3811 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3812 ; SSE-NEXT: mulpd %xmm1, %xmm9
3813 ; SSE-NEXT: addpd %xmm7, %xmm9
3814 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3815 ; SSE-NEXT: mulpd %xmm1, %xmm7
3816 ; SSE-NEXT: addpd %xmm10, %xmm7
3817 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm1
3818 ; SSE-NEXT: addpd %xmm3, %xmm1
3819 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm11
3820 ; SSE-NEXT: movapd %xmm11, %xmm3
3821 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm11[0]
3822 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3823 ; SSE-NEXT: mulpd %xmm3, %xmm2
3824 ; SSE-NEXT: addpd %xmm1, %xmm2
3825 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3826 ; SSE-NEXT: mulpd %xmm3, %xmm1
3827 ; SSE-NEXT: addpd %xmm7, %xmm1
3828 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3829 ; SSE-NEXT: mulpd %xmm3, %xmm7
3830 ; SSE-NEXT: addpd %xmm9, %xmm7
3831 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm3
3832 ; SSE-NEXT: addpd %xmm0, %xmm3
3833 ; SSE-NEXT: unpckhpd {{.*#+}} xmm11 = xmm11[1,1]
3834 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3835 ; SSE-NEXT: mulpd %xmm11, %xmm0
3836 ; SSE-NEXT: addpd %xmm3, %xmm0
3837 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3838 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3839 ; SSE-NEXT: mulpd %xmm11, %xmm0
3840 ; SSE-NEXT: addpd %xmm7, %xmm0
3841 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3842 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3843 ; SSE-NEXT: mulpd %xmm11, %xmm0
3844 ; SSE-NEXT: addpd %xmm1, %xmm0
3845 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3846 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm11
3847 ; SSE-NEXT: addpd %xmm2, %xmm11
3848 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3849 ; SSE-NEXT: movapd %xmm1, %xmm0
3850 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
3851 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3852 ; SSE-NEXT: movapd %xmm13, %xmm3
3853 ; SSE-NEXT: mulpd %xmm0, %xmm3
3854 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3855 ; SSE-NEXT: movapd %xmm12, %xmm2
3856 ; SSE-NEXT: mulpd %xmm1, %xmm2
3857 ; SSE-NEXT: addpd %xmm3, %xmm2
3858 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm14 # 16-byte Reload
3859 ; SSE-NEXT: movapd %xmm14, %xmm3
3860 ; SSE-NEXT: mulpd %xmm0, %xmm3
3861 ; SSE-NEXT: movapd %xmm6, %xmm7
3862 ; SSE-NEXT: mulpd %xmm1, %xmm7
3863 ; SSE-NEXT: addpd %xmm3, %xmm7
3864 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
3865 ; SSE-NEXT: movapd %xmm4, %xmm3
3866 ; SSE-NEXT: mulpd %xmm0, %xmm3
3867 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3868 ; SSE-NEXT: movapd %xmm6, %xmm9
3869 ; SSE-NEXT: mulpd %xmm1, %xmm9
3870 ; SSE-NEXT: addpd %xmm3, %xmm9
3871 ; SSE-NEXT: mulpd %xmm8, %xmm0
3872 ; SSE-NEXT: mulpd %xmm5, %xmm1
3873 ; SSE-NEXT: addpd %xmm0, %xmm1
3874 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3875 ; SSE-NEXT: movapd %xmm0, %xmm10
3876 ; SSE-NEXT: unpcklpd {{.*#+}} xmm10 = xmm10[0],xmm0[0]
3877 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3878 ; SSE-NEXT: mulpd %xmm10, %xmm3
3879 ; SSE-NEXT: addpd %xmm1, %xmm3
3880 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
3881 ; SSE-NEXT: mulpd %xmm10, %xmm12
3882 ; SSE-NEXT: addpd %xmm9, %xmm12
3883 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3884 ; SSE-NEXT: mulpd %xmm10, %xmm9
3885 ; SSE-NEXT: addpd %xmm7, %xmm9
3886 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm10
3887 ; SSE-NEXT: addpd %xmm2, %xmm10
3888 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
3889 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3890 ; SSE-NEXT: mulpd %xmm0, %xmm1
3891 ; SSE-NEXT: addpd %xmm10, %xmm1
3892 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm10
3893 ; SSE-NEXT: mulpd %xmm0, %xmm10
3894 ; SSE-NEXT: addpd %xmm9, %xmm10
3895 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3896 ; SSE-NEXT: mulpd %xmm0, %xmm9
3897 ; SSE-NEXT: addpd %xmm12, %xmm9
3898 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3899 ; SSE-NEXT: mulpd %xmm2, %xmm0
3900 ; SSE-NEXT: addpd %xmm3, %xmm0
3901 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3902 ; SSE-NEXT: movapd %xmm7, %xmm3
3903 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm7[0]
3904 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3905 ; SSE-NEXT: mulpd %xmm3, %xmm2
3906 ; SSE-NEXT: addpd %xmm0, %xmm2
3907 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
3908 ; SSE-NEXT: mulpd %xmm3, %xmm12
3909 ; SSE-NEXT: addpd %xmm9, %xmm12
3910 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3911 ; SSE-NEXT: mulpd %xmm3, %xmm9
3912 ; SSE-NEXT: addpd %xmm10, %xmm9
3913 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3914 ; SSE-NEXT: mulpd %xmm0, %xmm3
3915 ; SSE-NEXT: addpd %xmm1, %xmm3
3916 ; SSE-NEXT: unpckhpd {{.*#+}} xmm7 = xmm7[1,1]
3917 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3918 ; SSE-NEXT: mulpd %xmm7, %xmm0
3919 ; SSE-NEXT: addpd %xmm3, %xmm0
3920 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm10
3921 ; SSE-NEXT: mulpd %xmm7, %xmm10
3922 ; SSE-NEXT: addpd %xmm9, %xmm10
3923 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3924 ; SSE-NEXT: mulpd %xmm7, %xmm9
3925 ; SSE-NEXT: addpd %xmm12, %xmm9
3926 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm7
3927 ; SSE-NEXT: addpd %xmm2, %xmm7
3928 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm8
3929 ; SSE-NEXT: movapd %xmm8, %xmm2
3930 ; SSE-NEXT: unpcklpd {{.*#+}} xmm2 = xmm2[0],xmm8[0]
3931 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3932 ; SSE-NEXT: mulpd %xmm2, %xmm1
3933 ; SSE-NEXT: addpd %xmm7, %xmm1
3934 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
3935 ; SSE-NEXT: mulpd %xmm2, %xmm12
3936 ; SSE-NEXT: addpd %xmm9, %xmm12
3937 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
3938 ; SSE-NEXT: mulpd %xmm2, %xmm7
3939 ; SSE-NEXT: addpd %xmm10, %xmm7
3940 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
3941 ; SSE-NEXT: mulpd %xmm3, %xmm2
3942 ; SSE-NEXT: addpd %xmm0, %xmm2
3943 ; SSE-NEXT: unpckhpd {{.*#+}} xmm8 = xmm8[1,1]
3944 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3945 ; SSE-NEXT: mulpd %xmm8, %xmm0
3946 ; SSE-NEXT: addpd %xmm2, %xmm0
3947 ; SSE-NEXT: movapd %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
3948 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3949 ; SSE-NEXT: mulpd %xmm8, %xmm0
3950 ; SSE-NEXT: addpd %xmm7, %xmm0
3951 ; SSE-NEXT: movapd %xmm0, (%rsp) # 16-byte Spill
3952 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm9
3953 ; SSE-NEXT: mulpd %xmm8, %xmm9
3954 ; SSE-NEXT: addpd %xmm12, %xmm9
3955 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
3956 ; SSE-NEXT: mulpd %xmm0, %xmm8
3957 ; SSE-NEXT: addpd %xmm1, %xmm8
3958 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3959 ; SSE-NEXT: movapd %xmm1, %xmm0
3960 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
3961 ; SSE-NEXT: movapd %xmm13, %xmm12
3962 ; SSE-NEXT: mulpd %xmm0, %xmm12
3963 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
3964 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
3965 ; SSE-NEXT: mulpd %xmm1, %xmm3
3966 ; SSE-NEXT: addpd %xmm12, %xmm3
3967 ; SSE-NEXT: movapd %xmm14, %xmm12
3968 ; SSE-NEXT: movapd %xmm14, %xmm5
3969 ; SSE-NEXT: mulpd %xmm0, %xmm12
3970 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm13 # 16-byte Reload
3971 ; SSE-NEXT: mulpd %xmm1, %xmm13
3972 ; SSE-NEXT: addpd %xmm12, %xmm13
3973 ; SSE-NEXT: mulpd %xmm0, %xmm4
3974 ; SSE-NEXT: movapd %xmm6, %xmm14
3975 ; SSE-NEXT: mulpd %xmm1, %xmm14
3976 ; SSE-NEXT: addpd %xmm4, %xmm14
3977 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm6 # 16-byte Reload
3978 ; SSE-NEXT: mulpd %xmm6, %xmm0
3979 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Reload
3980 ; SSE-NEXT: mulpd %xmm10, %xmm1
3981 ; SSE-NEXT: addpd %xmm0, %xmm1
3982 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
3983 ; SSE-NEXT: movapd %xmm2, %xmm0
3984 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm2[0]
3985 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
3986 ; SSE-NEXT: mulpd %xmm0, %xmm12
3987 ; SSE-NEXT: addpd %xmm1, %xmm12
3988 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
3989 ; SSE-NEXT: mulpd %xmm0, %xmm1
3990 ; SSE-NEXT: addpd %xmm14, %xmm1
3991 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
3992 ; SSE-NEXT: mulpd %xmm0, %xmm14
3993 ; SSE-NEXT: addpd %xmm13, %xmm14
3994 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm0
3995 ; SSE-NEXT: addpd %xmm3, %xmm0
3996 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1,1]
3997 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm13
3998 ; SSE-NEXT: mulpd %xmm2, %xmm13
3999 ; SSE-NEXT: addpd %xmm0, %xmm13
4000 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4001 ; SSE-NEXT: mulpd %xmm2, %xmm0
4002 ; SSE-NEXT: addpd %xmm14, %xmm0
4003 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
4004 ; SSE-NEXT: mulpd %xmm2, %xmm14
4005 ; SSE-NEXT: addpd %xmm1, %xmm14
4006 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4007 ; SSE-NEXT: mulpd %xmm1, %xmm2
4008 ; SSE-NEXT: addpd %xmm12, %xmm2
4009 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
4010 ; SSE-NEXT: movapd %xmm12, %xmm1
4011 ; SSE-NEXT: unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm12[0]
4012 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
4013 ; SSE-NEXT: mulpd %xmm1, %xmm3
4014 ; SSE-NEXT: addpd %xmm2, %xmm3
4015 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
4016 ; SSE-NEXT: mulpd %xmm1, %xmm2
4017 ; SSE-NEXT: addpd %xmm14, %xmm2
4018 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
4019 ; SSE-NEXT: mulpd %xmm1, %xmm14
4020 ; SSE-NEXT: addpd %xmm0, %xmm14
4021 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4022 ; SSE-NEXT: mulpd %xmm0, %xmm1
4023 ; SSE-NEXT: addpd %xmm13, %xmm1
4024 ; SSE-NEXT: unpckhpd {{.*#+}} xmm12 = xmm12[1,1]
4025 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
4026 ; SSE-NEXT: mulpd %xmm12, %xmm4
4027 ; SSE-NEXT: addpd %xmm1, %xmm4
4028 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm13
4029 ; SSE-NEXT: mulpd %xmm12, %xmm13
4030 ; SSE-NEXT: addpd %xmm14, %xmm13
4031 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
4032 ; SSE-NEXT: mulpd %xmm12, %xmm14
4033 ; SSE-NEXT: addpd %xmm2, %xmm14
4034 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm12
4035 ; SSE-NEXT: addpd %xmm3, %xmm12
4036 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm2
4037 ; SSE-NEXT: movapd %xmm2, %xmm3
4038 ; SSE-NEXT: unpcklpd {{.*#+}} xmm3 = xmm3[0],xmm2[0]
4039 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4040 ; SSE-NEXT: mulpd %xmm3, %xmm1
4041 ; SSE-NEXT: addpd %xmm12, %xmm1
4042 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm12
4043 ; SSE-NEXT: mulpd %xmm3, %xmm12
4044 ; SSE-NEXT: addpd %xmm14, %xmm12
4045 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4046 ; SSE-NEXT: mulpd %xmm3, %xmm0
4047 ; SSE-NEXT: addpd %xmm13, %xmm0
4048 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
4049 ; SSE-NEXT: mulpd %xmm7, %xmm3
4050 ; SSE-NEXT: addpd %xmm4, %xmm3
4051 ; SSE-NEXT: unpckhpd {{.*#+}} xmm2 = xmm2[1,1]
4052 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm14
4053 ; SSE-NEXT: mulpd %xmm2, %xmm14
4054 ; SSE-NEXT: addpd %xmm3, %xmm14
4055 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm13
4056 ; SSE-NEXT: mulpd %xmm2, %xmm13
4057 ; SSE-NEXT: addpd %xmm0, %xmm13
4058 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm7
4059 ; SSE-NEXT: mulpd %xmm2, %xmm7
4060 ; SSE-NEXT: addpd %xmm12, %xmm7
4061 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4062 ; SSE-NEXT: mulpd %xmm0, %xmm2
4063 ; SSE-NEXT: addpd %xmm1, %xmm2
4064 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4065 ; SSE-NEXT: movapd %xmm1, %xmm0
4066 ; SSE-NEXT: unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
4067 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm12 # 16-byte Reload
4068 ; SSE-NEXT: mulpd %xmm0, %xmm12
4069 ; SSE-NEXT: mulpd %xmm0, %xmm5
4070 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Reload
4071 ; SSE-NEXT: mulpd %xmm0, %xmm3
4072 ; SSE-NEXT: mulpd %xmm6, %xmm0
4073 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
4074 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4075 ; SSE-NEXT: mulpd %xmm1, %xmm4
4076 ; SSE-NEXT: addpd %xmm12, %xmm4
4077 ; SSE-NEXT: movapd %xmm4, %xmm12
4078 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4079 ; SSE-NEXT: mulpd %xmm1, %xmm4
4080 ; SSE-NEXT: addpd %xmm5, %xmm4
4081 ; SSE-NEXT: movapd %xmm4, %xmm5
4082 ; SSE-NEXT: movapd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
4083 ; SSE-NEXT: mulpd %xmm1, %xmm4
4084 ; SSE-NEXT: addpd %xmm3, %xmm4
4085 ; SSE-NEXT: movapd %xmm4, %xmm3
4086 ; SSE-NEXT: mulpd %xmm10, %xmm1
4087 ; SSE-NEXT: addpd %xmm0, %xmm1
4088 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4089 ; SSE-NEXT: movapd %xmm0, %xmm4
4090 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm0[0]
4091 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm10
4092 ; SSE-NEXT: mulpd %xmm4, %xmm10
4093 ; SSE-NEXT: addpd %xmm1, %xmm10
4094 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4095 ; SSE-NEXT: mulpd %xmm4, %xmm1
4096 ; SSE-NEXT: addpd %xmm3, %xmm1
4097 ; SSE-NEXT: movapd %xmm1, %xmm3
4098 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4099 ; SSE-NEXT: mulpd %xmm4, %xmm1
4100 ; SSE-NEXT: addpd %xmm5, %xmm1
4101 ; SSE-NEXT: movapd %xmm1, %xmm5
4102 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm4
4103 ; SSE-NEXT: addpd %xmm12, %xmm4
4104 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
4105 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4106 ; SSE-NEXT: mulpd %xmm0, %xmm1
4107 ; SSE-NEXT: addpd %xmm4, %xmm1
4108 ; SSE-NEXT: movapd %xmm1, %xmm12
4109 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm6
4110 ; SSE-NEXT: mulpd %xmm0, %xmm6
4111 ; SSE-NEXT: addpd %xmm5, %xmm6
4112 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4113 ; SSE-NEXT: mulpd %xmm0, %xmm1
4114 ; SSE-NEXT: addpd %xmm3, %xmm1
4115 ; SSE-NEXT: movapd %xmm1, %xmm3
4116 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm0
4117 ; SSE-NEXT: addpd %xmm10, %xmm0
4118 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4119 ; SSE-NEXT: movapd %xmm1, %xmm4
4120 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm1[0]
4121 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
4122 ; SSE-NEXT: mulpd %xmm4, %xmm5
4123 ; SSE-NEXT: addpd %xmm0, %xmm5
4124 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4125 ; SSE-NEXT: mulpd %xmm4, %xmm0
4126 ; SSE-NEXT: addpd %xmm3, %xmm0
4127 ; SSE-NEXT: movapd %xmm0, %xmm10
4128 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4129 ; SSE-NEXT: mulpd %xmm4, %xmm0
4130 ; SSE-NEXT: addpd %xmm6, %xmm0
4131 ; SSE-NEXT: movapd %xmm0, %xmm6
4132 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm4
4133 ; SSE-NEXT: addpd %xmm12, %xmm4
4134 ; SSE-NEXT: unpckhpd {{.*#+}} xmm1 = xmm1[1,1]
4135 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4136 ; SSE-NEXT: mulpd %xmm1, %xmm0
4137 ; SSE-NEXT: addpd %xmm4, %xmm0
4138 ; SSE-NEXT: movapd %xmm0, %xmm3
4139 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4140 ; SSE-NEXT: mulpd %xmm1, %xmm0
4141 ; SSE-NEXT: addpd %xmm6, %xmm0
4142 ; SSE-NEXT: movapd %xmm0, %xmm6
4143 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4144 ; SSE-NEXT: mulpd %xmm1, %xmm0
4145 ; SSE-NEXT: addpd %xmm10, %xmm0
4146 ; SSE-NEXT: movapd %xmm0, %xmm10
4147 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm1
4148 ; SSE-NEXT: addpd %xmm5, %xmm1
4149 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm0
4150 ; SSE-NEXT: movapd %xmm0, %xmm4
4151 ; SSE-NEXT: unpcklpd {{.*#+}} xmm4 = xmm4[0],xmm0[0]
4152 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm5
4153 ; SSE-NEXT: mulpd %xmm4, %xmm5
4154 ; SSE-NEXT: addpd %xmm1, %xmm5
4155 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm1
4156 ; SSE-NEXT: mulpd %xmm4, %xmm1
4157 ; SSE-NEXT: addpd %xmm10, %xmm1
4158 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm10
4159 ; SSE-NEXT: mulpd %xmm4, %xmm10
4160 ; SSE-NEXT: addpd %xmm6, %xmm10
4161 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm4
4162 ; SSE-NEXT: addpd %xmm3, %xmm4
4163 ; SSE-NEXT: unpckhpd {{.*#+}} xmm0 = xmm0[1,1]
4164 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm3
4165 ; SSE-NEXT: mulpd %xmm0, %xmm3
4166 ; SSE-NEXT: addpd %xmm4, %xmm3
4167 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm4
4168 ; SSE-NEXT: mulpd %xmm0, %xmm4
4169 ; SSE-NEXT: addpd %xmm10, %xmm4
4170 ; SSE-NEXT: movapd {{[0-9]+}}(%rsp), %xmm6
4171 ; SSE-NEXT: mulpd %xmm0, %xmm6
4172 ; SSE-NEXT: addpd %xmm1, %xmm6
4173 ; SSE-NEXT: mulpd {{[0-9]+}}(%rsp), %xmm0
4174 ; SSE-NEXT: addpd %xmm5, %xmm0
4175 ; SSE-NEXT: movapd %xmm3, 496(%rdi)
4176 ; SSE-NEXT: movapd %xmm4, 480(%rdi)
4177 ; SSE-NEXT: movapd %xmm6, 464(%rdi)
4178 ; SSE-NEXT: movapd %xmm0, 448(%rdi)
4179 ; SSE-NEXT: movapd %xmm14, 432(%rdi)
4180 ; SSE-NEXT: movapd %xmm13, 416(%rdi)
4181 ; SSE-NEXT: movapd %xmm7, 400(%rdi)
4182 ; SSE-NEXT: movapd %xmm2, 384(%rdi)
4183 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4184 ; SSE-NEXT: movaps %xmm0, 368(%rdi)
4185 ; SSE-NEXT: movaps (%rsp), %xmm0 # 16-byte Reload
4186 ; SSE-NEXT: movaps %xmm0, 352(%rdi)
4187 ; SSE-NEXT: movapd %xmm9, 336(%rdi)
4188 ; SSE-NEXT: movapd %xmm8, 320(%rdi)
4189 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4190 ; SSE-NEXT: movaps %xmm0, 304(%rdi)
4191 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4192 ; SSE-NEXT: movaps %xmm0, 288(%rdi)
4193 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4194 ; SSE-NEXT: movaps %xmm0, 272(%rdi)
4195 ; SSE-NEXT: movapd %xmm11, 256(%rdi)
4196 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4197 ; SSE-NEXT: movaps %xmm0, 240(%rdi)
4198 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4199 ; SSE-NEXT: movaps %xmm0, 224(%rdi)
4200 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4201 ; SSE-NEXT: movaps %xmm0, 208(%rdi)
4202 ; SSE-NEXT: movapd %xmm15, 192(%rdi)
4203 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4204 ; SSE-NEXT: movaps %xmm0, 176(%rdi)
4205 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4206 ; SSE-NEXT: movaps %xmm0, 160(%rdi)
4207 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4208 ; SSE-NEXT: movaps %xmm0, 144(%rdi)
4209 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4210 ; SSE-NEXT: movaps %xmm0, 128(%rdi)
4211 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4212 ; SSE-NEXT: movaps %xmm0, 112(%rdi)
4213 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4214 ; SSE-NEXT: movaps %xmm0, 96(%rdi)
4215 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4216 ; SSE-NEXT: movaps %xmm0, 80(%rdi)
4217 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4218 ; SSE-NEXT: movaps %xmm0, 64(%rdi)
4219 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4220 ; SSE-NEXT: movaps %xmm0, 48(%rdi)
4221 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4222 ; SSE-NEXT: movaps %xmm0, 32(%rdi)
4223 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4224 ; SSE-NEXT: movaps %xmm0, 16(%rdi)
4225 ; SSE-NEXT: movaps {{[-0-9]+}}(%r{{[sb]}}p), %xmm0 # 16-byte Reload
4226 ; SSE-NEXT: movaps %xmm0, (%rdi)
4227 ; SSE-NEXT: addq $328, %rsp # imm = 0x148
4228 ; SSE-NEXT: retq
4229 ;
4230 ; AVX1-LABEL: test_mul8x8_f64:
4231 ; AVX1: # %bb.0: # %entry
4232 ; AVX1-NEXT: pushq %rbp
4233 ; AVX1-NEXT: movq %rsp, %rbp
4234 ; AVX1-NEXT: andq $-32, %rsp
4235 ; AVX1-NEXT: subq $448, %rsp # imm = 0x1C0
4236 ; AVX1-NEXT: vmovapd %ymm2, %ymm12
4237 ; AVX1-NEXT: vmovapd %ymm0, (%rsp) # 32-byte Spill
4238 ; AVX1-NEXT: movq %rdi, %rax
4239 ; AVX1-NEXT: vmovapd 144(%rbp), %ymm2
4240 ; AVX1-NEXT: vmovapd 112(%rbp), %ymm13
4241 ; AVX1-NEXT: vbroadcastsd 272(%rbp), %ymm10
4242 ; AVX1-NEXT: vmulpd %ymm1, %ymm10, %ymm8
4243 ; AVX1-NEXT: vmovapd %ymm1, %ymm9
4244 ; AVX1-NEXT: vmulpd %ymm0, %ymm10, %ymm0
4245 ; AVX1-NEXT: vbroadcastsd 280(%rbp), %ymm10
4246 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4247 ; AVX1-NEXT: vaddpd %ymm11, %ymm8, %ymm1
4248 ; AVX1-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4249 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4250 ; AVX1-NEXT: vbroadcastsd 288(%rbp), %ymm10
4251 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4252 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4253 ; AVX1-NEXT: vmulpd %ymm5, %ymm10, %ymm10
4254 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4255 ; AVX1-NEXT: vbroadcastsd 296(%rbp), %ymm10
4256 ; AVX1-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4257 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4258 ; AVX1-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4259 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4260 ; AVX1-NEXT: vbroadcastsd 304(%rbp), %ymm10
4261 ; AVX1-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4262 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4263 ; AVX1-NEXT: vmulpd 48(%rbp), %ymm10, %ymm10
4264 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4265 ; AVX1-NEXT: vbroadcastsd 312(%rbp), %ymm10
4266 ; AVX1-NEXT: vmulpd %ymm10, %ymm13, %ymm11
4267 ; AVX1-NEXT: vmovapd %ymm13, %ymm14
4268 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4269 ; AVX1-NEXT: vmulpd 80(%rbp), %ymm10, %ymm10
4270 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4271 ; AVX1-NEXT: vbroadcastsd 320(%rbp), %ymm10
4272 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4273 ; AVX1-NEXT: vmovapd %ymm2, %ymm13
4274 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4275 ; AVX1-NEXT: vmulpd 176(%rbp), %ymm10, %ymm10
4276 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4277 ; AVX1-NEXT: vbroadcastsd 328(%rbp), %ymm10
4278 ; AVX1-NEXT: vmulpd 240(%rbp), %ymm10, %ymm11
4279 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4280 ; AVX1-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4281 ; AVX1-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4282 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4283 ; AVX1-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4284 ; AVX1-NEXT: vbroadcastsd 336(%rbp), %ymm0
4285 ; AVX1-NEXT: vmulpd %ymm0, %ymm9, %ymm1
4286 ; AVX1-NEXT: vbroadcastsd 344(%rbp), %ymm10
4287 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4288 ; AVX1-NEXT: vmovapd %ymm3, %ymm8
4289 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4290 ; AVX1-NEXT: vmovapd (%rsp), %ymm15 # 32-byte Reload
4291 ; AVX1-NEXT: vmulpd %ymm0, %ymm15, %ymm0
4292 ; AVX1-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4293 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4294 ; AVX1-NEXT: vbroadcastsd 352(%rbp), %ymm10
4295 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4296 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4297 ; AVX1-NEXT: vmulpd %ymm5, %ymm10, %ymm10
4298 ; AVX1-NEXT: vmovapd %ymm5, %ymm3
4299 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4300 ; AVX1-NEXT: vbroadcastsd 360(%rbp), %ymm10
4301 ; AVX1-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4302 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4303 ; AVX1-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4304 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4305 ; AVX1-NEXT: vbroadcastsd 368(%rbp), %ymm10
4306 ; AVX1-NEXT: vmovapd 16(%rbp), %ymm2
4307 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4308 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4309 ; AVX1-NEXT: vmulpd 48(%rbp), %ymm10, %ymm10
4310 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4311 ; AVX1-NEXT: vbroadcastsd 376(%rbp), %ymm10
4312 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm11
4313 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4314 ; AVX1-NEXT: vmovapd 80(%rbp), %ymm2
4315 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4316 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4317 ; AVX1-NEXT: vbroadcastsd 384(%rbp), %ymm10
4318 ; AVX1-NEXT: vmulpd %ymm10, %ymm13, %ymm11
4319 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4320 ; AVX1-NEXT: vmovapd 176(%rbp), %ymm14
4321 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4322 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4323 ; AVX1-NEXT: vbroadcastsd 392(%rbp), %ymm10
4324 ; AVX1-NEXT: vmovapd 240(%rbp), %ymm2
4325 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4326 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4327 ; AVX1-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4328 ; AVX1-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4329 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4330 ; AVX1-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4331 ; AVX1-NEXT: vbroadcastsd 400(%rbp), %ymm0
4332 ; AVX1-NEXT: vmulpd %ymm0, %ymm9, %ymm1
4333 ; AVX1-NEXT: vbroadcastsd 408(%rbp), %ymm10
4334 ; AVX1-NEXT: vmovapd %ymm8, %ymm5
4335 ; AVX1-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4336 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4337 ; AVX1-NEXT: vmulpd %ymm0, %ymm15, %ymm0
4338 ; AVX1-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4339 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4340 ; AVX1-NEXT: vbroadcastsd 416(%rbp), %ymm10
4341 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4342 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4343 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4344 ; AVX1-NEXT: vmovapd %ymm3, %ymm2
4345 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4346 ; AVX1-NEXT: vbroadcastsd 424(%rbp), %ymm10
4347 ; AVX1-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4348 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4349 ; AVX1-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4350 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4351 ; AVX1-NEXT: vbroadcastsd 432(%rbp), %ymm10
4352 ; AVX1-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4353 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4354 ; AVX1-NEXT: vmulpd 48(%rbp), %ymm10, %ymm10
4355 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4356 ; AVX1-NEXT: vbroadcastsd 440(%rbp), %ymm10
4357 ; AVX1-NEXT: vmulpd 112(%rbp), %ymm10, %ymm11
4358 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4359 ; AVX1-NEXT: vmulpd 80(%rbp), %ymm10, %ymm10
4360 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4361 ; AVX1-NEXT: vbroadcastsd 448(%rbp), %ymm10
4362 ; AVX1-NEXT: vmulpd %ymm10, %ymm13, %ymm11
4363 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4364 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4365 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4366 ; AVX1-NEXT: vbroadcastsd 456(%rbp), %ymm10
4367 ; AVX1-NEXT: vmulpd 240(%rbp), %ymm10, %ymm11
4368 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4369 ; AVX1-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4370 ; AVX1-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4371 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4372 ; AVX1-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4373 ; AVX1-NEXT: vbroadcastsd 464(%rbp), %ymm0
4374 ; AVX1-NEXT: vmulpd %ymm0, %ymm9, %ymm1
4375 ; AVX1-NEXT: vmovapd %ymm9, %ymm13
4376 ; AVX1-NEXT: vbroadcastsd 472(%rbp), %ymm10
4377 ; AVX1-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4378 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4379 ; AVX1-NEXT: vmulpd %ymm0, %ymm15, %ymm0
4380 ; AVX1-NEXT: vmovapd %ymm15, %ymm9
4381 ; AVX1-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4382 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4383 ; AVX1-NEXT: vbroadcastsd 480(%rbp), %ymm10
4384 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4385 ; AVX1-NEXT: vmovapd %ymm4, %ymm3
4386 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4387 ; AVX1-NEXT: vmovapd %ymm2, %ymm15
4388 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4389 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4390 ; AVX1-NEXT: vbroadcastsd 488(%rbp), %ymm10
4391 ; AVX1-NEXT: vmovapd %ymm7, %ymm8
4392 ; AVX1-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4393 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4394 ; AVX1-NEXT: vmovapd %ymm6, %ymm7
4395 ; AVX1-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4396 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4397 ; AVX1-NEXT: vbroadcastsd 496(%rbp), %ymm10
4398 ; AVX1-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4399 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4400 ; AVX1-NEXT: vmovapd 48(%rbp), %ymm4
4401 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm10
4402 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4403 ; AVX1-NEXT: vbroadcastsd 504(%rbp), %ymm10
4404 ; AVX1-NEXT: vmovapd 112(%rbp), %ymm2
4405 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4406 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4407 ; AVX1-NEXT: vmovapd 80(%rbp), %ymm14
4408 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4409 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4410 ; AVX1-NEXT: vbroadcastsd 512(%rbp), %ymm10
4411 ; AVX1-NEXT: vmulpd 144(%rbp), %ymm10, %ymm11
4412 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4413 ; AVX1-NEXT: vmovapd 176(%rbp), %ymm2
4414 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4415 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4416 ; AVX1-NEXT: vbroadcastsd 520(%rbp), %ymm10
4417 ; AVX1-NEXT: vmulpd 240(%rbp), %ymm10, %ymm11
4418 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4419 ; AVX1-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4420 ; AVX1-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4421 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4422 ; AVX1-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4423 ; AVX1-NEXT: vbroadcastsd 528(%rbp), %ymm0
4424 ; AVX1-NEXT: vmulpd %ymm0, %ymm13, %ymm1
4425 ; AVX1-NEXT: vbroadcastsd 536(%rbp), %ymm10
4426 ; AVX1-NEXT: vmulpd %ymm5, %ymm10, %ymm11
4427 ; AVX1-NEXT: vmovapd %ymm5, %ymm6
4428 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4429 ; AVX1-NEXT: vmulpd %ymm0, %ymm9, %ymm0
4430 ; AVX1-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4431 ; AVX1-NEXT: vmovapd %ymm12, %ymm5
4432 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4433 ; AVX1-NEXT: vbroadcastsd 544(%rbp), %ymm10
4434 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4435 ; AVX1-NEXT: vmovapd %ymm3, %ymm12
4436 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4437 ; AVX1-NEXT: vmulpd %ymm10, %ymm15, %ymm10
4438 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4439 ; AVX1-NEXT: vbroadcastsd 552(%rbp), %ymm10
4440 ; AVX1-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4441 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4442 ; AVX1-NEXT: vmulpd %ymm7, %ymm10, %ymm10
4443 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4444 ; AVX1-NEXT: vbroadcastsd 560(%rbp), %ymm10
4445 ; AVX1-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4446 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4447 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm10
4448 ; AVX1-NEXT: vmovapd %ymm4, %ymm3
4449 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4450 ; AVX1-NEXT: vbroadcastsd 568(%rbp), %ymm10
4451 ; AVX1-NEXT: vmulpd 112(%rbp), %ymm10, %ymm11
4452 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4453 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4454 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4455 ; AVX1-NEXT: vbroadcastsd 576(%rbp), %ymm10
4456 ; AVX1-NEXT: vmovapd 144(%rbp), %ymm4
4457 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4458 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4459 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4460 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4461 ; AVX1-NEXT: vbroadcastsd 584(%rbp), %ymm10
4462 ; AVX1-NEXT: vmovapd 240(%rbp), %ymm14
4463 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm11
4464 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4465 ; AVX1-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4466 ; AVX1-NEXT: vmovapd 208(%rbp), %ymm2
4467 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm1
4468 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4469 ; AVX1-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4470 ; AVX1-NEXT: vbroadcastsd 592(%rbp), %ymm0
4471 ; AVX1-NEXT: vmulpd %ymm0, %ymm13, %ymm1
4472 ; AVX1-NEXT: vbroadcastsd 600(%rbp), %ymm10
4473 ; AVX1-NEXT: vmulpd %ymm6, %ymm10, %ymm11
4474 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4475 ; AVX1-NEXT: vmulpd %ymm0, %ymm9, %ymm0
4476 ; AVX1-NEXT: vmulpd %ymm5, %ymm10, %ymm10
4477 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4478 ; AVX1-NEXT: vbroadcastsd 608(%rbp), %ymm10
4479 ; AVX1-NEXT: vmulpd %ymm10, %ymm12, %ymm11
4480 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4481 ; AVX1-NEXT: vmulpd %ymm10, %ymm15, %ymm10
4482 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4483 ; AVX1-NEXT: vbroadcastsd 616(%rbp), %ymm10
4484 ; AVX1-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4485 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4486 ; AVX1-NEXT: vmulpd %ymm7, %ymm10, %ymm10
4487 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4488 ; AVX1-NEXT: vbroadcastsd 624(%rbp), %ymm10
4489 ; AVX1-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4490 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4491 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4492 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4493 ; AVX1-NEXT: vbroadcastsd 632(%rbp), %ymm10
4494 ; AVX1-NEXT: vmovapd 112(%rbp), %ymm3
4495 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4496 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4497 ; AVX1-NEXT: vmovapd 80(%rbp), %ymm3
4498 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4499 ; AVX1-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4500 ; AVX1-NEXT: vbroadcastsd 640(%rbp), %ymm10
4501 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4502 ; AVX1-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4503 ; AVX1-NEXT: vmovapd 176(%rbp), %ymm3
4504 ; AVX1-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4505 ; AVX1-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4506 ; AVX1-NEXT: vbroadcastsd 648(%rbp), %ymm10
4507 ; AVX1-NEXT: vmovapd %ymm14, %ymm4
4508 ; AVX1-NEXT: vmulpd %ymm10, %ymm14, %ymm11
4509 ; AVX1-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4510 ; AVX1-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4511 ; AVX1-NEXT: vmulpd %ymm2, %ymm10, %ymm1
4512 ; AVX1-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4513 ; AVX1-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4514 ; AVX1-NEXT: vbroadcastsd 656(%rbp), %ymm2
4515 ; AVX1-NEXT: vmovapd %ymm13, %ymm3
4516 ; AVX1-NEXT: vmulpd %ymm2, %ymm13, %ymm1
4517 ; AVX1-NEXT: vbroadcastsd 664(%rbp), %ymm0
4518 ; AVX1-NEXT: vmulpd %ymm0, %ymm6, %ymm14
4519 ; AVX1-NEXT: vmovapd %ymm6, %ymm10
4520 ; AVX1-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4521 ; AVX1-NEXT: vmulpd %ymm2, %ymm9, %ymm2
4522 ; AVX1-NEXT: vmulpd %ymm0, %ymm5, %ymm0
4523 ; AVX1-NEXT: vmovapd %ymm5, %ymm6
4524 ; AVX1-NEXT: vaddpd %ymm0, %ymm2, %ymm0
4525 ; AVX1-NEXT: vbroadcastsd 672(%rbp), %ymm2
4526 ; AVX1-NEXT: vmulpd %ymm2, %ymm12, %ymm14
4527 ; AVX1-NEXT: vaddpd %ymm0, %ymm14, %ymm0
4528 ; AVX1-NEXT: vmulpd %ymm2, %ymm15, %ymm2
4529 ; AVX1-NEXT: vaddpd %ymm2, %ymm1, %ymm1
4530 ; AVX1-NEXT: vbroadcastsd 680(%rbp), %ymm2
4531 ; AVX1-NEXT: vmulpd %ymm2, %ymm8, %ymm14
4532 ; AVX1-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4533 ; AVX1-NEXT: vmulpd %ymm2, %ymm7, %ymm2
4534 ; AVX1-NEXT: vaddpd %ymm2, %ymm0, %ymm0
4535 ; AVX1-NEXT: vbroadcastsd 688(%rbp), %ymm2
4536 ; AVX1-NEXT: vmovapd 16(%rbp), %ymm11
4537 ; AVX1-NEXT: vmulpd %ymm2, %ymm11, %ymm14
4538 ; AVX1-NEXT: vaddpd %ymm0, %ymm14, %ymm0
4539 ; AVX1-NEXT: vmulpd 48(%rbp), %ymm2, %ymm2
4540 ; AVX1-NEXT: vaddpd %ymm2, %ymm1, %ymm1
4541 ; AVX1-NEXT: vbroadcastsd 696(%rbp), %ymm2
4542 ; AVX1-NEXT: vmovapd 112(%rbp), %ymm5
4543 ; AVX1-NEXT: vmulpd %ymm2, %ymm5, %ymm14
4544 ; AVX1-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4545 ; AVX1-NEXT: vmovapd 80(%rbp), %ymm5
4546 ; AVX1-NEXT: vmulpd %ymm2, %ymm5, %ymm2
4547 ; AVX1-NEXT: vaddpd %ymm2, %ymm0, %ymm0
4548 ; AVX1-NEXT: vbroadcastsd 704(%rbp), %ymm2
4549 ; AVX1-NEXT: vmulpd 144(%rbp), %ymm2, %ymm14
4550 ; AVX1-NEXT: vaddpd %ymm0, %ymm14, %ymm0
4551 ; AVX1-NEXT: vmovapd 176(%rbp), %ymm13
4552 ; AVX1-NEXT: vmulpd %ymm2, %ymm13, %ymm2
4553 ; AVX1-NEXT: vaddpd %ymm2, %ymm1, %ymm1
4554 ; AVX1-NEXT: vbroadcastsd 712(%rbp), %ymm2
4555 ; AVX1-NEXT: vmulpd %ymm2, %ymm4, %ymm14
4556 ; AVX1-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4557 ; AVX1-NEXT: vmovapd 208(%rbp), %ymm14
4558 ; AVX1-NEXT: vmulpd %ymm2, %ymm14, %ymm2
4559 ; AVX1-NEXT: vaddpd %ymm2, %ymm0, %ymm0
4560 ; AVX1-NEXT: vbroadcastsd 720(%rbp), %ymm2
4561 ; AVX1-NEXT: vmulpd %ymm2, %ymm3, %ymm3
4562 ; AVX1-NEXT: vmulpd %ymm2, %ymm9, %ymm2
4563 ; AVX1-NEXT: vbroadcastsd 728(%rbp), %ymm4
4564 ; AVX1-NEXT: vmulpd %ymm4, %ymm10, %ymm5
4565 ; AVX1-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4566 ; AVX1-NEXT: vmulpd %ymm4, %ymm6, %ymm4
4567 ; AVX1-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4568 ; AVX1-NEXT: vbroadcastsd 736(%rbp), %ymm4
4569 ; AVX1-NEXT: vmulpd %ymm4, %ymm12, %ymm5
4570 ; AVX1-NEXT: vaddpd %ymm5, %ymm2, %ymm2
4571 ; AVX1-NEXT: vmulpd %ymm4, %ymm15, %ymm4
4572 ; AVX1-NEXT: vaddpd %ymm4, %ymm3, %ymm3
4573 ; AVX1-NEXT: vbroadcastsd 744(%rbp), %ymm4
4574 ; AVX1-NEXT: vmulpd %ymm4, %ymm8, %ymm5
4575 ; AVX1-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4576 ; AVX1-NEXT: vmulpd %ymm4, %ymm7, %ymm4
4577 ; AVX1-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4578 ; AVX1-NEXT: vbroadcastsd 752(%rbp), %ymm4
4579 ; AVX1-NEXT: vmulpd %ymm4, %ymm11, %ymm5
4580 ; AVX1-NEXT: vaddpd %ymm5, %ymm2, %ymm2
4581 ; AVX1-NEXT: vmulpd 48(%rbp), %ymm4, %ymm4
4582 ; AVX1-NEXT: vaddpd %ymm4, %ymm3, %ymm3
4583 ; AVX1-NEXT: vbroadcastsd 760(%rbp), %ymm4
4584 ; AVX1-NEXT: vmulpd 112(%rbp), %ymm4, %ymm5
4585 ; AVX1-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4586 ; AVX1-NEXT: vmulpd 80(%rbp), %ymm4, %ymm4
4587 ; AVX1-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4588 ; AVX1-NEXT: vbroadcastsd 768(%rbp), %ymm4
4589 ; AVX1-NEXT: vmulpd 144(%rbp), %ymm4, %ymm5
4590 ; AVX1-NEXT: vaddpd %ymm5, %ymm2, %ymm2
4591 ; AVX1-NEXT: vmulpd %ymm4, %ymm13, %ymm4
4592 ; AVX1-NEXT: vaddpd %ymm4, %ymm3, %ymm3
4593 ; AVX1-NEXT: vbroadcastsd 776(%rbp), %ymm4
4594 ; AVX1-NEXT: vmulpd 240(%rbp), %ymm4, %ymm5
4595 ; AVX1-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4596 ; AVX1-NEXT: vmulpd %ymm4, %ymm14, %ymm4
4597 ; AVX1-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4598 ; AVX1-NEXT: vmovapd %ymm3, 480(%rdi)
4599 ; AVX1-NEXT: vmovapd %ymm2, 448(%rdi)
4600 ; AVX1-NEXT: vmovapd %ymm1, 416(%rdi)
4601 ; AVX1-NEXT: vmovapd %ymm0, 384(%rdi)
4602 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4603 ; AVX1-NEXT: vmovaps %ymm0, 352(%rdi)
4604 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4605 ; AVX1-NEXT: vmovaps %ymm0, 320(%rdi)
4606 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4607 ; AVX1-NEXT: vmovaps %ymm0, 288(%rdi)
4608 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4609 ; AVX1-NEXT: vmovaps %ymm0, 256(%rdi)
4610 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4611 ; AVX1-NEXT: vmovaps %ymm0, 224(%rdi)
4612 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4613 ; AVX1-NEXT: vmovaps %ymm0, 192(%rdi)
4614 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4615 ; AVX1-NEXT: vmovaps %ymm0, 160(%rdi)
4616 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4617 ; AVX1-NEXT: vmovaps %ymm0, 128(%rdi)
4618 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4619 ; AVX1-NEXT: vmovaps %ymm0, 96(%rdi)
4620 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4621 ; AVX1-NEXT: vmovaps %ymm0, 64(%rdi)
4622 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4623 ; AVX1-NEXT: vmovaps %ymm0, 32(%rdi)
4624 ; AVX1-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
4625 ; AVX1-NEXT: vmovaps %ymm0, (%rdi)
4626 ; AVX1-NEXT: movq %rbp, %rsp
4627 ; AVX1-NEXT: popq %rbp
4628 ; AVX1-NEXT: vzeroupper
4629 ; AVX1-NEXT: retq
4630 ;
4631 ; AVX2-LABEL: test_mul8x8_f64:
4632 ; AVX2: # %bb.0: # %entry
4633 ; AVX2-NEXT: pushq %rbp
4634 ; AVX2-NEXT: movq %rsp, %rbp
4635 ; AVX2-NEXT: andq $-32, %rsp
4636 ; AVX2-NEXT: subq $448, %rsp # imm = 0x1C0
4637 ; AVX2-NEXT: vmovapd %ymm2, %ymm12
4638 ; AVX2-NEXT: vmovapd %ymm0, (%rsp) # 32-byte Spill
4639 ; AVX2-NEXT: movq %rdi, %rax
4640 ; AVX2-NEXT: vmovapd 144(%rbp), %ymm2
4641 ; AVX2-NEXT: vmovapd 112(%rbp), %ymm13
4642 ; AVX2-NEXT: vbroadcastsd 272(%rbp), %ymm10
4643 ; AVX2-NEXT: vmulpd %ymm1, %ymm10, %ymm8
4644 ; AVX2-NEXT: vmovapd %ymm1, %ymm9
4645 ; AVX2-NEXT: vmulpd %ymm0, %ymm10, %ymm0
4646 ; AVX2-NEXT: vbroadcastsd 280(%rbp), %ymm10
4647 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4648 ; AVX2-NEXT: vaddpd %ymm11, %ymm8, %ymm1
4649 ; AVX2-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4650 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4651 ; AVX2-NEXT: vbroadcastsd 288(%rbp), %ymm10
4652 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4653 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4654 ; AVX2-NEXT: vmulpd %ymm5, %ymm10, %ymm10
4655 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4656 ; AVX2-NEXT: vbroadcastsd 296(%rbp), %ymm10
4657 ; AVX2-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4658 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4659 ; AVX2-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4660 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4661 ; AVX2-NEXT: vbroadcastsd 304(%rbp), %ymm10
4662 ; AVX2-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4663 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4664 ; AVX2-NEXT: vmulpd 48(%rbp), %ymm10, %ymm10
4665 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4666 ; AVX2-NEXT: vbroadcastsd 312(%rbp), %ymm10
4667 ; AVX2-NEXT: vmulpd %ymm10, %ymm13, %ymm11
4668 ; AVX2-NEXT: vmovapd %ymm13, %ymm14
4669 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4670 ; AVX2-NEXT: vmulpd 80(%rbp), %ymm10, %ymm10
4671 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4672 ; AVX2-NEXT: vbroadcastsd 320(%rbp), %ymm10
4673 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4674 ; AVX2-NEXT: vmovapd %ymm2, %ymm13
4675 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4676 ; AVX2-NEXT: vmulpd 176(%rbp), %ymm10, %ymm10
4677 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4678 ; AVX2-NEXT: vbroadcastsd 328(%rbp), %ymm10
4679 ; AVX2-NEXT: vmulpd 240(%rbp), %ymm10, %ymm11
4680 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4681 ; AVX2-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4682 ; AVX2-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4683 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4684 ; AVX2-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4685 ; AVX2-NEXT: vbroadcastsd 336(%rbp), %ymm0
4686 ; AVX2-NEXT: vmulpd %ymm0, %ymm9, %ymm1
4687 ; AVX2-NEXT: vbroadcastsd 344(%rbp), %ymm10
4688 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4689 ; AVX2-NEXT: vmovapd %ymm3, %ymm8
4690 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4691 ; AVX2-NEXT: vmovapd (%rsp), %ymm15 # 32-byte Reload
4692 ; AVX2-NEXT: vmulpd %ymm0, %ymm15, %ymm0
4693 ; AVX2-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4694 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4695 ; AVX2-NEXT: vbroadcastsd 352(%rbp), %ymm10
4696 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4697 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4698 ; AVX2-NEXT: vmulpd %ymm5, %ymm10, %ymm10
4699 ; AVX2-NEXT: vmovapd %ymm5, %ymm3
4700 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4701 ; AVX2-NEXT: vbroadcastsd 360(%rbp), %ymm10
4702 ; AVX2-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4703 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4704 ; AVX2-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4705 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4706 ; AVX2-NEXT: vbroadcastsd 368(%rbp), %ymm10
4707 ; AVX2-NEXT: vmovapd 16(%rbp), %ymm2
4708 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4709 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4710 ; AVX2-NEXT: vmulpd 48(%rbp), %ymm10, %ymm10
4711 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4712 ; AVX2-NEXT: vbroadcastsd 376(%rbp), %ymm10
4713 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm11
4714 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4715 ; AVX2-NEXT: vmovapd 80(%rbp), %ymm2
4716 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4717 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4718 ; AVX2-NEXT: vbroadcastsd 384(%rbp), %ymm10
4719 ; AVX2-NEXT: vmulpd %ymm10, %ymm13, %ymm11
4720 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4721 ; AVX2-NEXT: vmovapd 176(%rbp), %ymm14
4722 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4723 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4724 ; AVX2-NEXT: vbroadcastsd 392(%rbp), %ymm10
4725 ; AVX2-NEXT: vmovapd 240(%rbp), %ymm2
4726 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4727 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4728 ; AVX2-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4729 ; AVX2-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4730 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4731 ; AVX2-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4732 ; AVX2-NEXT: vbroadcastsd 400(%rbp), %ymm0
4733 ; AVX2-NEXT: vmulpd %ymm0, %ymm9, %ymm1
4734 ; AVX2-NEXT: vbroadcastsd 408(%rbp), %ymm10
4735 ; AVX2-NEXT: vmovapd %ymm8, %ymm5
4736 ; AVX2-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4737 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4738 ; AVX2-NEXT: vmulpd %ymm0, %ymm15, %ymm0
4739 ; AVX2-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4740 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4741 ; AVX2-NEXT: vbroadcastsd 416(%rbp), %ymm10
4742 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4743 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4744 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4745 ; AVX2-NEXT: vmovapd %ymm3, %ymm2
4746 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4747 ; AVX2-NEXT: vbroadcastsd 424(%rbp), %ymm10
4748 ; AVX2-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4749 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4750 ; AVX2-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4751 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4752 ; AVX2-NEXT: vbroadcastsd 432(%rbp), %ymm10
4753 ; AVX2-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4754 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4755 ; AVX2-NEXT: vmulpd 48(%rbp), %ymm10, %ymm10
4756 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4757 ; AVX2-NEXT: vbroadcastsd 440(%rbp), %ymm10
4758 ; AVX2-NEXT: vmulpd 112(%rbp), %ymm10, %ymm11
4759 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4760 ; AVX2-NEXT: vmulpd 80(%rbp), %ymm10, %ymm10
4761 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4762 ; AVX2-NEXT: vbroadcastsd 448(%rbp), %ymm10
4763 ; AVX2-NEXT: vmulpd %ymm10, %ymm13, %ymm11
4764 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4765 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4766 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4767 ; AVX2-NEXT: vbroadcastsd 456(%rbp), %ymm10
4768 ; AVX2-NEXT: vmulpd 240(%rbp), %ymm10, %ymm11
4769 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4770 ; AVX2-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4771 ; AVX2-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4772 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4773 ; AVX2-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4774 ; AVX2-NEXT: vbroadcastsd 464(%rbp), %ymm0
4775 ; AVX2-NEXT: vmulpd %ymm0, %ymm9, %ymm1
4776 ; AVX2-NEXT: vmovapd %ymm9, %ymm13
4777 ; AVX2-NEXT: vbroadcastsd 472(%rbp), %ymm10
4778 ; AVX2-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4779 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4780 ; AVX2-NEXT: vmulpd %ymm0, %ymm15, %ymm0
4781 ; AVX2-NEXT: vmovapd %ymm15, %ymm9
4782 ; AVX2-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4783 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4784 ; AVX2-NEXT: vbroadcastsd 480(%rbp), %ymm10
4785 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4786 ; AVX2-NEXT: vmovapd %ymm4, %ymm3
4787 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4788 ; AVX2-NEXT: vmovapd %ymm2, %ymm15
4789 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4790 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4791 ; AVX2-NEXT: vbroadcastsd 488(%rbp), %ymm10
4792 ; AVX2-NEXT: vmovapd %ymm7, %ymm8
4793 ; AVX2-NEXT: vmulpd %ymm7, %ymm10, %ymm11
4794 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4795 ; AVX2-NEXT: vmovapd %ymm6, %ymm7
4796 ; AVX2-NEXT: vmulpd %ymm6, %ymm10, %ymm10
4797 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4798 ; AVX2-NEXT: vbroadcastsd 496(%rbp), %ymm10
4799 ; AVX2-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4800 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4801 ; AVX2-NEXT: vmovapd 48(%rbp), %ymm4
4802 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm10
4803 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4804 ; AVX2-NEXT: vbroadcastsd 504(%rbp), %ymm10
4805 ; AVX2-NEXT: vmovapd 112(%rbp), %ymm2
4806 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm11
4807 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4808 ; AVX2-NEXT: vmovapd 80(%rbp), %ymm14
4809 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4810 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4811 ; AVX2-NEXT: vbroadcastsd 512(%rbp), %ymm10
4812 ; AVX2-NEXT: vmulpd 144(%rbp), %ymm10, %ymm11
4813 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4814 ; AVX2-NEXT: vmovapd 176(%rbp), %ymm2
4815 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4816 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4817 ; AVX2-NEXT: vbroadcastsd 520(%rbp), %ymm10
4818 ; AVX2-NEXT: vmulpd 240(%rbp), %ymm10, %ymm11
4819 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4820 ; AVX2-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4821 ; AVX2-NEXT: vmulpd 208(%rbp), %ymm10, %ymm1
4822 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4823 ; AVX2-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4824 ; AVX2-NEXT: vbroadcastsd 528(%rbp), %ymm0
4825 ; AVX2-NEXT: vmulpd %ymm0, %ymm13, %ymm1
4826 ; AVX2-NEXT: vbroadcastsd 536(%rbp), %ymm10
4827 ; AVX2-NEXT: vmulpd %ymm5, %ymm10, %ymm11
4828 ; AVX2-NEXT: vmovapd %ymm5, %ymm6
4829 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4830 ; AVX2-NEXT: vmulpd %ymm0, %ymm9, %ymm0
4831 ; AVX2-NEXT: vmulpd %ymm10, %ymm12, %ymm10
4832 ; AVX2-NEXT: vmovapd %ymm12, %ymm5
4833 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4834 ; AVX2-NEXT: vbroadcastsd 544(%rbp), %ymm10
4835 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4836 ; AVX2-NEXT: vmovapd %ymm3, %ymm12
4837 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4838 ; AVX2-NEXT: vmulpd %ymm10, %ymm15, %ymm10
4839 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4840 ; AVX2-NEXT: vbroadcastsd 552(%rbp), %ymm10
4841 ; AVX2-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4842 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4843 ; AVX2-NEXT: vmulpd %ymm7, %ymm10, %ymm10
4844 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4845 ; AVX2-NEXT: vbroadcastsd 560(%rbp), %ymm10
4846 ; AVX2-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4847 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4848 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm10
4849 ; AVX2-NEXT: vmovapd %ymm4, %ymm3
4850 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4851 ; AVX2-NEXT: vbroadcastsd 568(%rbp), %ymm10
4852 ; AVX2-NEXT: vmulpd 112(%rbp), %ymm10, %ymm11
4853 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4854 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm10
4855 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4856 ; AVX2-NEXT: vbroadcastsd 576(%rbp), %ymm10
4857 ; AVX2-NEXT: vmovapd 144(%rbp), %ymm4
4858 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4859 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4860 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm10
4861 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4862 ; AVX2-NEXT: vbroadcastsd 584(%rbp), %ymm10
4863 ; AVX2-NEXT: vmovapd 240(%rbp), %ymm14
4864 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm11
4865 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4866 ; AVX2-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4867 ; AVX2-NEXT: vmovapd 208(%rbp), %ymm2
4868 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm1
4869 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4870 ; AVX2-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4871 ; AVX2-NEXT: vbroadcastsd 592(%rbp), %ymm0
4872 ; AVX2-NEXT: vmulpd %ymm0, %ymm13, %ymm1
4873 ; AVX2-NEXT: vbroadcastsd 600(%rbp), %ymm10
4874 ; AVX2-NEXT: vmulpd %ymm6, %ymm10, %ymm11
4875 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4876 ; AVX2-NEXT: vmulpd %ymm0, %ymm9, %ymm0
4877 ; AVX2-NEXT: vmulpd %ymm5, %ymm10, %ymm10
4878 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4879 ; AVX2-NEXT: vbroadcastsd 608(%rbp), %ymm10
4880 ; AVX2-NEXT: vmulpd %ymm10, %ymm12, %ymm11
4881 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4882 ; AVX2-NEXT: vmulpd %ymm10, %ymm15, %ymm10
4883 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4884 ; AVX2-NEXT: vbroadcastsd 616(%rbp), %ymm10
4885 ; AVX2-NEXT: vmulpd %ymm10, %ymm8, %ymm11
4886 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4887 ; AVX2-NEXT: vmulpd %ymm7, %ymm10, %ymm10
4888 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4889 ; AVX2-NEXT: vbroadcastsd 624(%rbp), %ymm10
4890 ; AVX2-NEXT: vmulpd 16(%rbp), %ymm10, %ymm11
4891 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4892 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4893 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4894 ; AVX2-NEXT: vbroadcastsd 632(%rbp), %ymm10
4895 ; AVX2-NEXT: vmovapd 112(%rbp), %ymm3
4896 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm11
4897 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4898 ; AVX2-NEXT: vmovapd 80(%rbp), %ymm3
4899 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4900 ; AVX2-NEXT: vaddpd %ymm0, %ymm10, %ymm0
4901 ; AVX2-NEXT: vbroadcastsd 640(%rbp), %ymm10
4902 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm11
4903 ; AVX2-NEXT: vaddpd %ymm0, %ymm11, %ymm0
4904 ; AVX2-NEXT: vmovapd 176(%rbp), %ymm3
4905 ; AVX2-NEXT: vmulpd %ymm3, %ymm10, %ymm10
4906 ; AVX2-NEXT: vaddpd %ymm1, %ymm10, %ymm1
4907 ; AVX2-NEXT: vbroadcastsd 648(%rbp), %ymm10
4908 ; AVX2-NEXT: vmovapd %ymm14, %ymm4
4909 ; AVX2-NEXT: vmulpd %ymm10, %ymm14, %ymm11
4910 ; AVX2-NEXT: vaddpd %ymm1, %ymm11, %ymm1
4911 ; AVX2-NEXT: vmovapd %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4912 ; AVX2-NEXT: vmulpd %ymm2, %ymm10, %ymm1
4913 ; AVX2-NEXT: vaddpd %ymm1, %ymm0, %ymm0
4914 ; AVX2-NEXT: vmovapd %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
4915 ; AVX2-NEXT: vbroadcastsd 656(%rbp), %ymm2
4916 ; AVX2-NEXT: vmovapd %ymm13, %ymm3
4917 ; AVX2-NEXT: vmulpd %ymm2, %ymm13, %ymm1
4918 ; AVX2-NEXT: vbroadcastsd 664(%rbp), %ymm0
4919 ; AVX2-NEXT: vmulpd %ymm0, %ymm6, %ymm14
4920 ; AVX2-NEXT: vmovapd %ymm6, %ymm10
4921 ; AVX2-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4922 ; AVX2-NEXT: vmulpd %ymm2, %ymm9, %ymm2
4923 ; AVX2-NEXT: vmulpd %ymm0, %ymm5, %ymm0
4924 ; AVX2-NEXT: vmovapd %ymm5, %ymm6
4925 ; AVX2-NEXT: vaddpd %ymm0, %ymm2, %ymm0
4926 ; AVX2-NEXT: vbroadcastsd 672(%rbp), %ymm2
4927 ; AVX2-NEXT: vmulpd %ymm2, %ymm12, %ymm14
4928 ; AVX2-NEXT: vaddpd %ymm0, %ymm14, %ymm0
4929 ; AVX2-NEXT: vmulpd %ymm2, %ymm15, %ymm2
4930 ; AVX2-NEXT: vaddpd %ymm2, %ymm1, %ymm1
4931 ; AVX2-NEXT: vbroadcastsd 680(%rbp), %ymm2
4932 ; AVX2-NEXT: vmulpd %ymm2, %ymm8, %ymm14
4933 ; AVX2-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4934 ; AVX2-NEXT: vmulpd %ymm2, %ymm7, %ymm2
4935 ; AVX2-NEXT: vaddpd %ymm2, %ymm0, %ymm0
4936 ; AVX2-NEXT: vbroadcastsd 688(%rbp), %ymm2
4937 ; AVX2-NEXT: vmovapd 16(%rbp), %ymm11
4938 ; AVX2-NEXT: vmulpd %ymm2, %ymm11, %ymm14
4939 ; AVX2-NEXT: vaddpd %ymm0, %ymm14, %ymm0
4940 ; AVX2-NEXT: vmulpd 48(%rbp), %ymm2, %ymm2
4941 ; AVX2-NEXT: vaddpd %ymm2, %ymm1, %ymm1
4942 ; AVX2-NEXT: vbroadcastsd 696(%rbp), %ymm2
4943 ; AVX2-NEXT: vmovapd 112(%rbp), %ymm5
4944 ; AVX2-NEXT: vmulpd %ymm2, %ymm5, %ymm14
4945 ; AVX2-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4946 ; AVX2-NEXT: vmovapd 80(%rbp), %ymm5
4947 ; AVX2-NEXT: vmulpd %ymm2, %ymm5, %ymm2
4948 ; AVX2-NEXT: vaddpd %ymm2, %ymm0, %ymm0
4949 ; AVX2-NEXT: vbroadcastsd 704(%rbp), %ymm2
4950 ; AVX2-NEXT: vmulpd 144(%rbp), %ymm2, %ymm14
4951 ; AVX2-NEXT: vaddpd %ymm0, %ymm14, %ymm0
4952 ; AVX2-NEXT: vmovapd 176(%rbp), %ymm13
4953 ; AVX2-NEXT: vmulpd %ymm2, %ymm13, %ymm2
4954 ; AVX2-NEXT: vaddpd %ymm2, %ymm1, %ymm1
4955 ; AVX2-NEXT: vbroadcastsd 712(%rbp), %ymm2
4956 ; AVX2-NEXT: vmulpd %ymm2, %ymm4, %ymm14
4957 ; AVX2-NEXT: vaddpd %ymm1, %ymm14, %ymm1
4958 ; AVX2-NEXT: vmovapd 208(%rbp), %ymm14
4959 ; AVX2-NEXT: vmulpd %ymm2, %ymm14, %ymm2
4960 ; AVX2-NEXT: vaddpd %ymm2, %ymm0, %ymm0
4961 ; AVX2-NEXT: vbroadcastsd 720(%rbp), %ymm2
4962 ; AVX2-NEXT: vmulpd %ymm2, %ymm3, %ymm3
4963 ; AVX2-NEXT: vmulpd %ymm2, %ymm9, %ymm2
4964 ; AVX2-NEXT: vbroadcastsd 728(%rbp), %ymm4
4965 ; AVX2-NEXT: vmulpd %ymm4, %ymm10, %ymm5
4966 ; AVX2-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4967 ; AVX2-NEXT: vmulpd %ymm4, %ymm6, %ymm4
4968 ; AVX2-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4969 ; AVX2-NEXT: vbroadcastsd 736(%rbp), %ymm4
4970 ; AVX2-NEXT: vmulpd %ymm4, %ymm12, %ymm5
4971 ; AVX2-NEXT: vaddpd %ymm5, %ymm2, %ymm2
4972 ; AVX2-NEXT: vmulpd %ymm4, %ymm15, %ymm4
4973 ; AVX2-NEXT: vaddpd %ymm4, %ymm3, %ymm3
4974 ; AVX2-NEXT: vbroadcastsd 744(%rbp), %ymm4
4975 ; AVX2-NEXT: vmulpd %ymm4, %ymm8, %ymm5
4976 ; AVX2-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4977 ; AVX2-NEXT: vmulpd %ymm4, %ymm7, %ymm4
4978 ; AVX2-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4979 ; AVX2-NEXT: vbroadcastsd 752(%rbp), %ymm4
4980 ; AVX2-NEXT: vmulpd %ymm4, %ymm11, %ymm5
4981 ; AVX2-NEXT: vaddpd %ymm5, %ymm2, %ymm2
4982 ; AVX2-NEXT: vmulpd 48(%rbp), %ymm4, %ymm4
4983 ; AVX2-NEXT: vaddpd %ymm4, %ymm3, %ymm3
4984 ; AVX2-NEXT: vbroadcastsd 760(%rbp), %ymm4
4985 ; AVX2-NEXT: vmulpd 112(%rbp), %ymm4, %ymm5
4986 ; AVX2-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4987 ; AVX2-NEXT: vmulpd 80(%rbp), %ymm4, %ymm4
4988 ; AVX2-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4989 ; AVX2-NEXT: vbroadcastsd 768(%rbp), %ymm4
4990 ; AVX2-NEXT: vmulpd 144(%rbp), %ymm4, %ymm5
4991 ; AVX2-NEXT: vaddpd %ymm5, %ymm2, %ymm2
4992 ; AVX2-NEXT: vmulpd %ymm4, %ymm13, %ymm4
4993 ; AVX2-NEXT: vaddpd %ymm4, %ymm3, %ymm3
4994 ; AVX2-NEXT: vbroadcastsd 776(%rbp), %ymm4
4995 ; AVX2-NEXT: vmulpd 240(%rbp), %ymm4, %ymm5
4996 ; AVX2-NEXT: vaddpd %ymm5, %ymm3, %ymm3
4997 ; AVX2-NEXT: vmulpd %ymm4, %ymm14, %ymm4
4998 ; AVX2-NEXT: vaddpd %ymm4, %ymm2, %ymm2
4999 ; AVX2-NEXT: vmovapd %ymm3, 480(%rdi)
5000 ; AVX2-NEXT: vmovapd %ymm2, 448(%rdi)
5001 ; AVX2-NEXT: vmovapd %ymm1, 416(%rdi)
5002 ; AVX2-NEXT: vmovapd %ymm0, 384(%rdi)
5003 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5004 ; AVX2-NEXT: vmovaps %ymm0, 352(%rdi)
5005 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5006 ; AVX2-NEXT: vmovaps %ymm0, 320(%rdi)
5007 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5008 ; AVX2-NEXT: vmovaps %ymm0, 288(%rdi)
5009 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5010 ; AVX2-NEXT: vmovaps %ymm0, 256(%rdi)
5011 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5012 ; AVX2-NEXT: vmovaps %ymm0, 224(%rdi)
5013 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5014 ; AVX2-NEXT: vmovaps %ymm0, 192(%rdi)
5015 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5016 ; AVX2-NEXT: vmovaps %ymm0, 160(%rdi)
5017 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5018 ; AVX2-NEXT: vmovaps %ymm0, 128(%rdi)
5019 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5020 ; AVX2-NEXT: vmovaps %ymm0, 96(%rdi)
5021 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5022 ; AVX2-NEXT: vmovaps %ymm0, 64(%rdi)
5023 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5024 ; AVX2-NEXT: vmovaps %ymm0, 32(%rdi)
5025 ; AVX2-NEXT: vmovaps {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
5026 ; AVX2-NEXT: vmovaps %ymm0, (%rdi)
5027 ; AVX2-NEXT: movq %rbp, %rsp
5028 ; AVX2-NEXT: popq %rbp
5029 ; AVX2-NEXT: vzeroupper
5030 ; AVX2-NEXT: retq
5031 ;
5032 ; AVX512-LABEL: test_mul8x8_f64:
5033 ; AVX512: # %bb.0: # %entry
5034 ; AVX512-NEXT: pushq %rbp
5035 ; AVX512-NEXT: movq %rsp, %rbp
5036 ; AVX512-NEXT: andq $-64, %rsp
5037 ; AVX512-NEXT: subq $64, %rsp
5038 ; AVX512-NEXT: movq %rdi, %rax
5039 ; AVX512-NEXT: vmulpd 16(%rbp){1to8}, %zmm0, %zmm8
5040 ; AVX512-NEXT: vmulpd 24(%rbp){1to8}, %zmm1, %zmm9
5041 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5042 ; AVX512-NEXT: vmulpd 32(%rbp){1to8}, %zmm2, %zmm9
5043 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5044 ; AVX512-NEXT: vmulpd 40(%rbp){1to8}, %zmm3, %zmm9
5045 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5046 ; AVX512-NEXT: vmulpd 48(%rbp){1to8}, %zmm4, %zmm9
5047 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5048 ; AVX512-NEXT: vmulpd 56(%rbp){1to8}, %zmm5, %zmm9
5049 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5050 ; AVX512-NEXT: vmulpd 64(%rbp){1to8}, %zmm6, %zmm9
5051 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5052 ; AVX512-NEXT: vmulpd 72(%rbp){1to8}, %zmm7, %zmm9
5053 ; AVX512-NEXT: vaddpd %zmm9, %zmm8, %zmm8
5054 ; AVX512-NEXT: vmulpd 80(%rbp){1to8}, %zmm0, %zmm9
5055 ; AVX512-NEXT: vmulpd 88(%rbp){1to8}, %zmm1, %zmm10
5056 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5057 ; AVX512-NEXT: vmulpd 96(%rbp){1to8}, %zmm2, %zmm10
5058 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5059 ; AVX512-NEXT: vmulpd 104(%rbp){1to8}, %zmm3, %zmm10
5060 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5061 ; AVX512-NEXT: vmulpd 112(%rbp){1to8}, %zmm4, %zmm10
5062 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5063 ; AVX512-NEXT: vmulpd 120(%rbp){1to8}, %zmm5, %zmm10
5064 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5065 ; AVX512-NEXT: vmulpd 128(%rbp){1to8}, %zmm6, %zmm10
5066 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5067 ; AVX512-NEXT: vmulpd 136(%rbp){1to8}, %zmm7, %zmm10
5068 ; AVX512-NEXT: vaddpd %zmm10, %zmm9, %zmm9
5069 ; AVX512-NEXT: vmulpd 144(%rbp){1to8}, %zmm0, %zmm10
5070 ; AVX512-NEXT: vmulpd 152(%rbp){1to8}, %zmm1, %zmm11
5071 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5072 ; AVX512-NEXT: vmulpd 160(%rbp){1to8}, %zmm2, %zmm11
5073 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5074 ; AVX512-NEXT: vmulpd 168(%rbp){1to8}, %zmm3, %zmm11
5075 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5076 ; AVX512-NEXT: vmulpd 176(%rbp){1to8}, %zmm4, %zmm11
5077 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5078 ; AVX512-NEXT: vmulpd 184(%rbp){1to8}, %zmm5, %zmm11
5079 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5080 ; AVX512-NEXT: vmulpd 192(%rbp){1to8}, %zmm6, %zmm11
5081 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5082 ; AVX512-NEXT: vmulpd 200(%rbp){1to8}, %zmm7, %zmm11
5083 ; AVX512-NEXT: vaddpd %zmm11, %zmm10, %zmm10
5084 ; AVX512-NEXT: vmulpd 208(%rbp){1to8}, %zmm0, %zmm11
5085 ; AVX512-NEXT: vmulpd 216(%rbp){1to8}, %zmm1, %zmm12
5086 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5087 ; AVX512-NEXT: vmulpd 224(%rbp){1to8}, %zmm2, %zmm12
5088 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5089 ; AVX512-NEXT: vmulpd 232(%rbp){1to8}, %zmm3, %zmm12
5090 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5091 ; AVX512-NEXT: vmulpd 240(%rbp){1to8}, %zmm4, %zmm12
5092 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5093 ; AVX512-NEXT: vmulpd 248(%rbp){1to8}, %zmm5, %zmm12
5094 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5095 ; AVX512-NEXT: vmulpd 256(%rbp){1to8}, %zmm6, %zmm12
5096 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5097 ; AVX512-NEXT: vmulpd 264(%rbp){1to8}, %zmm7, %zmm12
5098 ; AVX512-NEXT: vaddpd %zmm12, %zmm11, %zmm11
5099 ; AVX512-NEXT: vmulpd 272(%rbp){1to8}, %zmm0, %zmm12
5100 ; AVX512-NEXT: vmulpd 280(%rbp){1to8}, %zmm1, %zmm13
5101 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5102 ; AVX512-NEXT: vmulpd 288(%rbp){1to8}, %zmm2, %zmm13
5103 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5104 ; AVX512-NEXT: vmulpd 296(%rbp){1to8}, %zmm3, %zmm13
5105 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5106 ; AVX512-NEXT: vmulpd 304(%rbp){1to8}, %zmm4, %zmm13
5107 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5108 ; AVX512-NEXT: vmulpd 312(%rbp){1to8}, %zmm5, %zmm13
5109 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5110 ; AVX512-NEXT: vmulpd 320(%rbp){1to8}, %zmm6, %zmm13
5111 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5112 ; AVX512-NEXT: vmulpd 328(%rbp){1to8}, %zmm7, %zmm13
5113 ; AVX512-NEXT: vaddpd %zmm13, %zmm12, %zmm12
5114 ; AVX512-NEXT: vmulpd 336(%rbp){1to8}, %zmm0, %zmm13
5115 ; AVX512-NEXT: vmulpd 344(%rbp){1to8}, %zmm1, %zmm14
5116 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5117 ; AVX512-NEXT: vmulpd 352(%rbp){1to8}, %zmm2, %zmm14
5118 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5119 ; AVX512-NEXT: vmulpd 360(%rbp){1to8}, %zmm3, %zmm14
5120 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5121 ; AVX512-NEXT: vmulpd 368(%rbp){1to8}, %zmm4, %zmm14
5122 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5123 ; AVX512-NEXT: vmulpd 376(%rbp){1to8}, %zmm5, %zmm14
5124 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5125 ; AVX512-NEXT: vmulpd 384(%rbp){1to8}, %zmm6, %zmm14
5126 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5127 ; AVX512-NEXT: vmulpd 392(%rbp){1to8}, %zmm7, %zmm14
5128 ; AVX512-NEXT: vaddpd %zmm14, %zmm13, %zmm13
5129 ; AVX512-NEXT: vmulpd 400(%rbp){1to8}, %zmm0, %zmm14
5130 ; AVX512-NEXT: vmulpd 408(%rbp){1to8}, %zmm1, %zmm15
5131 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5132 ; AVX512-NEXT: vmulpd 416(%rbp){1to8}, %zmm2, %zmm15
5133 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5134 ; AVX512-NEXT: vmulpd 424(%rbp){1to8}, %zmm3, %zmm15
5135 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5136 ; AVX512-NEXT: vmulpd 432(%rbp){1to8}, %zmm4, %zmm15
5137 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5138 ; AVX512-NEXT: vmulpd 440(%rbp){1to8}, %zmm5, %zmm15
5139 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5140 ; AVX512-NEXT: vmulpd 448(%rbp){1to8}, %zmm6, %zmm15
5141 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5142 ; AVX512-NEXT: vmulpd 456(%rbp){1to8}, %zmm7, %zmm15
5143 ; AVX512-NEXT: vaddpd %zmm15, %zmm14, %zmm14
5144 ; AVX512-NEXT: vmulpd 464(%rbp){1to8}, %zmm0, %zmm0
5145 ; AVX512-NEXT: vmulpd 472(%rbp){1to8}, %zmm1, %zmm1
5146 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5147 ; AVX512-NEXT: vmulpd 480(%rbp){1to8}, %zmm2, %zmm1
5148 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5149 ; AVX512-NEXT: vmulpd 488(%rbp){1to8}, %zmm3, %zmm1
5150 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5151 ; AVX512-NEXT: vmulpd 496(%rbp){1to8}, %zmm4, %zmm1
5152 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5153 ; AVX512-NEXT: vmulpd 504(%rbp){1to8}, %zmm5, %zmm1
5154 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5155 ; AVX512-NEXT: vmulpd 512(%rbp){1to8}, %zmm6, %zmm1
5156 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5157 ; AVX512-NEXT: vmulpd 520(%rbp){1to8}, %zmm7, %zmm1
5158 ; AVX512-NEXT: vaddpd %zmm1, %zmm0, %zmm0
5159 ; AVX512-NEXT: vmovapd %zmm0, 448(%rdi)
5160 ; AVX512-NEXT: vmovapd %zmm14, 384(%rdi)
5161 ; AVX512-NEXT: vmovapd %zmm13, 320(%rdi)
5162 ; AVX512-NEXT: vmovapd %zmm12, 256(%rdi)
5163 ; AVX512-NEXT: vmovapd %zmm11, 192(%rdi)
5164 ; AVX512-NEXT: vmovapd %zmm10, 128(%rdi)
5165 ; AVX512-NEXT: vmovapd %zmm9, 64(%rdi)
5166 ; AVX512-NEXT: vmovapd %zmm8, (%rdi)
5167 ; AVX512-NEXT: movq %rbp, %rsp
5168 ; AVX512-NEXT: popq %rbp
5169 ; AVX512-NEXT: vzeroupper
5170 ; AVX512-NEXT: retq
5171 entry:
5172 %split = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
5173 %split1 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
5174 %split2 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23>
5175 %split3 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
5176 %split4 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39>
5177 %split5 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47>
5178 %split6 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55>
5179 %split7 = shufflevector <64 x double> %a0, <64 x double> poison, <8 x i32> <i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
5180 %splat.splat = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> zeroinitializer
5181 %0 = fmul <8 x double> %split, %splat.splat
5182 %splat.splat18 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1, i32 1>
5183 %1 = fmul <8 x double> %split1, %splat.splat18
5184 %2 = fadd <8 x double> %0, %1
5185 %splat.splat21 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2, i32 2>
5186 %3 = fmul <8 x double> %split2, %splat.splat21
5187 %4 = fadd <8 x double> %2, %3
5188 %splat.splat24 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3, i32 3>
5189 %5 = fmul <8 x double> %split3, %splat.splat24
5190 %6 = fadd <8 x double> %4, %5
5191 %splat.splat27 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4, i32 4>
5192 %7 = fmul <8 x double> %split4, %splat.splat27
5193 %8 = fadd <8 x double> %6, %7
5194 %splat.splat30 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5, i32 5>
5195 %9 = fmul <8 x double> %split5, %splat.splat30
5196 %10 = fadd <8 x double> %8, %9
5197 %splat.splat33 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6, i32 6>
5198 %11 = fmul <8 x double> %split6, %splat.splat33
5199 %12 = fadd <8 x double> %10, %11
5200 %splat.splat36 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
5201 %13 = fmul <8 x double> %split7, %splat.splat36
5202 %14 = fadd <8 x double> %12, %13
5203 %splat.splat39 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8, i32 8>
5204 %15 = fmul <8 x double> %split, %splat.splat39
5205 %splat.splat42 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9, i32 9>
5206 %16 = fmul <8 x double> %split1, %splat.splat42
5207 %17 = fadd <8 x double> %15, %16
5208 %splat.splat45 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10, i32 10>
5209 %18 = fmul <8 x double> %split2, %splat.splat45
5210 %19 = fadd <8 x double> %17, %18
5211 %splat.splat48 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11, i32 11>
5212 %20 = fmul <8 x double> %split3, %splat.splat48
5213 %21 = fadd <8 x double> %19, %20
5214 %splat.splat51 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12, i32 12>
5215 %22 = fmul <8 x double> %split4, %splat.splat51
5216 %23 = fadd <8 x double> %21, %22
5217 %splat.splat54 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13, i32 13>
5218 %24 = fmul <8 x double> %split5, %splat.splat54
5219 %25 = fadd <8 x double> %23, %24
5220 %splat.splat57 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14, i32 14>
5221 %26 = fmul <8 x double> %split6, %splat.splat57
5222 %27 = fadd <8 x double> %25, %26
5223 %splat.splat60 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15, i32 15>
5224 %28 = fmul <8 x double> %split7, %splat.splat60
5225 %29 = fadd <8 x double> %27, %28
5226 %splat.splat63 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16, i32 16>
5227 %30 = fmul <8 x double> %split, %splat.splat63
5228 %splat.splat66 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17, i32 17>
5229 %31 = fmul <8 x double> %split1, %splat.splat66
5230 %32 = fadd <8 x double> %30, %31
5231 %splat.splat69 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18, i32 18>
5232 %33 = fmul <8 x double> %split2, %splat.splat69
5233 %34 = fadd <8 x double> %32, %33
5234 %splat.splat72 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19, i32 19>
5235 %35 = fmul <8 x double> %split3, %splat.splat72
5236 %36 = fadd <8 x double> %34, %35
5237 %splat.splat75 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20, i32 20>
5238 %37 = fmul <8 x double> %split4, %splat.splat75
5239 %38 = fadd <8 x double> %36, %37
5240 %splat.splat78 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21, i32 21>
5241 %39 = fmul <8 x double> %split5, %splat.splat78
5242 %40 = fadd <8 x double> %38, %39
5243 %splat.splat81 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22, i32 22>
5244 %41 = fmul <8 x double> %split6, %splat.splat81
5245 %42 = fadd <8 x double> %40, %41
5246 %splat.splat84 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23, i32 23>
5247 %43 = fmul <8 x double> %split7, %splat.splat84
5248 %44 = fadd <8 x double> %42, %43
5249 %splat.splat87 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24, i32 24>
5250 %45 = fmul <8 x double> %split, %splat.splat87
5251 %splat.splat90 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25, i32 25>
5252 %46 = fmul <8 x double> %split1, %splat.splat90
5253 %47 = fadd <8 x double> %45, %46
5254 %splat.splat93 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26, i32 26>
5255 %48 = fmul <8 x double> %split2, %splat.splat93
5256 %49 = fadd <8 x double> %47, %48
5257 %splat.splat96 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27, i32 27>
5258 %50 = fmul <8 x double> %split3, %splat.splat96
5259 %51 = fadd <8 x double> %49, %50
5260 %splat.splat99 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28, i32 28>
5261 %52 = fmul <8 x double> %split4, %splat.splat99
5262 %53 = fadd <8 x double> %51, %52
5263 %splat.splat102 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29, i32 29>
5264 %54 = fmul <8 x double> %split5, %splat.splat102
5265 %55 = fadd <8 x double> %53, %54
5266 %splat.splat105 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30, i32 30>
5267 %56 = fmul <8 x double> %split6, %splat.splat105
5268 %57 = fadd <8 x double> %55, %56
5269 %splat.splat108 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31, i32 31>
5270 %58 = fmul <8 x double> %split7, %splat.splat108
5271 %59 = fadd <8 x double> %57, %58
5272 %splat.splat111 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32, i32 32>
5273 %60 = fmul <8 x double> %split, %splat.splat111
5274 %splat.splat114 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33, i32 33>
5275 %61 = fmul <8 x double> %split1, %splat.splat114
5276 %62 = fadd <8 x double> %60, %61
5277 %splat.splat117 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34, i32 34>
5278 %63 = fmul <8 x double> %split2, %splat.splat117
5279 %64 = fadd <8 x double> %62, %63
5280 %splat.splat120 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35, i32 35>
5281 %65 = fmul <8 x double> %split3, %splat.splat120
5282 %66 = fadd <8 x double> %64, %65
5283 %splat.splat123 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36, i32 36>
5284 %67 = fmul <8 x double> %split4, %splat.splat123
5285 %68 = fadd <8 x double> %66, %67
5286 %splat.splat126 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37, i32 37>
5287 %69 = fmul <8 x double> %split5, %splat.splat126
5288 %70 = fadd <8 x double> %68, %69
5289 %splat.splat129 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38, i32 38>
5290 %71 = fmul <8 x double> %split6, %splat.splat129
5291 %72 = fadd <8 x double> %70, %71
5292 %splat.splat132 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39, i32 39>
5293 %73 = fmul <8 x double> %split7, %splat.splat132
5294 %74 = fadd <8 x double> %72, %73
5295 %splat.splat135 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40, i32 40>
5296 %75 = fmul <8 x double> %split, %splat.splat135
5297 %splat.splat138 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41, i32 41>
5298 %76 = fmul <8 x double> %split1, %splat.splat138
5299 %77 = fadd <8 x double> %75, %76
5300 %splat.splat141 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42, i32 42>
5301 %78 = fmul <8 x double> %split2, %splat.splat141
5302 %79 = fadd <8 x double> %77, %78
5303 %splat.splat144 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43, i32 43>
5304 %80 = fmul <8 x double> %split3, %splat.splat144
5305 %81 = fadd <8 x double> %79, %80
5306 %splat.splat147 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44, i32 44>
5307 %82 = fmul <8 x double> %split4, %splat.splat147
5308 %83 = fadd <8 x double> %81, %82
5309 %splat.splat150 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45, i32 45>
5310 %84 = fmul <8 x double> %split5, %splat.splat150
5311 %85 = fadd <8 x double> %83, %84
5312 %splat.splat153 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46, i32 46>
5313 %86 = fmul <8 x double> %split6, %splat.splat153
5314 %87 = fadd <8 x double> %85, %86
5315 %splat.splat156 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47, i32 47>
5316 %88 = fmul <8 x double> %split7, %splat.splat156
5317 %89 = fadd <8 x double> %87, %88
5318 %splat.splat159 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48, i32 48>
5319 %90 = fmul <8 x double> %split, %splat.splat159
5320 %splat.splat162 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49, i32 49>
5321 %91 = fmul <8 x double> %split1, %splat.splat162
5322 %92 = fadd <8 x double> %90, %91
5323 %splat.splat165 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50, i32 50>
5324 %93 = fmul <8 x double> %split2, %splat.splat165
5325 %94 = fadd <8 x double> %92, %93
5326 %splat.splat168 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51, i32 51>
5327 %95 = fmul <8 x double> %split3, %splat.splat168
5328 %96 = fadd <8 x double> %94, %95
5329 %splat.splat171 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52, i32 52>
5330 %97 = fmul <8 x double> %split4, %splat.splat171
5331 %98 = fadd <8 x double> %96, %97
5332 %splat.splat174 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53, i32 53>
5333 %99 = fmul <8 x double> %split5, %splat.splat174
5334 %100 = fadd <8 x double> %98, %99
5335 %splat.splat177 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54, i32 54>
5336 %101 = fmul <8 x double> %split6, %splat.splat177
5337 %102 = fadd <8 x double> %100, %101
5338 %splat.splat180 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55, i32 55>
5339 %103 = fmul <8 x double> %split7, %splat.splat180
5340 %104 = fadd <8 x double> %102, %103
5341 %splat.splat183 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56, i32 56>
5342 %105 = fmul <8 x double> %split, %splat.splat183
5343 %splat.splat186 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57, i32 57>
5344 %106 = fmul <8 x double> %split1, %splat.splat186
5345 %107 = fadd <8 x double> %105, %106
5346 %splat.splat189 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58, i32 58>
5347 %108 = fmul <8 x double> %split2, %splat.splat189
5348 %109 = fadd <8 x double> %107, %108
5349 %splat.splat192 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59, i32 59>
5350 %110 = fmul <8 x double> %split3, %splat.splat192
5351 %111 = fadd <8 x double> %109, %110
5352 %splat.splat195 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60, i32 60>
5353 %112 = fmul <8 x double> %split4, %splat.splat195
5354 %113 = fadd <8 x double> %111, %112
5355 %splat.splat198 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61, i32 61>
5356 %114 = fmul <8 x double> %split5, %splat.splat198
5357 %115 = fadd <8 x double> %113, %114
5358 %splat.splat201 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62, i32 62>
5359 %116 = fmul <8 x double> %split6, %splat.splat201
5360 %117 = fadd <8 x double> %115, %116
5361 %splat.splat204 = shufflevector <64 x double> %a1, <64 x double> undef, <8 x i32> <i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63, i32 63>
5362 %118 = fmul <8 x double> %split7, %splat.splat204
5363 %119 = fadd <8 x double> %117, %118
5364 %120 = shufflevector <8 x double> %14, <8 x double> %29, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
5365 %121 = shufflevector <8 x double> %44, <8 x double> %59, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
5366 %122 = shufflevector <8 x double> %74, <8 x double> %89, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
5367 %123 = shufflevector <8 x double> %104, <8 x double> %119, <16 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15>
5368 %124 = shufflevector <16 x double> %120, <16 x double> %121, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
5369 %125 = shufflevector <16 x double> %122, <16 x double> %123, <32 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31>
5370 %126 = shufflevector <32 x double> %124, <32 x double> %125, <64 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7, i32 8, i32 9, i32 10, i32 11, i32 12, i32 13, i32 14, i32 15, i32 16, i32 17, i32 18, i32 19, i32 20, i32 21, i32 22, i32 23, i32 24, i32 25, i32 26, i32 27, i32 28, i32 29, i32 30, i32 31, i32 32, i32 33, i32 34, i32 35, i32 36, i32 37, i32 38, i32 39, i32 40, i32 41, i32 42, i32 43, i32 44, i32 45, i32 46, i32 47, i32 48, i32 49, i32 50, i32 51, i32 52, i32 53, i32 54, i32 55, i32 56, i32 57, i32 58, i32 59, i32 60, i32 61, i32 62, i32 63>
5371 ret <64 x double> %126