1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=AVX --check-prefix=AVX512 --check-prefix=AVX512F
3 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefix=AVX --check-prefix=AVX512 --check-prefix=AVX512BW
; No 512-bit integer multiply-high exists for i64, so sdiv-by-7 is fully
; scalarized: each 64-bit lane is extracted, signed-multiplied (imulq) by the
; magic constant 0x4924924924924925 (see Hacker's Delight, signed division by
; constants), the quotient is fixed up by adding the sign bit (shrq $63), and
; the lanes are re-packed via vpunpcklqdq / vinserti128 / vinserti64x4.
; NOTE(review): some autogenerated assertion lines are elided in this excerpt.
9 define <8 x i64> @test_div7_8i64(<8 x i64> %a) nounwind {
10 ; AVX-LABEL: test_div7_8i64:
12 ; AVX-NEXT: vextracti32x4 $3, %zmm0, %xmm1
13 ; AVX-NEXT: vpextrq $1, %xmm1, %rax
14 ; AVX-NEXT: movabsq $5270498306774157605, %rcx # imm = 0x4924924924924925
15 ; AVX-NEXT: imulq %rcx
16 ; AVX-NEXT: movq %rdx, %rax
17 ; AVX-NEXT: shrq $63, %rax
19 ; AVX-NEXT: addq %rax, %rdx
20 ; AVX-NEXT: vmovq %rdx, %xmm2
21 ; AVX-NEXT: vmovq %xmm1, %rax
22 ; AVX-NEXT: imulq %rcx
23 ; AVX-NEXT: movq %rdx, %rax
24 ; AVX-NEXT: shrq $63, %rax
26 ; AVX-NEXT: addq %rax, %rdx
27 ; AVX-NEXT: vmovq %rdx, %xmm1
28 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm2[0]
29 ; AVX-NEXT: vextracti32x4 $2, %zmm0, %xmm2
30 ; AVX-NEXT: vpextrq $1, %xmm2, %rax
31 ; AVX-NEXT: imulq %rcx
32 ; AVX-NEXT: movq %rdx, %rax
33 ; AVX-NEXT: shrq $63, %rax
35 ; AVX-NEXT: addq %rax, %rdx
36 ; AVX-NEXT: vmovq %rdx, %xmm3
37 ; AVX-NEXT: vmovq %xmm2, %rax
38 ; AVX-NEXT: imulq %rcx
39 ; AVX-NEXT: movq %rdx, %rax
40 ; AVX-NEXT: shrq $63, %rax
42 ; AVX-NEXT: addq %rax, %rdx
43 ; AVX-NEXT: vmovq %rdx, %xmm2
44 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
45 ; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
46 ; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
47 ; AVX-NEXT: vpextrq $1, %xmm2, %rax
48 ; AVX-NEXT: imulq %rcx
49 ; AVX-NEXT: movq %rdx, %rax
50 ; AVX-NEXT: shrq $63, %rax
52 ; AVX-NEXT: addq %rax, %rdx
53 ; AVX-NEXT: vmovq %rdx, %xmm3
54 ; AVX-NEXT: vmovq %xmm2, %rax
55 ; AVX-NEXT: imulq %rcx
56 ; AVX-NEXT: movq %rdx, %rax
57 ; AVX-NEXT: shrq $63, %rax
59 ; AVX-NEXT: addq %rax, %rdx
60 ; AVX-NEXT: vmovq %rdx, %xmm2
61 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
62 ; AVX-NEXT: vpextrq $1, %xmm0, %rax
63 ; AVX-NEXT: imulq %rcx
64 ; AVX-NEXT: movq %rdx, %rax
65 ; AVX-NEXT: shrq $63, %rax
67 ; AVX-NEXT: addq %rax, %rdx
68 ; AVX-NEXT: vmovq %rdx, %xmm3
69 ; AVX-NEXT: vmovq %xmm0, %rax
70 ; AVX-NEXT: imulq %rcx
71 ; AVX-NEXT: movq %rdx, %rax
72 ; AVX-NEXT: shrq $63, %rax
74 ; AVX-NEXT: addq %rax, %rdx
75 ; AVX-NEXT: vmovq %rdx, %xmm0
76 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
77 ; AVX-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
78 ; AVX-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; The IR under test: splat signed division by 7 of a <8 x i64> vector.
80 %res = sdiv <8 x i64> %a, <i64 7, i64 7, i64 7, i64 7, i64 7, i64 7, i64 7, i64 7>
; i32 sdiv-by-7 stays fully vectorized: the magic multiplier 2454267027
; (0x92492493) is broadcast, even/odd lanes are multiplied-high with vpmuldq
; (odd lanes exposed via vpshufd), the high halves are re-interleaved with a
; vpermi2d index vector, and the quotient is finished with the standard
; add / srl 31 (sign) / sra 2 / add fixup sequence.
84 define <16 x i32> @test_div7_16i32(<16 x i32> %a) nounwind {
85 ; AVX-LABEL: test_div7_16i32:
87 ; AVX-NEXT: vpbroadcastd {{.*#+}} zmm1 = [2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027]
88 ; AVX-NEXT: vpmuldq %zmm1, %zmm0, %zmm2
89 ; AVX-NEXT: vpshufd {{.*#+}} zmm3 = zmm0[1,1,3,3,5,5,7,7,9,9,11,11,13,13,15,15]
90 ; AVX-NEXT: vpmuldq %zmm1, %zmm3, %zmm1
91 ; AVX-NEXT: vmovdqa64 {{.*#+}} zmm3 = [1,17,3,19,5,21,7,23,9,25,11,27,13,29,15,31]
92 ; AVX-NEXT: vpermi2d %zmm1, %zmm2, %zmm3
93 ; AVX-NEXT: vpaddd %zmm0, %zmm3, %zmm0
94 ; AVX-NEXT: vpsrld $31, %zmm0, %zmm1
95 ; AVX-NEXT: vpsrad $2, %zmm0, %zmm0
96 ; AVX-NEXT: vpaddd %zmm1, %zmm0, %zmm0
98 %res = sdiv <16 x i32> %a, <i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
; i16 sdiv-by-7 uses vpmulhw with magic constant 18725 (0x4925), then the
; srl 15 (sign) / sra 1 / add fixup. The two prefixes diverge here: plain
; AVX-512F has no 512-bit vpmulhw, so the vector is processed as two ymm
; halves; AVX-512BW handles the whole zmm in one sequence.
102 define <32 x i16> @test_div7_32i16(<32 x i16> %a) nounwind {
103 ; AVX512F-LABEL: test_div7_32i16:
105 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm2 = [18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725]
106 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm0
107 ; AVX512F-NEXT: vpsrlw $15, %ymm0, %ymm3
108 ; AVX512F-NEXT: vpsraw $1, %ymm0, %ymm0
109 ; AVX512F-NEXT: vpaddw %ymm3, %ymm0, %ymm0
110 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm1, %ymm1
111 ; AVX512F-NEXT: vpsrlw $15, %ymm1, %ymm2
112 ; AVX512F-NEXT: vpsraw $1, %ymm1, %ymm1
113 ; AVX512F-NEXT: vpaddw %ymm2, %ymm1, %ymm1
116 ; AVX512BW-LABEL: test_div7_32i16:
118 ; AVX512BW-NEXT: vpmulhw {{.*}}(%rip), %zmm0, %zmm0
119 ; AVX512BW-NEXT: vpsrlw $15, %zmm0, %zmm1
120 ; AVX512BW-NEXT: vpsraw $1, %zmm0, %zmm0
121 ; AVX512BW-NEXT: vpaddw %zmm1, %zmm0, %zmm0
122 ; AVX512BW-NEXT: retq
123 %res = sdiv <32 x i16> %a, <i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7>
; i8 sdiv-by-7: no byte multiply exists, so lanes are sign-extended to i16
; (vpmovsxbw), multiplied by magic 65427 (-109 as i16), and the high bytes are
; narrowed back (vpackuswb+vpermq on 512F, vpmovwb on 512BW). The arithmetic
; shift-right-by-2 of bytes is emulated with a logical shift, mask with 63,
; then XOR/subtract 32 to restore the sign; the +1-if-negative fixup comes
; from the srl 7 / and 1 / add sequence.
127 define <64 x i8> @test_div7_64i8(<64 x i8> %a) nounwind {
128 ; AVX512F-LABEL: test_div7_64i8:
130 ; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm2
131 ; AVX512F-NEXT: vpmovsxbw %xmm2, %ymm2
132 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm3 = [65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427]
133 ; AVX512F-NEXT: vpmullw %ymm3, %ymm2, %ymm2
134 ; AVX512F-NEXT: vpsrlw $8, %ymm2, %ymm2
135 ; AVX512F-NEXT: vpmovsxbw %xmm0, %ymm4
136 ; AVX512F-NEXT: vpmullw %ymm3, %ymm4, %ymm4
137 ; AVX512F-NEXT: vpsrlw $8, %ymm4, %ymm4
138 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm4, %ymm2
139 ; AVX512F-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,1,3]
140 ; AVX512F-NEXT: vpaddb %ymm0, %ymm2, %ymm0
141 ; AVX512F-NEXT: vpsrlw $7, %ymm0, %ymm2
142 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm4 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
143 ; AVX512F-NEXT: vpand %ymm4, %ymm2, %ymm2
144 ; AVX512F-NEXT: vpsrlw $2, %ymm0, %ymm0
145 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm5 = [63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63]
146 ; AVX512F-NEXT: vpand %ymm5, %ymm0, %ymm0
147 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm6 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
148 ; AVX512F-NEXT: vpxor %ymm6, %ymm0, %ymm0
149 ; AVX512F-NEXT: vpaddb %ymm2, %ymm0, %ymm0
150 ; AVX512F-NEXT: vpsubb %ymm6, %ymm0, %ymm0
151 ; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm2
152 ; AVX512F-NEXT: vpmovsxbw %xmm2, %ymm2
153 ; AVX512F-NEXT: vpmullw %ymm3, %ymm2, %ymm2
154 ; AVX512F-NEXT: vpsrlw $8, %ymm2, %ymm2
155 ; AVX512F-NEXT: vpmovsxbw %xmm1, %ymm7
156 ; AVX512F-NEXT: vpmullw %ymm3, %ymm7, %ymm3
157 ; AVX512F-NEXT: vpsrlw $8, %ymm3, %ymm3
158 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm3, %ymm2
159 ; AVX512F-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,1,3]
160 ; AVX512F-NEXT: vpaddb %ymm1, %ymm2, %ymm1
161 ; AVX512F-NEXT: vpsrlw $7, %ymm1, %ymm2
162 ; AVX512F-NEXT: vpand %ymm4, %ymm2, %ymm2
163 ; AVX512F-NEXT: vpsrlw $2, %ymm1, %ymm1
164 ; AVX512F-NEXT: vpand %ymm5, %ymm1, %ymm1
165 ; AVX512F-NEXT: vpxor %ymm6, %ymm1, %ymm1
166 ; AVX512F-NEXT: vpaddb %ymm2, %ymm1, %ymm1
167 ; AVX512F-NEXT: vpsubb %ymm6, %ymm1, %ymm1
170 ; AVX512BW-LABEL: test_div7_64i8:
172 ; AVX512BW-NEXT: vpmovsxbw %ymm0, %zmm1
173 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427]
174 ; AVX512BW-NEXT: vpmullw %zmm2, %zmm1, %zmm1
175 ; AVX512BW-NEXT: vpsrlw $8, %zmm1, %zmm1
176 ; AVX512BW-NEXT: vpmovwb %zmm1, %ymm1
177 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm0, %ymm3
178 ; AVX512BW-NEXT: vpmovsxbw %ymm3, %zmm3
179 ; AVX512BW-NEXT: vpmullw %zmm2, %zmm3, %zmm2
180 ; AVX512BW-NEXT: vpsrlw $8, %zmm2, %zmm2
181 ; AVX512BW-NEXT: vpmovwb %zmm2, %ymm2
182 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
183 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0
184 ; AVX512BW-NEXT: vpsrlw $2, %zmm0, %zmm1
185 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm1, %zmm1
186 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
187 ; AVX512BW-NEXT: vpxorq %zmm2, %zmm1, %zmm1
188 ; AVX512BW-NEXT: vpsrlw $7, %zmm0, %zmm0
189 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm0, %zmm0
190 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0
191 ; AVX512BW-NEXT: vpsubb %zmm2, %zmm0, %zmm0
192 ; AVX512BW-NEXT: retq
193 %res = sdiv <64 x i8> %a, <i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7>
198 ; sdiv by non-splat constant
; Non-splat i8 divisors (7..38 then mirrored back down to 7): every lane needs
; its own magic multiplier and shift, so the multiplier and shift vectors are
; loaded from constant pool (the {{.*}}(%rip) operands). The pre-shift scaling
; is done in the unpacked-to-i16 domain (vpunpck{l,h}bw + vpmullw + mask 255),
; the multiply-high in the sign-extended domain, and the per-lane arithmetic
; shift as vpsraw 8 + per-lane multiply (512F) or vpsllvw variable shifts
; (512BW), followed by the usual +1-if-negative fixup.
201 define <64 x i8> @test_divconstant_64i8(<64 x i8> %a) nounwind {
202 ; AVX512F-LABEL: test_divconstant_64i8:
204 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
205 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm2, %ymm3
206 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
207 ; AVX512F-NEXT: vpand %ymm2, %ymm3, %ymm3
208 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
209 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
210 ; AVX512F-NEXT: vpand %ymm2, %ymm4, %ymm4
211 ; AVX512F-NEXT: vpackuswb %ymm3, %ymm4, %ymm3
212 ; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm4
213 ; AVX512F-NEXT: vpmovsxbw %xmm4, %ymm4
214 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
215 ; AVX512F-NEXT: vpsrlw $8, %ymm4, %ymm4
216 ; AVX512F-NEXT: vpmovsxbw %xmm0, %ymm0
217 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm0, %ymm0
218 ; AVX512F-NEXT: vpsrlw $8, %ymm0, %ymm0
219 ; AVX512F-NEXT: vpackuswb %ymm4, %ymm0, %ymm0
220 ; AVX512F-NEXT: vpermq {{.*#+}} ymm0 = ymm0[0,2,1,3]
221 ; AVX512F-NEXT: vpaddb %ymm3, %ymm0, %ymm0
222 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm3 = ymm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
223 ; AVX512F-NEXT: vpsraw $8, %ymm3, %ymm3
224 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm3, %ymm3
225 ; AVX512F-NEXT: vpsrlw $8, %ymm3, %ymm3
226 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
227 ; AVX512F-NEXT: vpsraw $8, %ymm4, %ymm4
228 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
229 ; AVX512F-NEXT: vpsrlw $8, %ymm4, %ymm4
230 ; AVX512F-NEXT: vpackuswb %ymm3, %ymm4, %ymm3
231 ; AVX512F-NEXT: vpsrlw $7, %ymm0, %ymm0
232 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm4 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
233 ; AVX512F-NEXT: vpand %ymm4, %ymm0, %ymm0
234 ; AVX512F-NEXT: vpaddb %ymm0, %ymm3, %ymm0
235 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm3 = ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15],ymm1[24],ymm0[24],ymm1[25],ymm0[25],ymm1[26],ymm0[26],ymm1[27],ymm0[27],ymm1[28],ymm0[28],ymm1[29],ymm0[29],ymm1[30],ymm0[30],ymm1[31],ymm0[31]
236 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm3, %ymm3
237 ; AVX512F-NEXT: vpand %ymm2, %ymm3, %ymm3
238 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
239 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
240 ; AVX512F-NEXT: vpand %ymm2, %ymm5, %ymm2
241 ; AVX512F-NEXT: vpackuswb %ymm3, %ymm2, %ymm2
242 ; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm3
243 ; AVX512F-NEXT: vpmovsxbw %xmm3, %ymm3
244 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm3, %ymm3
245 ; AVX512F-NEXT: vpsrlw $8, %ymm3, %ymm3
246 ; AVX512F-NEXT: vpmovsxbw %xmm1, %ymm1
247 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm1, %ymm1
248 ; AVX512F-NEXT: vpsrlw $8, %ymm1, %ymm1
249 ; AVX512F-NEXT: vpackuswb %ymm3, %ymm1, %ymm1
250 ; AVX512F-NEXT: vpermq {{.*#+}} ymm1 = ymm1[0,2,1,3]
251 ; AVX512F-NEXT: vpaddb %ymm2, %ymm1, %ymm1
252 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
253 ; AVX512F-NEXT: vpsraw $8, %ymm2, %ymm2
254 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm2, %ymm2
255 ; AVX512F-NEXT: vpsrlw $8, %ymm2, %ymm2
256 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
257 ; AVX512F-NEXT: vpsraw $8, %ymm3, %ymm3
258 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm3, %ymm3
259 ; AVX512F-NEXT: vpsrlw $8, %ymm3, %ymm3
260 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm3, %ymm2
261 ; AVX512F-NEXT: vpsrlw $7, %ymm1, %ymm1
262 ; AVX512F-NEXT: vpand %ymm4, %ymm1, %ymm1
263 ; AVX512F-NEXT: vpaddb %ymm1, %ymm2, %ymm1
266 ; AVX512BW-LABEL: test_divconstant_64i8:
268 ; AVX512BW-NEXT: vpunpckhbw {{.*#+}} zmm1 = zmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
269 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm1, %zmm1
270 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
271 ; AVX512BW-NEXT: vpandq %zmm2, %zmm1, %zmm1
272 ; AVX512BW-NEXT: vpunpcklbw {{.*#+}} zmm3 = zmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
273 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm3, %zmm3
274 ; AVX512BW-NEXT: vpandq %zmm2, %zmm3, %zmm2
275 ; AVX512BW-NEXT: vpackuswb %zmm1, %zmm2, %zmm1
276 ; AVX512BW-NEXT: vpmovsxbw %ymm0, %zmm2
277 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm2, %zmm2
278 ; AVX512BW-NEXT: vpsrlw $8, %zmm2, %zmm2
279 ; AVX512BW-NEXT: vpmovwb %zmm2, %ymm2
280 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm0, %ymm0
281 ; AVX512BW-NEXT: vpmovsxbw %ymm0, %zmm0
282 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm0, %zmm0
283 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0
284 ; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
285 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm0, %zmm2, %zmm0
286 ; AVX512BW-NEXT: vpaddb %zmm1, %zmm0, %zmm0
287 ; AVX512BW-NEXT: vpunpckhbw {{.*#+}} zmm1 = zmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
288 ; AVX512BW-NEXT: vpsraw $8, %zmm1, %zmm1
289 ; AVX512BW-NEXT: vpsllvw {{.*}}(%rip), %zmm1, %zmm1
290 ; AVX512BW-NEXT: vpsrlw $8, %zmm1, %zmm1
291 ; AVX512BW-NEXT: vpunpcklbw {{.*#+}} zmm2 = zmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
292 ; AVX512BW-NEXT: vpsraw $8, %zmm2, %zmm2
293 ; AVX512BW-NEXT: vpsllvw {{.*}}(%rip), %zmm2, %zmm2
294 ; AVX512BW-NEXT: vpsrlw $8, %zmm2, %zmm2
295 ; AVX512BW-NEXT: vpackuswb %zmm1, %zmm2, %zmm1
296 ; AVX512BW-NEXT: vpsrlw $7, %zmm0, %zmm0
297 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm0, %zmm0
298 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0
299 ; AVX512BW-NEXT: retq
300 %res = sdiv <64 x i8> %a, <i8 7, i8 8, i8 9, i8 10, i8 11, i8 12, i8 13, i8 14, i8 15, i8 16, i8 17, i8 18, i8 19, i8 20, i8 21, i8 22, i8 23, i8 24, i8 25, i8 26, i8 27, i8 28, i8 29, i8 30, i8 31, i8 32, i8 33, i8 34, i8 35, i8 36, i8 37, i8 38, i8 38, i8 37, i8 36, i8 35, i8 34, i8 33, i8 32, i8 31, i8 30, i8 29, i8 28, i8 27, i8 26, i8 25, i8 24, i8 23, i8 22, i8 21, i8 20, i8 19, i8 18, i8 17, i8 16, i8 15, i8 14, i8 13, i8 12, i8 11, i8 10, i8 9, i8 8, i8 7>
; Scalarized srem-by-7: same per-lane magic-multiply quotient computation as
; test_div7_8i64 (the dividend is kept in %rcx so %rax is free for imulq's
; implicit operand), then the remainder is formed as x - 7*q, where 7*q is
; computed as 8*q (leaq (,%rdx,8)) with q subtracted back and x added.
; NOTE(review): some autogenerated assertion lines are elided in this excerpt.
308 define <8 x i64> @test_rem7_8i64(<8 x i64> %a) nounwind {
309 ; AVX-LABEL: test_rem7_8i64:
311 ; AVX-NEXT: vextracti32x4 $3, %zmm0, %xmm1
312 ; AVX-NEXT: vpextrq $1, %xmm1, %rcx
313 ; AVX-NEXT: movabsq $5270498306774157605, %rsi # imm = 0x4924924924924925
314 ; AVX-NEXT: movq %rcx, %rax
315 ; AVX-NEXT: imulq %rsi
316 ; AVX-NEXT: movq %rdx, %rax
317 ; AVX-NEXT: shrq $63, %rax
318 ; AVX-NEXT: sarq %rdx
319 ; AVX-NEXT: addq %rax, %rdx
320 ; AVX-NEXT: leaq (,%rdx,8), %rax
321 ; AVX-NEXT: subq %rax, %rdx
322 ; AVX-NEXT: addq %rcx, %rdx
323 ; AVX-NEXT: vmovq %rdx, %xmm2
324 ; AVX-NEXT: vmovq %xmm1, %rcx
325 ; AVX-NEXT: movq %rcx, %rax
326 ; AVX-NEXT: imulq %rsi
327 ; AVX-NEXT: movq %rdx, %rax
328 ; AVX-NEXT: shrq $63, %rax
329 ; AVX-NEXT: sarq %rdx
330 ; AVX-NEXT: addq %rax, %rdx
331 ; AVX-NEXT: leaq (,%rdx,8), %rax
332 ; AVX-NEXT: subq %rax, %rdx
333 ; AVX-NEXT: addq %rcx, %rdx
334 ; AVX-NEXT: vmovq %rdx, %xmm1
335 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm2[0]
336 ; AVX-NEXT: vextracti32x4 $2, %zmm0, %xmm2
337 ; AVX-NEXT: vpextrq $1, %xmm2, %rcx
338 ; AVX-NEXT: movq %rcx, %rax
339 ; AVX-NEXT: imulq %rsi
340 ; AVX-NEXT: movq %rdx, %rax
341 ; AVX-NEXT: shrq $63, %rax
342 ; AVX-NEXT: sarq %rdx
343 ; AVX-NEXT: addq %rax, %rdx
344 ; AVX-NEXT: leaq (,%rdx,8), %rax
345 ; AVX-NEXT: subq %rax, %rdx
346 ; AVX-NEXT: addq %rcx, %rdx
347 ; AVX-NEXT: vmovq %rdx, %xmm3
348 ; AVX-NEXT: vmovq %xmm2, %rcx
349 ; AVX-NEXT: movq %rcx, %rax
350 ; AVX-NEXT: imulq %rsi
351 ; AVX-NEXT: movq %rdx, %rax
352 ; AVX-NEXT: shrq $63, %rax
353 ; AVX-NEXT: sarq %rdx
354 ; AVX-NEXT: addq %rax, %rdx
355 ; AVX-NEXT: leaq (,%rdx,8), %rax
356 ; AVX-NEXT: subq %rax, %rdx
357 ; AVX-NEXT: addq %rcx, %rdx
358 ; AVX-NEXT: vmovq %rdx, %xmm2
359 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
360 ; AVX-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
361 ; AVX-NEXT: vextracti128 $1, %ymm0, %xmm2
362 ; AVX-NEXT: vpextrq $1, %xmm2, %rcx
363 ; AVX-NEXT: movq %rcx, %rax
364 ; AVX-NEXT: imulq %rsi
365 ; AVX-NEXT: movq %rdx, %rax
366 ; AVX-NEXT: shrq $63, %rax
367 ; AVX-NEXT: sarq %rdx
368 ; AVX-NEXT: addq %rax, %rdx
369 ; AVX-NEXT: leaq (,%rdx,8), %rax
370 ; AVX-NEXT: subq %rax, %rdx
371 ; AVX-NEXT: addq %rcx, %rdx
372 ; AVX-NEXT: vmovq %rdx, %xmm3
373 ; AVX-NEXT: vmovq %xmm2, %rcx
374 ; AVX-NEXT: movq %rcx, %rax
375 ; AVX-NEXT: imulq %rsi
376 ; AVX-NEXT: movq %rdx, %rax
377 ; AVX-NEXT: shrq $63, %rax
378 ; AVX-NEXT: sarq %rdx
379 ; AVX-NEXT: addq %rax, %rdx
380 ; AVX-NEXT: leaq (,%rdx,8), %rax
381 ; AVX-NEXT: subq %rax, %rdx
382 ; AVX-NEXT: addq %rcx, %rdx
383 ; AVX-NEXT: vmovq %rdx, %xmm2
384 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm3[0]
385 ; AVX-NEXT: vpextrq $1, %xmm0, %rcx
386 ; AVX-NEXT: movq %rcx, %rax
387 ; AVX-NEXT: imulq %rsi
388 ; AVX-NEXT: movq %rdx, %rax
389 ; AVX-NEXT: shrq $63, %rax
390 ; AVX-NEXT: sarq %rdx
391 ; AVX-NEXT: addq %rax, %rdx
392 ; AVX-NEXT: leaq (,%rdx,8), %rax
393 ; AVX-NEXT: subq %rax, %rdx
394 ; AVX-NEXT: addq %rcx, %rdx
395 ; AVX-NEXT: vmovq %rdx, %xmm3
396 ; AVX-NEXT: vmovq %xmm0, %rcx
397 ; AVX-NEXT: movq %rcx, %rax
398 ; AVX-NEXT: imulq %rsi
399 ; AVX-NEXT: movq %rdx, %rax
400 ; AVX-NEXT: shrq $63, %rax
401 ; AVX-NEXT: sarq %rdx
402 ; AVX-NEXT: addq %rax, %rdx
403 ; AVX-NEXT: leaq (,%rdx,8), %rax
404 ; AVX-NEXT: subq %rax, %rdx
405 ; AVX-NEXT: addq %rcx, %rdx
406 ; AVX-NEXT: vmovq %rdx, %xmm0
407 ; AVX-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
408 ; AVX-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
409 ; AVX-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
411 %res = srem <8 x i64> %a, <i64 7, i64 7, i64 7, i64 7, i64 7, i64 7, i64 7, i64 7>
; i32 srem-by-7: same quotient sequence as test_div7_16i32, then the
; remainder is x - 7*q, with 7*q formed by a broadcast-from-memory multiply
; (vpmulld with a {1to16} embedded-broadcast operand) and a vpsubd.
415 define <16 x i32> @test_rem7_16i32(<16 x i32> %a) nounwind {
416 ; AVX-LABEL: test_rem7_16i32:
418 ; AVX-NEXT: vpbroadcastd {{.*#+}} zmm1 = [2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027,2454267027]
419 ; AVX-NEXT: vpmuldq %zmm1, %zmm0, %zmm2
420 ; AVX-NEXT: vpshufd {{.*#+}} zmm3 = zmm0[1,1,3,3,5,5,7,7,9,9,11,11,13,13,15,15]
421 ; AVX-NEXT: vpmuldq %zmm1, %zmm3, %zmm1
422 ; AVX-NEXT: vmovdqa64 {{.*#+}} zmm3 = [1,17,3,19,5,21,7,23,9,25,11,27,13,29,15,31]
423 ; AVX-NEXT: vpermi2d %zmm1, %zmm2, %zmm3
424 ; AVX-NEXT: vpaddd %zmm0, %zmm3, %zmm1
425 ; AVX-NEXT: vpsrld $31, %zmm1, %zmm2
426 ; AVX-NEXT: vpsrad $2, %zmm1, %zmm1
427 ; AVX-NEXT: vpaddd %zmm2, %zmm1, %zmm1
428 ; AVX-NEXT: vpmulld {{.*}}(%rip){1to16}, %zmm1, %zmm1
429 ; AVX-NEXT: vpsubd %zmm1, %zmm0, %zmm0
431 %res = srem <16 x i32> %a, <i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7, i32 7>
; i16 srem-by-7: the vpmulhw quotient sequence from test_div7_32i16 followed
; by vpmullw with a splat of 7 and a vpsubw to get x - 7*q. Plain 512F again
; works in two ymm halves; 512BW keeps the full zmm.
435 define <32 x i16> @test_rem7_32i16(<32 x i16> %a) nounwind {
436 ; AVX512F-LABEL: test_rem7_32i16:
438 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm2 = [18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725,18725]
439 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm0, %ymm3
440 ; AVX512F-NEXT: vpsrlw $15, %ymm3, %ymm4
441 ; AVX512F-NEXT: vpsraw $1, %ymm3, %ymm3
442 ; AVX512F-NEXT: vpaddw %ymm4, %ymm3, %ymm3
443 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm4 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7]
444 ; AVX512F-NEXT: vpmullw %ymm4, %ymm3, %ymm3
445 ; AVX512F-NEXT: vpsubw %ymm3, %ymm0, %ymm0
446 ; AVX512F-NEXT: vpmulhw %ymm2, %ymm1, %ymm2
447 ; AVX512F-NEXT: vpsrlw $15, %ymm2, %ymm3
448 ; AVX512F-NEXT: vpsraw $1, %ymm2, %ymm2
449 ; AVX512F-NEXT: vpaddw %ymm3, %ymm2, %ymm2
450 ; AVX512F-NEXT: vpmullw %ymm4, %ymm2, %ymm2
451 ; AVX512F-NEXT: vpsubw %ymm2, %ymm1, %ymm1
454 ; AVX512BW-LABEL: test_rem7_32i16:
456 ; AVX512BW-NEXT: vpmulhw {{.*}}(%rip), %zmm0, %zmm1
457 ; AVX512BW-NEXT: vpsrlw $15, %zmm1, %zmm2
458 ; AVX512BW-NEXT: vpsraw $1, %zmm1, %zmm1
459 ; AVX512BW-NEXT: vpaddw %zmm2, %zmm1, %zmm1
460 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm1, %zmm1
461 ; AVX512BW-NEXT: vpsubw %zmm1, %zmm0, %zmm0
462 ; AVX512BW-NEXT: retq
463 %res = srem <32 x i16> %a, <i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7, i16 7>
; i8 srem-by-7: the quotient q is computed exactly as in test_div7_64i8, then
; 7*q is built without a byte multiply as (q << 3) masked with 248 (to drop
; bits shifted across lane boundaries) minus q, and subtracted from/added to
; the original value to yield x - 7*q.
467 define <64 x i8> @test_rem7_64i8(<64 x i8> %a) nounwind {
468 ; AVX512F-LABEL: test_rem7_64i8:
470 ; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm2
471 ; AVX512F-NEXT: vpmovsxbw %xmm2, %ymm2
472 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm3 = [65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427]
473 ; AVX512F-NEXT: vpmullw %ymm3, %ymm2, %ymm2
474 ; AVX512F-NEXT: vpsrlw $8, %ymm2, %ymm2
475 ; AVX512F-NEXT: vpmovsxbw %xmm0, %ymm4
476 ; AVX512F-NEXT: vpmullw %ymm3, %ymm4, %ymm4
477 ; AVX512F-NEXT: vpsrlw $8, %ymm4, %ymm4
478 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm4, %ymm2
479 ; AVX512F-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,1,3]
480 ; AVX512F-NEXT: vpaddb %ymm0, %ymm2, %ymm2
481 ; AVX512F-NEXT: vpsrlw $7, %ymm2, %ymm4
482 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm5 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
483 ; AVX512F-NEXT: vpand %ymm5, %ymm4, %ymm4
484 ; AVX512F-NEXT: vpsrlw $2, %ymm2, %ymm2
485 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm6 = [63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63,63]
486 ; AVX512F-NEXT: vpand %ymm6, %ymm2, %ymm2
487 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm7 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
488 ; AVX512F-NEXT: vpxor %ymm7, %ymm2, %ymm2
489 ; AVX512F-NEXT: vpaddb %ymm4, %ymm2, %ymm2
490 ; AVX512F-NEXT: vpsubb %ymm7, %ymm2, %ymm2
491 ; AVX512F-NEXT: vpsllw $3, %ymm2, %ymm4
492 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm8 = [248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248,248]
493 ; AVX512F-NEXT: vpand %ymm8, %ymm4, %ymm4
494 ; AVX512F-NEXT: vpsubb %ymm4, %ymm2, %ymm2
495 ; AVX512F-NEXT: vpaddb %ymm2, %ymm0, %ymm0
496 ; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm2
497 ; AVX512F-NEXT: vpmovsxbw %xmm2, %ymm2
498 ; AVX512F-NEXT: vpmullw %ymm3, %ymm2, %ymm2
499 ; AVX512F-NEXT: vpsrlw $8, %ymm2, %ymm2
500 ; AVX512F-NEXT: vpmovsxbw %xmm1, %ymm4
501 ; AVX512F-NEXT: vpmullw %ymm3, %ymm4, %ymm3
502 ; AVX512F-NEXT: vpsrlw $8, %ymm3, %ymm3
503 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm3, %ymm2
504 ; AVX512F-NEXT: vpermq {{.*#+}} ymm2 = ymm2[0,2,1,3]
505 ; AVX512F-NEXT: vpaddb %ymm1, %ymm2, %ymm2
506 ; AVX512F-NEXT: vpsrlw $7, %ymm2, %ymm3
507 ; AVX512F-NEXT: vpand %ymm5, %ymm3, %ymm3
508 ; AVX512F-NEXT: vpsrlw $2, %ymm2, %ymm2
509 ; AVX512F-NEXT: vpand %ymm6, %ymm2, %ymm2
510 ; AVX512F-NEXT: vpxor %ymm7, %ymm2, %ymm2
511 ; AVX512F-NEXT: vpaddb %ymm3, %ymm2, %ymm2
512 ; AVX512F-NEXT: vpsubb %ymm7, %ymm2, %ymm2
513 ; AVX512F-NEXT: vpsllw $3, %ymm2, %ymm3
514 ; AVX512F-NEXT: vpand %ymm8, %ymm3, %ymm3
515 ; AVX512F-NEXT: vpsubb %ymm3, %ymm2, %ymm2
516 ; AVX512F-NEXT: vpaddb %ymm2, %ymm1, %ymm1
519 ; AVX512BW-LABEL: test_rem7_64i8:
521 ; AVX512BW-NEXT: vpmovsxbw %ymm0, %zmm1
522 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427,65427]
523 ; AVX512BW-NEXT: vpmullw %zmm2, %zmm1, %zmm1
524 ; AVX512BW-NEXT: vpsrlw $8, %zmm1, %zmm1
525 ; AVX512BW-NEXT: vpmovwb %zmm1, %ymm1
526 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm0, %ymm3
527 ; AVX512BW-NEXT: vpmovsxbw %ymm3, %zmm3
528 ; AVX512BW-NEXT: vpmullw %zmm2, %zmm3, %zmm2
529 ; AVX512BW-NEXT: vpsrlw $8, %zmm2, %zmm2
530 ; AVX512BW-NEXT: vpmovwb %zmm2, %ymm2
531 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
532 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm1
533 ; AVX512BW-NEXT: vpsrlw $2, %zmm1, %zmm2
534 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm2, %zmm2
535 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32,32]
536 ; AVX512BW-NEXT: vpxorq %zmm3, %zmm2, %zmm2
537 ; AVX512BW-NEXT: vpsrlw $7, %zmm1, %zmm1
538 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm1, %zmm1
539 ; AVX512BW-NEXT: vpaddb %zmm1, %zmm2, %zmm1
540 ; AVX512BW-NEXT: vpsubb %zmm3, %zmm1, %zmm1
541 ; AVX512BW-NEXT: vpsllw $3, %zmm1, %zmm2
542 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm2, %zmm2
543 ; AVX512BW-NEXT: vpsubb %zmm2, %zmm1, %zmm1
544 ; AVX512BW-NEXT: vpaddb %zmm1, %zmm0, %zmm0
545 ; AVX512BW-NEXT: retq
546 %res = srem <64 x i8> %a, <i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7, i8 7,i8 7, i8 7, i8 7, i8 7>
551 ; srem by non-splat constant
554 define <64 x i8> @test_remconstant_64i8(<64 x i8> %a) nounwind {
555 ; AVX512F-LABEL: test_remconstant_64i8:
557 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm2 = ymm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
558 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm2, %ymm3
559 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
560 ; AVX512F-NEXT: vpand %ymm2, %ymm3, %ymm3
561 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
562 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
563 ; AVX512F-NEXT: vpand %ymm2, %ymm4, %ymm4
564 ; AVX512F-NEXT: vpackuswb %ymm3, %ymm4, %ymm3
565 ; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm4
566 ; AVX512F-NEXT: vpmovsxbw %xmm4, %ymm4
567 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
568 ; AVX512F-NEXT: vpsrlw $8, %ymm4, %ymm4
569 ; AVX512F-NEXT: vpmovsxbw %xmm0, %ymm5
570 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
571 ; AVX512F-NEXT: vpsrlw $8, %ymm5, %ymm5
572 ; AVX512F-NEXT: vpackuswb %ymm4, %ymm5, %ymm4
573 ; AVX512F-NEXT: vpermq {{.*#+}} ymm4 = ymm4[0,2,1,3]
574 ; AVX512F-NEXT: vpaddb %ymm3, %ymm4, %ymm3
575 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm4 = ymm3[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
576 ; AVX512F-NEXT: vpsraw $8, %ymm4, %ymm4
577 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
578 ; AVX512F-NEXT: vpsrlw $8, %ymm4, %ymm4
579 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm3[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
580 ; AVX512F-NEXT: vpsraw $8, %ymm5, %ymm5
581 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
582 ; AVX512F-NEXT: vpsrlw $8, %ymm5, %ymm5
583 ; AVX512F-NEXT: vpackuswb %ymm4, %ymm5, %ymm4
584 ; AVX512F-NEXT: vpsrlw $7, %ymm3, %ymm5
585 ; AVX512F-NEXT: vmovdqa {{.*#+}} ymm3 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
586 ; AVX512F-NEXT: vpand %ymm3, %ymm5, %ymm5
587 ; AVX512F-NEXT: vpaddb %ymm5, %ymm4, %ymm4
588 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm5 = ymm4[8],ymm0[8],ymm4[9],ymm0[9],ymm4[10],ymm0[10],ymm4[11],ymm0[11],ymm4[12],ymm0[12],ymm4[13],ymm0[13],ymm4[14],ymm0[14],ymm4[15],ymm0[15],ymm4[24],ymm0[24],ymm4[25],ymm0[25],ymm4[26],ymm0[26],ymm4[27],ymm0[27],ymm4[28],ymm0[28],ymm4[29],ymm0[29],ymm4[30],ymm0[30],ymm4[31],ymm0[31]
589 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
590 ; AVX512F-NEXT: vpand %ymm2, %ymm5, %ymm5
591 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm4 = ymm4[0],ymm0[0],ymm4[1],ymm0[1],ymm4[2],ymm0[2],ymm4[3],ymm0[3],ymm4[4],ymm0[4],ymm4[5],ymm0[5],ymm4[6],ymm0[6],ymm4[7],ymm0[7],ymm4[16],ymm0[16],ymm4[17],ymm0[17],ymm4[18],ymm0[18],ymm4[19],ymm0[19],ymm4[20],ymm0[20],ymm4[21],ymm0[21],ymm4[22],ymm0[22],ymm4[23],ymm0[23]
592 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
593 ; AVX512F-NEXT: vpand %ymm2, %ymm4, %ymm4
594 ; AVX512F-NEXT: vpackuswb %ymm5, %ymm4, %ymm4
595 ; AVX512F-NEXT: vpsubb %ymm4, %ymm0, %ymm0
596 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm4 = ymm1[8],ymm0[8],ymm1[9],ymm0[9],ymm1[10],ymm0[10],ymm1[11],ymm0[11],ymm1[12],ymm0[12],ymm1[13],ymm0[13],ymm1[14],ymm0[14],ymm1[15],ymm0[15],ymm1[24],ymm0[24],ymm1[25],ymm0[25],ymm1[26],ymm0[26],ymm1[27],ymm0[27],ymm1[28],ymm0[28],ymm1[29],ymm0[29],ymm1[30],ymm0[30],ymm1[31],ymm0[31]
597 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
598 ; AVX512F-NEXT: vpand %ymm2, %ymm4, %ymm4
599 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm5 = ymm1[0],ymm0[0],ymm1[1],ymm0[1],ymm1[2],ymm0[2],ymm1[3],ymm0[3],ymm1[4],ymm0[4],ymm1[5],ymm0[5],ymm1[6],ymm0[6],ymm1[7],ymm0[7],ymm1[16],ymm0[16],ymm1[17],ymm0[17],ymm1[18],ymm0[18],ymm1[19],ymm0[19],ymm1[20],ymm0[20],ymm1[21],ymm0[21],ymm1[22],ymm0[22],ymm1[23],ymm0[23]
600 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
601 ; AVX512F-NEXT: vpand %ymm2, %ymm5, %ymm5
602 ; AVX512F-NEXT: vpackuswb %ymm4, %ymm5, %ymm4
603 ; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm5
604 ; AVX512F-NEXT: vpmovsxbw %xmm5, %ymm5
605 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
606 ; AVX512F-NEXT: vpsrlw $8, %ymm5, %ymm5
607 ; AVX512F-NEXT: vpmovsxbw %xmm1, %ymm6
608 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm6, %ymm6
609 ; AVX512F-NEXT: vpsrlw $8, %ymm6, %ymm6
610 ; AVX512F-NEXT: vpackuswb %ymm5, %ymm6, %ymm5
611 ; AVX512F-NEXT: vpermq {{.*#+}} ymm5 = ymm5[0,2,1,3]
612 ; AVX512F-NEXT: vpaddb %ymm4, %ymm5, %ymm4
613 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm5 = ymm4[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31]
614 ; AVX512F-NEXT: vpsraw $8, %ymm5, %ymm5
615 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm5, %ymm5
616 ; AVX512F-NEXT: vpsrlw $8, %ymm5, %ymm5
617 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm6 = ymm4[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23]
618 ; AVX512F-NEXT: vpsraw $8, %ymm6, %ymm6
619 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm6, %ymm6
620 ; AVX512F-NEXT: vpsrlw $8, %ymm6, %ymm6
621 ; AVX512F-NEXT: vpackuswb %ymm5, %ymm6, %ymm5
622 ; AVX512F-NEXT: vpsrlw $7, %ymm4, %ymm4
623 ; AVX512F-NEXT: vpand %ymm3, %ymm4, %ymm3
624 ; AVX512F-NEXT: vpaddb %ymm3, %ymm5, %ymm3
625 ; AVX512F-NEXT: vpunpckhbw {{.*#+}} ymm4 = ymm3[8],ymm0[8],ymm3[9],ymm0[9],ymm3[10],ymm0[10],ymm3[11],ymm0[11],ymm3[12],ymm0[12],ymm3[13],ymm0[13],ymm3[14],ymm0[14],ymm3[15],ymm0[15],ymm3[24],ymm0[24],ymm3[25],ymm0[25],ymm3[26],ymm0[26],ymm3[27],ymm0[27],ymm3[28],ymm0[28],ymm3[29],ymm0[29],ymm3[30],ymm0[30],ymm3[31],ymm0[31]
626 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm4, %ymm4
627 ; AVX512F-NEXT: vpand %ymm2, %ymm4, %ymm4
628 ; AVX512F-NEXT: vpunpcklbw {{.*#+}} ymm3 = ymm3[0],ymm0[0],ymm3[1],ymm0[1],ymm3[2],ymm0[2],ymm3[3],ymm0[3],ymm3[4],ymm0[4],ymm3[5],ymm0[5],ymm3[6],ymm0[6],ymm3[7],ymm0[7],ymm3[16],ymm0[16],ymm3[17],ymm0[17],ymm3[18],ymm0[18],ymm3[19],ymm0[19],ymm3[20],ymm0[20],ymm3[21],ymm0[21],ymm3[22],ymm0[22],ymm3[23],ymm0[23]
629 ; AVX512F-NEXT: vpmullw {{.*}}(%rip), %ymm3, %ymm3
630 ; AVX512F-NEXT: vpand %ymm2, %ymm3, %ymm2
631 ; AVX512F-NEXT: vpackuswb %ymm4, %ymm2, %ymm2
632 ; AVX512F-NEXT: vpsubb %ymm2, %ymm1, %ymm1
635 ; AVX512BW-LABEL: test_remconstant_64i8:
637 ; AVX512BW-NEXT: vpunpckhbw {{.*#+}} zmm1 = zmm0[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
638 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm1, %zmm1
639 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255]
640 ; AVX512BW-NEXT: vpandq %zmm2, %zmm1, %zmm1
641 ; AVX512BW-NEXT: vpunpcklbw {{.*#+}} zmm3 = zmm0[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
642 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm3, %zmm3
643 ; AVX512BW-NEXT: vpandq %zmm2, %zmm3, %zmm3
644 ; AVX512BW-NEXT: vpackuswb %zmm1, %zmm3, %zmm1
645 ; AVX512BW-NEXT: vpmovsxbw %ymm0, %zmm3
646 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm3, %zmm3
647 ; AVX512BW-NEXT: vpsrlw $8, %zmm3, %zmm3
648 ; AVX512BW-NEXT: vpmovwb %zmm3, %ymm3
649 ; AVX512BW-NEXT: vextracti64x4 $1, %zmm0, %ymm4
650 ; AVX512BW-NEXT: vpmovsxbw %ymm4, %zmm4
651 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm4, %zmm4
652 ; AVX512BW-NEXT: vpsrlw $8, %zmm4, %zmm4
653 ; AVX512BW-NEXT: vpmovwb %zmm4, %ymm4
654 ; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm3, %zmm3
655 ; AVX512BW-NEXT: vpaddb %zmm1, %zmm3, %zmm1
656 ; AVX512BW-NEXT: vpunpckhbw {{.*#+}} zmm3 = zmm1[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15,24,24,25,25,26,26,27,27,28,28,29,29,30,30,31,31,40,40,41,41,42,42,43,43,44,44,45,45,46,46,47,47,56,56,57,57,58,58,59,59,60,60,61,61,62,62,63,63]
657 ; AVX512BW-NEXT: vpsraw $8, %zmm3, %zmm3
658 ; AVX512BW-NEXT: vpsllvw {{.*}}(%rip), %zmm3, %zmm3
659 ; AVX512BW-NEXT: vpsrlw $8, %zmm3, %zmm3
660 ; AVX512BW-NEXT: vpunpcklbw {{.*#+}} zmm4 = zmm1[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7,16,16,17,17,18,18,19,19,20,20,21,21,22,22,23,23,32,32,33,33,34,34,35,35,36,36,37,37,38,38,39,39,48,48,49,49,50,50,51,51,52,52,53,53,54,54,55,55]
661 ; AVX512BW-NEXT: vpsraw $8, %zmm4, %zmm4
662 ; AVX512BW-NEXT: vpsllvw {{.*}}(%rip), %zmm4, %zmm4
663 ; AVX512BW-NEXT: vpsrlw $8, %zmm4, %zmm4
664 ; AVX512BW-NEXT: vpackuswb %zmm3, %zmm4, %zmm3
665 ; AVX512BW-NEXT: vpsrlw $7, %zmm1, %zmm1
666 ; AVX512BW-NEXT: vpandq {{.*}}(%rip), %zmm1, %zmm1
667 ; AVX512BW-NEXT: vpaddb %zmm1, %zmm3, %zmm1
668 ; AVX512BW-NEXT: vpunpckhbw {{.*#+}} zmm3 = zmm1[8],zmm0[8],zmm1[9],zmm0[9],zmm1[10],zmm0[10],zmm1[11],zmm0[11],zmm1[12],zmm0[12],zmm1[13],zmm0[13],zmm1[14],zmm0[14],zmm1[15],zmm0[15],zmm1[24],zmm0[24],zmm1[25],zmm0[25],zmm1[26],zmm0[26],zmm1[27],zmm0[27],zmm1[28],zmm0[28],zmm1[29],zmm0[29],zmm1[30],zmm0[30],zmm1[31],zmm0[31],zmm1[40],zmm0[40],zmm1[41],zmm0[41],zmm1[42],zmm0[42],zmm1[43],zmm0[43],zmm1[44],zmm0[44],zmm1[45],zmm0[45],zmm1[46],zmm0[46],zmm1[47],zmm0[47],zmm1[56],zmm0[56],zmm1[57],zmm0[57],zmm1[58],zmm0[58],zmm1[59],zmm0[59],zmm1[60],zmm0[60],zmm1[61],zmm0[61],zmm1[62],zmm0[62],zmm1[63],zmm0[63]
669 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm3, %zmm3
670 ; AVX512BW-NEXT: vpandq %zmm2, %zmm3, %zmm3
671 ; AVX512BW-NEXT: vpunpcklbw {{.*#+}} zmm1 = zmm1[0],zmm0[0],zmm1[1],zmm0[1],zmm1[2],zmm0[2],zmm1[3],zmm0[3],zmm1[4],zmm0[4],zmm1[5],zmm0[5],zmm1[6],zmm0[6],zmm1[7],zmm0[7],zmm1[16],zmm0[16],zmm1[17],zmm0[17],zmm1[18],zmm0[18],zmm1[19],zmm0[19],zmm1[20],zmm0[20],zmm1[21],zmm0[21],zmm1[22],zmm0[22],zmm1[23],zmm0[23],zmm1[32],zmm0[32],zmm1[33],zmm0[33],zmm1[34],zmm0[34],zmm1[35],zmm0[35],zmm1[36],zmm0[36],zmm1[37],zmm0[37],zmm1[38],zmm0[38],zmm1[39],zmm0[39],zmm1[48],zmm0[48],zmm1[49],zmm0[49],zmm1[50],zmm0[50],zmm1[51],zmm0[51],zmm1[52],zmm0[52],zmm1[53],zmm0[53],zmm1[54],zmm0[54],zmm1[55],zmm0[55]
672 ; AVX512BW-NEXT: vpmullw {{.*}}(%rip), %zmm1, %zmm1
673 ; AVX512BW-NEXT: vpandq %zmm2, %zmm1, %zmm1
674 ; AVX512BW-NEXT: vpackuswb %zmm3, %zmm1, %zmm1
675 ; AVX512BW-NEXT: vpsubb %zmm1, %zmm0, %zmm0
676 ; AVX512BW-NEXT: retq
677 %res = srem <64 x i8> %a, <i8 7, i8 8, i8 9, i8 10, i8 11, i8 12, i8 13, i8 14, i8 15, i8 16, i8 17, i8 18, i8 19, i8 20, i8 21, i8 22, i8 23, i8 24, i8 25, i8 26, i8 27, i8 28, i8 29, i8 30, i8 31, i8 32, i8 33, i8 34, i8 35, i8 36, i8 37, i8 38, i8 38, i8 37, i8 36, i8 35, i8 34, i8 33, i8 32, i8 31, i8 30, i8 29, i8 28, i8 27, i8 26, i8 25, i8 24, i8 23, i8 22, i8 21, i8 20, i8 19, i8 18, i8 17, i8 16, i8 15, i8 14, i8 13, i8 12, i8 11, i8 10, i8 9, i8 8, i8 7>