# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -verify-machineinstrs -run-pass=legalizer %s -o - | FileCheck %s
# RUN: llc -mtriple=aarch64-unknown-unknown -verify-machineinstrs -run-pass=legalizer -mattr=+cssc %s -o - | FileCheck %s --check-prefix=CHECK-CSSC
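
# G_CTPOP is already legal for <8 x s8> and <16 x s8>, so the legalizer leaves the two
# vector functions below unchanged, both with and without +cssc.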
---
name:            v8s8_legal
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $d0
    ; CHECK-LABEL: name: v8s8_legal
    ; CHECK: liveins: $d0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(<8 x s8>) = COPY $d0
    ; CHECK-NEXT: %ctpop:_(<8 x s8>) = G_CTPOP %copy(<8 x s8>)
    ; CHECK-NEXT: $d0 = COPY %ctpop(<8 x s8>)
    ; CHECK-NEXT: RET_ReallyLR implicit $d0
    ;
    ; CHECK-CSSC-LABEL: name: v8s8_legal
    ; CHECK-CSSC: liveins: $d0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(<8 x s8>) = COPY $d0
    ; CHECK-CSSC-NEXT: %ctpop:_(<8 x s8>) = G_CTPOP %copy(<8 x s8>)
    ; CHECK-CSSC-NEXT: $d0 = COPY %ctpop(<8 x s8>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $d0
    %copy:_(<8 x s8>) = COPY $d0
    %ctpop:_(<8 x s8>) = G_CTPOP %copy(<8 x s8>)
    $d0 = COPY %ctpop(<8 x s8>)
    RET_ReallyLR implicit $d0

...
---
name:            v16s8_legal
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: v16s8_legal
    ; CHECK: liveins: $q0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(<16 x s8>) = COPY $q0
    ; CHECK-NEXT: %ctpop:_(<16 x s8>) = G_CTPOP %copy(<16 x s8>)
    ; CHECK-NEXT: $q0 = COPY %ctpop(<16 x s8>)
    ; CHECK-NEXT: RET_ReallyLR implicit $q0
    ;
    ; CHECK-CSSC-LABEL: name: v16s8_legal
    ; CHECK-CSSC: liveins: $q0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(<16 x s8>) = COPY $q0
    ; CHECK-CSSC-NEXT: %ctpop:_(<16 x s8>) = G_CTPOP %copy(<16 x s8>)
    ; CHECK-CSSC-NEXT: $q0 = COPY %ctpop(<16 x s8>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $q0
    %copy:_(<16 x s8>) = COPY $q0
    %ctpop:_(<16 x s8>) = G_CTPOP %copy(<16 x s8>)
    $q0 = COPY %ctpop(<16 x s8>)
    RET_ReallyLR implicit $q0

...
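# Scalar s32 and s64 G_CTPOP are lowered: without CSSC the (zero-extended) value is
# bitcast to <8 x s8>, counted with a vector G_CTPOP, and summed with
# @llvm.aarch64.neon.uaddlv. With +cssc the scalar G_CTPOP is legal and kept as-is.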
---
name:            s32_lower
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: s32_lower
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-NEXT: [[ZEXT:%[0-9]+]]:_(s64) = G_ZEXT %copy(s32)
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[ZEXT]](s64)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: %ctpop:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: $w0 = COPY %ctpop(s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: s32_lower
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-CSSC-NEXT: %ctpop:_(s32) = G_CTPOP %copy(s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY %ctpop(s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %copy:_(s32) = COPY $w0
    %ctpop:_(s32) = G_CTPOP %copy(s32)
    $w0 = COPY %ctpop(s32)
    RET_ReallyLR implicit $w0

...
---
name:            s64_lower
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; CHECK-LABEL: name: s64_lower
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(s64) = COPY $x0
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST %copy(s64)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: %ctpop:_(s64) = G_ZEXT [[INT]](s32)
    ; CHECK-NEXT: $x0 = COPY %ctpop(s64)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    ;
    ; CHECK-CSSC-LABEL: name: s64_lower
    ; CHECK-CSSC: liveins: $x0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(s64) = COPY $x0
    ; CHECK-CSSC-NEXT: %ctpop:_(s64) = G_CTPOP %copy(s64)
    ; CHECK-CSSC-NEXT: $x0 = COPY %ctpop(s64)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $x0
    %copy:_(s64) = COPY $x0
    %ctpop:_(s64) = G_CTPOP %copy(s64)
    $x0 = COPY %ctpop(s64)
    RET_ReallyLR implicit $x0

...
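# s128 G_CTPOP: without CSSC all 128 bits are counted with one <16 x s8> G_CTPOP plus
# uaddlv; with +cssc it is narrowed to two s64 G_CTPOPs whose results are added. Either
# way the high 64 bits of the result are a constant zero.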
---
name:            s128_lower
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0, $x1
    ; CHECK-LABEL: name: s128_lower
    ; CHECK: liveins: $x0, $x1
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK-NEXT: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[COPY]](s64), [[COPY1]](s64)
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[MV]](s128)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<16 x s8>)
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; CHECK-NEXT: [[MV1:%[0-9]+]]:_(s64) = G_MERGE_VALUES [[INT]](s32), [[C]](s32)
    ; CHECK-NEXT: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK-NEXT: $x0 = COPY [[MV1]](s64)
    ; CHECK-NEXT: $x1 = COPY [[C1]](s64)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0, implicit $x1
    ;
    ; CHECK-CSSC-LABEL: name: s128_lower
    ; CHECK-CSSC: liveins: $x0, $x1
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK-CSSC-NEXT: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(s64) = G_CTPOP [[COPY]](s64)
    ; CHECK-CSSC-NEXT: [[CTPOP1:%[0-9]+]]:_(s64) = G_CTPOP [[COPY1]](s64)
    ; CHECK-CSSC-NEXT: [[ADD:%[0-9]+]]:_(s64) = G_ADD [[CTPOP]], [[CTPOP1]]
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK-CSSC-NEXT: $x0 = COPY [[ADD]](s64)
    ; CHECK-CSSC-NEXT: $x1 = COPY [[C]](s64)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $x0, implicit $x1
    %1:_(s64) = COPY $x0
    %2:_(s64) = COPY $x1
    %0:_(s128) = G_MERGE_VALUES %1(s64), %2(s64)
    %3:_(s128) = G_CTPOP %0(s128)
    %4:_(s64), %5:_(s64) = G_UNMERGE_VALUES %3(s128)
    $x0 = COPY %4(s64)
    $x1 = COPY %5(s64)
    RET_ReallyLR implicit $x0, implicit $x1

...
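# Scalars narrower than s32 (s16, s8, s3) are masked to their true width and widened
# before the usual lowering; with +cssc they become a single s32 G_CTPOP of the masked
# value.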
---
name:            widen_s16
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: widen_s16
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 65535
    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT %copy(s32)
    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[AND]](s64)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[INT]](s32)
    ; CHECK-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: widen_s16
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 65535
    ; CHECK-CSSC-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND %copy, [[C]]
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(s32) = G_CTPOP [[AND]](s32)
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[CTPOP]](s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %copy:_(s32) = COPY $w0
    %trunc:_(s16) = G_TRUNC %copy(s32)
    %ctpop:_(s16) = G_CTPOP %trunc(s16)
    %ext:_(s32) = G_ANYEXT %ctpop(s16)
    $w0 = COPY %ext(s32)
    RET_ReallyLR implicit $w0

...
---
name:            widen_s8
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: widen_s8
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT %copy(s32)
    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[AND]](s64)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[INT]](s32)
    ; CHECK-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: widen_s8
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; CHECK-CSSC-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND %copy, [[C]]
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(s32) = G_CTPOP [[AND]](s32)
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[CTPOP]](s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %copy:_(s32) = COPY $w0
    %trunc:_(s8) = G_TRUNC %copy(s32)
    %ctpop:_(s8) = G_CTPOP %trunc(s8)
    %ext:_(s32) = G_ANYEXT %ctpop(s8)
    $w0 = COPY %ext(s32)
    RET_ReallyLR implicit $w0

...
---
name:            widen_s3
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: widen_s3
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 7
    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT %copy(s32)
    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[AND]](s64)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[INT]](s32)
    ; CHECK-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: widen_s3
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 7
    ; CHECK-CSSC-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND %copy, [[C]]
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(s32) = G_CTPOP [[AND]](s32)
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[CTPOP]](s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %copy:_(s32) = COPY $w0
    %trunc:_(s3) = G_TRUNC %copy(s32)
    %ctpop:_(s3) = G_CTPOP %trunc(s3)
    %ext:_(s32) = G_ANYEXT %ctpop(s3)
    $w0 = COPY %ext(s32)
    RET_ReallyLR implicit $w0

...
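# The result type can also be wider than the source type; an s8 -> s16 G_CTPOP is
# legalized exactly like the widen_s8 case above.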
---
name:            different_sizes
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0
    ; CHECK-LABEL: name: different_sizes
    ; CHECK: liveins: $w0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-NEXT: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 255
    ; CHECK-NEXT: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT %copy(s32)
    ; CHECK-NEXT: [[AND:%[0-9]+]]:_(s64) = G_AND [[ANYEXT]], [[C]]
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[AND]](s64)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(s32) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlv), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[INT]](s32)
    ; CHECK-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-NEXT: RET_ReallyLR implicit $w0
    ;
    ; CHECK-CSSC-LABEL: name: different_sizes
    ; CHECK-CSSC: liveins: $w0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: %copy:_(s32) = COPY $w0
    ; CHECK-CSSC-NEXT: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 255
    ; CHECK-CSSC-NEXT: [[AND:%[0-9]+]]:_(s32) = G_AND %copy, [[C]]
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(s32) = G_CTPOP [[AND]](s32)
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(s32) = COPY [[CTPOP]](s32)
    ; CHECK-CSSC-NEXT: $w0 = COPY [[COPY]](s32)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $w0
    %copy:_(s32) = COPY $w0
    %trunc:_(s8) = G_TRUNC %copy(s32)
    %ctpop:_(s16) = G_CTPOP %trunc(s8)
    %ext:_(s32) = G_ANYEXT %ctpop(s16)
    $w0 = COPY %ext(s32)
    RET_ReallyLR implicit $w0

...
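# Vectors with elements wider than s8 use custom legalization: the bits are counted with a
# <16 x s8> or <8 x s8> G_CTPOP and the per-lane sums are rebuilt with one
# @llvm.aarch64.neon.uaddlp per doubling of the element size. CSSC does not change this.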
---
name:            custom_8x16
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: custom_8x16
    ; CHECK: liveins: $q0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(<8 x s16>) = COPY $q0
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[COPY]](<8 x s16>)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(<8 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<16 x s8>)
    ; CHECK-NEXT: $q0 = COPY [[INT]](<8 x s16>)
    ; CHECK-NEXT: RET_ReallyLR implicit $q0
    ;
    ; CHECK-CSSC-LABEL: name: custom_8x16
    ; CHECK-CSSC: liveins: $q0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(<8 x s16>) = COPY $q0
    ; CHECK-CSSC-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[COPY]](<8 x s16>)
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-CSSC-NEXT: [[INT:%[0-9]+]]:_(<8 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<16 x s8>)
    ; CHECK-CSSC-NEXT: $q0 = COPY [[INT]](<8 x s16>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $q0
    %0:_(<8 x s16>) = COPY $q0
    %1:_(<8 x s16>) = G_CTPOP %0(<8 x s16>)
    $q0 = COPY %1(<8 x s16>)
    RET_ReallyLR implicit $q0

...
---
name:            custom_4x32
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: custom_4x32
    ; CHECK: liveins: $q0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $q0
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[COPY]](<4 x s32>)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(<8 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<16 x s8>)
    ; CHECK-NEXT: [[INT1:%[0-9]+]]:_(<4 x s32>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT]](<8 x s16>)
    ; CHECK-NEXT: $q0 = COPY [[INT1]](<4 x s32>)
    ; CHECK-NEXT: RET_ReallyLR implicit $q0
    ;
    ; CHECK-CSSC-LABEL: name: custom_4x32
    ; CHECK-CSSC: liveins: $q0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(<4 x s32>) = COPY $q0
    ; CHECK-CSSC-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[COPY]](<4 x s32>)
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-CSSC-NEXT: [[INT:%[0-9]+]]:_(<8 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<16 x s8>)
    ; CHECK-CSSC-NEXT: [[INT1:%[0-9]+]]:_(<4 x s32>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT]](<8 x s16>)
    ; CHECK-CSSC-NEXT: $q0 = COPY [[INT1]](<4 x s32>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $q0
    %0:_(<4 x s32>) = COPY $q0
    %1:_(<4 x s32>) = G_CTPOP %0(<4 x s32>)
    $q0 = COPY %1(<4 x s32>)
    RET_ReallyLR implicit $q0

...
---
name:            custom_2x64
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0
    ; CHECK-LABEL: name: custom_2x64
    ; CHECK: liveins: $q0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[COPY]](<2 x s64>)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(<8 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<16 x s8>)
    ; CHECK-NEXT: [[INT1:%[0-9]+]]:_(<4 x s32>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT]](<8 x s16>)
    ; CHECK-NEXT: [[INT2:%[0-9]+]]:_(<2 x s64>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT1]](<4 x s32>)
    ; CHECK-NEXT: $q0 = COPY [[INT2]](<2 x s64>)
    ; CHECK-NEXT: RET_ReallyLR implicit $q0
    ;
    ; CHECK-CSSC-LABEL: name: custom_2x64
    ; CHECK-CSSC: liveins: $q0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
    ; CHECK-CSSC-NEXT: [[BITCAST:%[0-9]+]]:_(<16 x s8>) = G_BITCAST [[COPY]](<2 x s64>)
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(<16 x s8>) = G_CTPOP [[BITCAST]](<16 x s8>)
    ; CHECK-CSSC-NEXT: [[INT:%[0-9]+]]:_(<8 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<16 x s8>)
    ; CHECK-CSSC-NEXT: [[INT1:%[0-9]+]]:_(<4 x s32>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT]](<8 x s16>)
    ; CHECK-CSSC-NEXT: [[INT2:%[0-9]+]]:_(<2 x s64>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT1]](<4 x s32>)
    ; CHECK-CSSC-NEXT: $q0 = COPY [[INT2]](<2 x s64>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $q0
    %0:_(<2 x s64>) = COPY $q0
    %1:_(<2 x s64>) = G_CTPOP %0(<2 x s64>)
    $q0 = COPY %1(<2 x s64>)
    RET_ReallyLR implicit $q0

...
---
name:            custom_4x16
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $d0
    ; CHECK-LABEL: name: custom_4x16
    ; CHECK: liveins: $d0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $d0
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[COPY]](<4 x s16>)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(<4 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: $d0 = COPY [[INT]](<4 x s16>)
    ; CHECK-NEXT: RET_ReallyLR implicit $d0
    ;
    ; CHECK-CSSC-LABEL: name: custom_4x16
    ; CHECK-CSSC: liveins: $d0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $d0
    ; CHECK-CSSC-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[COPY]](<4 x s16>)
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-CSSC-NEXT: [[INT:%[0-9]+]]:_(<4 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<8 x s8>)
    ; CHECK-CSSC-NEXT: $d0 = COPY [[INT]](<4 x s16>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $d0
    %0:_(<4 x s16>) = COPY $d0
    %1:_(<4 x s16>) = G_CTPOP %0(<4 x s16>)
    $d0 = COPY %1(<4 x s16>)
    RET_ReallyLR implicit $d0

...
---
name:            custom_2x32
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $d0
    ; CHECK-LABEL: name: custom_2x32
    ; CHECK: liveins: $d0
    ; CHECK-NEXT: {{ $}}
    ; CHECK-NEXT: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $d0
    ; CHECK-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[COPY]](<2 x s32>)
    ; CHECK-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-NEXT: [[INT:%[0-9]+]]:_(<4 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<8 x s8>)
    ; CHECK-NEXT: [[INT1:%[0-9]+]]:_(<2 x s32>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT]](<4 x s16>)
    ; CHECK-NEXT: $d0 = COPY [[INT1]](<2 x s32>)
    ; CHECK-NEXT: RET_ReallyLR implicit $d0
    ;
    ; CHECK-CSSC-LABEL: name: custom_2x32
    ; CHECK-CSSC: liveins: $d0
    ; CHECK-CSSC-NEXT: {{ $}}
    ; CHECK-CSSC-NEXT: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $d0
    ; CHECK-CSSC-NEXT: [[BITCAST:%[0-9]+]]:_(<8 x s8>) = G_BITCAST [[COPY]](<2 x s32>)
    ; CHECK-CSSC-NEXT: [[CTPOP:%[0-9]+]]:_(<8 x s8>) = G_CTPOP [[BITCAST]](<8 x s8>)
    ; CHECK-CSSC-NEXT: [[INT:%[0-9]+]]:_(<4 x s16>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[CTPOP]](<8 x s8>)
    ; CHECK-CSSC-NEXT: [[INT1:%[0-9]+]]:_(<2 x s32>) = G_INTRINSIC intrinsic(@llvm.aarch64.neon.uaddlp), [[INT]](<4 x s16>)
    ; CHECK-CSSC-NEXT: $d0 = COPY [[INT1]](<2 x s32>)
    ; CHECK-CSSC-NEXT: RET_ReallyLR implicit $d0
    %0:_(<2 x s32>) = COPY $d0
    %1:_(<2 x s32>) = G_CTPOP %0(<2 x s32>)
    $d0 = COPY %1(<2 x s32>)
    RET_ReallyLR implicit $d0

...