# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc %s -verify-machineinstrs -O0 -run-pass=legalizer -mtriple aarch64-unknown-unknown -o - | FileCheck %s
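# This file covers legalization of G_SELECT on AArch64: vector selects
# (<2 x s64>, <2 x s32>, <16 x s8>) are lowered to compare/sign-extend/bitwise
# sequences, a scalar condition over a vector select is broadcast into a vector
# mask, and the odd-sized scalar type s88 is legalized without crashing.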

---
name:            v2s64
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0, $q1

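    ; A G_SELECT whose condition is a <2 x s1> vector has no legal AArch64 form,
    ; so it is expected to lower to a sign-extended compare mask plus
    ; AND/XOR/OR bit arithmetic, as the CHECK lines below reflect.
    ; Rough IR equivalent (assumed, for reference only):
    ;   %c = icmp sgt <2 x i64> %x, zeroinitializer
    ;   %r = select <2 x i1> %c, <2 x i64> %y, <2 x i64> %x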
    ; CHECK-LABEL: name: v2s64
    ; CHECK: liveins: $q0, $q1
    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
    ; CHECK: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $q1
    ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C]](s64), [[C]](s64)
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s64>) = G_ICMP intpred(sgt), [[COPY]](<2 x s64>), [[BUILD_VECTOR]]
    ; CHECK: [[COPY2:%[0-9]+]]:_(<2 x s64>) = COPY [[ICMP]](<2 x s64>)
    ; CHECK: [[C1:%[0-9]+]]:_(s64) = G_CONSTANT i64 63
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C1]](s64), [[C1]](s64)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s64>) = G_SHL [[COPY2]], [[BUILD_VECTOR1]](<2 x s64>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s64>) = G_ASHR [[SHL]], [[BUILD_VECTOR1]](<2 x s64>)
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 -1
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s64>) = G_BUILD_VECTOR [[C2]](s64), [[C2]](s64)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s64>) = G_XOR [[ASHR]], [[BUILD_VECTOR2]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s64>) = G_AND [[COPY1]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s64>) = G_AND [[COPY]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<2 x s64>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY [[OR]](<2 x s64>)
    ; CHECK: RET_ReallyLR implicit $q0
    %0:_(<2 x s64>) = COPY $q0
    %1:_(<2 x s64>) = COPY $q1
    %3:_(s64) = G_CONSTANT i64 0
    %2:_(<2 x s64>) = G_BUILD_VECTOR %3(s64), %3(s64)
    %4:_(<2 x s1>) = G_ICMP intpred(sgt), %0(<2 x s64>), %2
    %5:_(<2 x s64>) = G_SELECT %4(<2 x s1>), %1, %0
    $q0 = COPY %5(<2 x s64>)
    RET_ReallyLR implicit $q0

...
---
name:            v2s32
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $d0, $d1

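    ; Same lowering as v2s64, but for <2 x s32>: the sign-extension of the
    ; compare mask uses a 31-bit shift pair instead of 63.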
    ; CHECK-LABEL: name: v2s32
    ; CHECK: liveins: $d0, $d1
    ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s32>) = COPY $d0
    ; CHECK: [[COPY1:%[0-9]+]]:_(<2 x s32>) = COPY $d1
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 0
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[C]](s32), [[C]](s32)
    ; CHECK: [[ICMP:%[0-9]+]]:_(<2 x s32>) = G_ICMP intpred(sgt), [[COPY]](<2 x s32>), [[BUILD_VECTOR]]
    ; CHECK: [[COPY2:%[0-9]+]]:_(<2 x s32>) = COPY [[ICMP]](<2 x s32>)
    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_CONSTANT i32 31
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[C1]](s32), [[C1]](s32)
    ; CHECK: [[SHL:%[0-9]+]]:_(<2 x s32>) = G_SHL [[COPY2]], [[BUILD_VECTOR1]](<2 x s32>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<2 x s32>) = G_ASHR [[SHL]], [[BUILD_VECTOR1]](<2 x s32>)
    ; CHECK: [[C2:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<2 x s32>) = G_BUILD_VECTOR [[C2]](s32), [[C2]](s32)
    ; CHECK: [[XOR:%[0-9]+]]:_(<2 x s32>) = G_XOR [[ASHR]], [[BUILD_VECTOR2]]
    ; CHECK: [[AND:%[0-9]+]]:_(<2 x s32>) = G_AND [[COPY1]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<2 x s32>) = G_AND [[COPY]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<2 x s32>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $d0 = COPY [[OR]](<2 x s32>)
    ; CHECK: RET_ReallyLR implicit $d0
    %0:_(<2 x s32>) = COPY $d0
    %1:_(<2 x s32>) = COPY $d1
    %3:_(s32) = G_CONSTANT i32 0
    %2:_(<2 x s32>) = G_BUILD_VECTOR %3(s32), %3(s32)
    %4:_(<2 x s1>) = G_ICMP intpred(sgt), %0(<2 x s32>), %2
    %5:_(<2 x s32>) = G_SELECT %4(<2 x s1>), %1, %0
    $d0 = COPY %5(<2 x s32>)
    RET_ReallyLR implicit $d0

...
---
name:            v16s8
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0, $q1

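    ; The same compare/mask/bitwise lowering, exercised for the 16-lane s8 case
    ; to make sure the per-lane constants (7 and -1) are built for every lane.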
    ; CHECK-LABEL: name: v16s8
    ; CHECK: liveins: $q0, $q1
    ; CHECK: [[COPY:%[0-9]+]]:_(<16 x s8>) = COPY $q0
    ; CHECK: [[COPY1:%[0-9]+]]:_(<16 x s8>) = COPY $q1
    ; CHECK: [[C:%[0-9]+]]:_(s8) = G_CONSTANT i8 0
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8), [[C]](s8)
    ; CHECK: [[ICMP:%[0-9]+]]:_(<16 x s8>) = G_ICMP intpred(sgt), [[COPY]](<16 x s8>), [[BUILD_VECTOR]]
    ; CHECK: [[COPY2:%[0-9]+]]:_(<16 x s8>) = COPY [[ICMP]](<16 x s8>)
    ; CHECK: [[C1:%[0-9]+]]:_(s8) = G_CONSTANT i8 7
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8), [[C1]](s8)
    ; CHECK: [[SHL:%[0-9]+]]:_(<16 x s8>) = G_SHL [[COPY2]], [[BUILD_VECTOR1]](<16 x s8>)
    ; CHECK: [[ASHR:%[0-9]+]]:_(<16 x s8>) = G_ASHR [[SHL]], [[BUILD_VECTOR1]](<16 x s8>)
    ; CHECK: [[C2:%[0-9]+]]:_(s8) = G_CONSTANT i8 -1
    ; CHECK: [[BUILD_VECTOR2:%[0-9]+]]:_(<16 x s8>) = G_BUILD_VECTOR [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8), [[C2]](s8)
    ; CHECK: [[XOR:%[0-9]+]]:_(<16 x s8>) = G_XOR [[ASHR]], [[BUILD_VECTOR2]]
    ; CHECK: [[AND:%[0-9]+]]:_(<16 x s8>) = G_AND [[COPY1]], [[ASHR]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<16 x s8>) = G_AND [[COPY]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<16 x s8>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY [[OR]](<16 x s8>)
    ; CHECK: RET_ReallyLR implicit $q0
    %0:_(<16 x s8>) = COPY $q0
    %1:_(<16 x s8>) = COPY $q1
    %3:_(s8) = G_CONSTANT i8 0
    %2:_(<16 x s8>) = G_BUILD_VECTOR %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8), %3(s8)
    %4:_(<16 x s1>) = G_ICMP intpred(sgt), %0(<16 x s8>), %2
    %5:_(<16 x s8>) = G_SELECT %4(<16 x s1>), %1, %0
    $q0 = COPY %5(<16 x s8>)
    RET_ReallyLR implicit $q0

...
---
name:            scalar_mask
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $q0, $w0

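    ; Here the condition is a scalar compare (w0 == 4100) guarding a vector
    ; select, so the legalizer is expected to sign-extend the s1 result and
    ; broadcast it into a <4 x s32> mask with G_INSERT_VECTOR_ELT plus
    ; G_SHUFFLE_VECTOR before the usual AND/XOR/OR lowering.
    ; Rough IR equivalent (assumed, for reference only):
    ;   %c = icmp eq i32 %w, 4100
    ;   %r = select i1 %c, <4 x float> %v, <4 x float> zeroinitializer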
    ; CHECK-LABEL: name: scalar_mask
    ; CHECK: liveins: $q0, $w0
    ; CHECK: [[COPY:%[0-9]+]]:_(s32) = COPY $w0
    ; CHECK: [[COPY1:%[0-9]+]]:_(<4 x s32>) = COPY $q0
    ; CHECK: [[C:%[0-9]+]]:_(s32) = G_CONSTANT i32 4100
    ; CHECK: [[C1:%[0-9]+]]:_(s32) = G_FCONSTANT float 0.000000e+00
    ; CHECK: [[BUILD_VECTOR:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[C1]](s32), [[C1]](s32), [[C1]](s32), [[C1]](s32)
    ; CHECK: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(eq), [[COPY]](s32), [[C]]
    ; CHECK: [[COPY2:%[0-9]+]]:_(s32) = COPY [[ICMP]](s32)
    ; CHECK: [[SEXT_INREG:%[0-9]+]]:_(s32) = G_SEXT_INREG [[COPY2]], 1
    ; CHECK: [[DEF:%[0-9]+]]:_(<4 x s32>) = G_IMPLICIT_DEF
    ; CHECK: [[C2:%[0-9]+]]:_(s64) = G_CONSTANT i64 0
    ; CHECK: [[IVEC:%[0-9]+]]:_(<4 x s32>) = G_INSERT_VECTOR_ELT [[DEF]], [[SEXT_INREG]](s32), [[C2]](s64)
    ; CHECK: [[SHUF:%[0-9]+]]:_(<4 x s32>) = G_SHUFFLE_VECTOR [[IVEC]](<4 x s32>), [[DEF]], shufflemask(0, 0, 0, 0)
    ; CHECK: [[C3:%[0-9]+]]:_(s32) = G_CONSTANT i32 -1
    ; CHECK: [[BUILD_VECTOR1:%[0-9]+]]:_(<4 x s32>) = G_BUILD_VECTOR [[C3]](s32), [[C3]](s32), [[C3]](s32), [[C3]](s32)
    ; CHECK: [[XOR:%[0-9]+]]:_(<4 x s32>) = G_XOR [[SHUF]], [[BUILD_VECTOR1]]
    ; CHECK: [[AND:%[0-9]+]]:_(<4 x s32>) = G_AND [[COPY1]], [[SHUF]]
    ; CHECK: [[AND1:%[0-9]+]]:_(<4 x s32>) = G_AND [[BUILD_VECTOR]], [[XOR]]
    ; CHECK: [[OR:%[0-9]+]]:_(<4 x s32>) = G_OR [[AND]], [[AND1]]
    ; CHECK: $q0 = COPY [[OR]](<4 x s32>)
    ; CHECK: RET_ReallyLR implicit $q0
    %0:_(s32) = COPY $w0
    %1:_(<4 x s32>) = COPY $q0
    %2:_(s32) = G_CONSTANT i32 4100
    %6:_(s32) = G_FCONSTANT float 0.000000e+00
    %5:_(<4 x s32>) = G_BUILD_VECTOR %6(s32), %6(s32), %6(s32), %6(s32)
    %3:_(s1) = G_ICMP intpred(eq), %0(s32), %2
    %4:_(<4 x s32>) = G_SELECT %3(s1), %1, %5
    $q0 = COPY %4(<4 x s32>)
    RET_ReallyLR implicit $q0

...
---
name:            s88
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $w0, $w1, $x0

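    ; s88 is an odd scalar type; this mainly checks that a G_SELECT of s88
    ; values is legalized (here to an s64 select of implicit defs, since only
    ; the truncated low 64 bits are used) without crashing the legalizer.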
    ; CHECK-LABEL: name: s88
    ; CHECK: liveins: $w0, $w1, $x0
    ; CHECK: %a:_(s32) = COPY $w0
    ; CHECK: %b:_(s32) = COPY $w1
    ; CHECK: [[ICMP:%[0-9]+]]:_(s32) = G_ICMP intpred(sgt), %a(s32), %b
    ; CHECK: %cmp:_(s1) = G_TRUNC [[ICMP]](s32)
    ; CHECK: [[DEF:%[0-9]+]]:_(s64) = G_IMPLICIT_DEF
    ; CHECK: [[SELECT:%[0-9]+]]:_(s64) = G_SELECT %cmp(s1), [[DEF]], [[DEF]]
    ; CHECK: $x0 = COPY [[SELECT]](s64)
    ; CHECK: RET_ReallyLR implicit $x0
    %a:_(s32) = COPY $w0
    %b:_(s32) = COPY $w1
    %cmp:_(s1) = G_ICMP intpred(sgt), %a(s32), %b
    %sel_a:_(s88) = G_IMPLICIT_DEF
    %sel_b:_(s88) = G_IMPLICIT_DEF
    %select:_(s88) = G_SELECT %cmp(s1), %sel_a, %sel_b
    %trunc:_(s64) = G_TRUNC %select
    $x0 = COPY %trunc(s64)
    RET_ReallyLR implicit $x0

...