1 # NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2 # RUN: llc -march=aarch64 -run-pass=legalizer %s -o - | FileCheck %s
# An s128 G_ADD (built from two s64 limbs via G_MERGE_VALUES) must be narrowed
# to a two-limb carry chain: G_UADDO on the low limbs, G_UADDE on the high limbs.
4 name: test_scalar_add_big
7 ; CHECK-LABEL: name: test_scalar_add_big
8 ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
9 ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
10 ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
11 ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x3
12 ; CHECK: [[UADDO:%[0-9]+]]:_(s64), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[COPY]], [[COPY2]]
13 ; CHECK: [[UADDE:%[0-9]+]]:_(s64), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[COPY1]], [[COPY3]], [[UADDO1]]
14 ; CHECK: $x0 = COPY [[UADDO]](s64)
15 ; CHECK: $x1 = COPY [[UADDE]](s64)
20 %4:_(s128) = G_MERGE_VALUES %0(s64), %1(s64)
21 %5:_(s128) = G_MERGE_VALUES %2(s64), %3(s64)
22 %6:_(s128) = G_ADD %4, %5
23 %7:_(s64), %8:_(s64) = G_UNMERGE_VALUES %6(s128)
# Non-power-of-2 wide scalar: an s192 G_ADD narrows to a three-limb carry chain
# (G_UADDO + two G_UADDE). Limb pairing follows the G_MERGE_VALUES operand order:
# %0+%1, %1+%2, %2+%3, matching the UADDO/UADDE operands checked below.
29 name: test_scalar_add_big_nonpow2
32 ; CHECK-LABEL: name: test_scalar_add_big_nonpow2
33 ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
34 ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
35 ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
36 ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x3
37 ; CHECK: [[DEF:%[0-9]+]]:_(s64) = G_IMPLICIT_DEF
38 ; CHECK: [[UADDO:%[0-9]+]]:_(s64), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[COPY]], [[COPY1]]
39 ; CHECK: [[UADDE:%[0-9]+]]:_(s64), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[COPY1]], [[COPY2]], [[UADDO1]]
40 ; CHECK: [[UADDE2:%[0-9]+]]:_(s64), [[UADDE3:%[0-9]+]]:_(s1) = G_UADDE [[COPY2]], [[COPY3]], [[UADDE1]]
41 ; CHECK: $x0 = COPY [[UADDO]](s64)
42 ; CHECK: $x1 = COPY [[UADDE]](s64)
43 ; CHECK: $x2 = COPY [[UADDE2]](s64)
48 %4:_(s192) = G_MERGE_VALUES %0(s64), %1(s64), %2(s64)
49 %5:_(s192) = G_MERGE_VALUES %1(s64), %2(s64), %3(s64)
50 %6:_(s192) = G_ADD %4, %5
51 %7:_(s64), %8:_(s64), %9:_(s64) = G_UNMERGE_VALUES %6(s192)
# An s8 G_ADD is narrower than any legal scalar type, so the legalizer widens it:
# the checks expect the operands truncated to s32, a single s32 G_ADD, and an
# anyext back to s64 for the return copy.
58 name: test_scalar_add_small
61 ; CHECK-LABEL: name: test_scalar_add_small
62 ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
63 ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
64 ; CHECK: [[TRUNC:%[0-9]+]]:_(s32) = G_TRUNC [[COPY]](s64)
65 ; CHECK: [[TRUNC1:%[0-9]+]]:_(s32) = G_TRUNC [[COPY1]](s64)
66 ; CHECK: [[ADD:%[0-9]+]]:_(s32) = G_ADD [[TRUNC]], [[TRUNC1]]
67 ; CHECK: [[ANYEXT:%[0-9]+]]:_(s64) = G_ANYEXT [[ADD]](s32)
68 ; CHECK: $x0 = COPY [[ANYEXT]](s64)
71 %2:_(s8) = G_TRUNC %0(s64)
72 %3:_(s8) = G_TRUNC %1(s64)
73 %4:_(s8) = G_ADD %2, %3
74 %5:_(s64) = G_ANYEXT %4(s8)
# An s96 G_ADD sandwiched between s128 trunc/anyext must legalize to the same
# two-limb s64 UADDO/UADDE chain as a plain s128 add — the checks expect the
# trunc/anyext pair to be folded away entirely.
79 name: test_scalar_add_narrowing
82 ; CHECK-LABEL: name: test_scalar_add_narrowing
83 ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
84 ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
85 ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
86 ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x3
87 ; CHECK: [[UADDO:%[0-9]+]]:_(s64), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[COPY]], [[COPY2]]
88 ; CHECK: [[UADDE:%[0-9]+]]:_(s64), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[COPY1]], [[COPY3]], [[UADDO1]]
89 ; CHECK: $x0 = COPY [[UADDO]](s64)
90 ; CHECK: $x1 = COPY [[UADDE]](s64)
95 %4:_(s128) = G_MERGE_VALUES %0(s64), %1(s64)
96 %5:_(s128) = G_MERGE_VALUES %2(s64), %3(s64)
97 %6:_(s96) = G_TRUNC %4(s128)
98 %7:_(s96) = G_TRUNC %5(s128)
99 %8:_(s96) = G_ADD %6, %7
100 %9:_(s128) = G_ANYEXT %8(s96)
101 %10:_(s64), %11:_(s64) = G_UNMERGE_VALUES %9(s128)
# Same pattern as test_scalar_add_narrowing but with an odd s65 width: it must
# still widen to s128 and narrow to the two-limb UADDO/UADDE chain, with the
# s65 trunc/anyext pair folded away.
107 name: test_scalar_add_narrowing_s65
110 ; CHECK-LABEL: name: test_scalar_add_narrowing_s65
111 ; CHECK: [[COPY:%[0-9]+]]:_(s64) = COPY $x0
112 ; CHECK: [[COPY1:%[0-9]+]]:_(s64) = COPY $x1
113 ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x2
114 ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x3
115 ; CHECK: [[UADDO:%[0-9]+]]:_(s64), [[UADDO1:%[0-9]+]]:_(s1) = G_UADDO [[COPY]], [[COPY2]]
116 ; CHECK: [[UADDE:%[0-9]+]]:_(s64), [[UADDE1:%[0-9]+]]:_(s1) = G_UADDE [[COPY1]], [[COPY3]], [[UADDO1]]
117 ; CHECK: $x0 = COPY [[UADDO]](s64)
118 ; CHECK: $x1 = COPY [[UADDE]](s64)
123 %4:_(s128) = G_MERGE_VALUES %0(s64), %1(s64)
124 %5:_(s128) = G_MERGE_VALUES %2(s64), %3(s64)
125 %6:_(s65) = G_TRUNC %4(s128)
126 %7:_(s65) = G_TRUNC %5(s128)
127 %8:_(s65) = G_ADD %6, %7
128 %9:_(s128) = G_ANYEXT %8(s65)
129 %10:_(s64), %11:_(s64) = G_UNMERGE_VALUES %9(s128)
# A <4 x s64> G_ADD (concat of two <2 x s64> registers) must be split into two
# legal <2 x s64> G_ADDs, with the concat/unmerge pair folded away.
135 name: test_vector_add
138 ; CHECK-LABEL: name: test_vector_add
139 ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
140 ; CHECK: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $q1
141 ; CHECK: [[COPY2:%[0-9]+]]:_(<2 x s64>) = COPY $q2
142 ; CHECK: [[COPY3:%[0-9]+]]:_(<2 x s64>) = COPY $q3
143 ; CHECK: [[ADD:%[0-9]+]]:_(<2 x s64>) = G_ADD [[COPY]], [[COPY2]]
144 ; CHECK: [[ADD1:%[0-9]+]]:_(<2 x s64>) = G_ADD [[COPY1]], [[COPY3]]
145 ; CHECK: $q0 = COPY [[ADD]](<2 x s64>)
146 ; CHECK: $q1 = COPY [[ADD1]](<2 x s64>)
147 %0:_(<2 x s64>) = COPY $q0
148 %1:_(<2 x s64>) = COPY $q1
149 %2:_(<2 x s64>) = COPY $q2
150 %3:_(<2 x s64>) = COPY $q3
151 %4:_(<4 x s64>) = G_CONCAT_VECTORS %0, %1
152 %5:_(<4 x s64>) = G_CONCAT_VECTORS %2, %3
153 %6:_(<4 x s64>) = G_ADD %4, %5
154 %7:_(<2 x s64>), %8:_(<2 x s64>) = G_UNMERGE_VALUES %6(<4 x s64>)
155 $q0 = COPY %7(<2 x s64>)
156 $q1 = COPY %8(<2 x s64>)
# Non-power-of-2 vector: a <6 x s64> G_ADD (concat of three <2 x s64> registers)
# must split into three <2 x s64> G_ADDs. Operand pairing follows the concat
# order (%0+%1, %1+%2, %2+%3), matching the three G_ADD checks below.
160 name: test_vector_add_nonpow2
163 ; CHECK-LABEL: name: test_vector_add_nonpow2
164 ; CHECK: [[COPY:%[0-9]+]]:_(<2 x s64>) = COPY $q0
165 ; CHECK: [[COPY1:%[0-9]+]]:_(<2 x s64>) = COPY $q1
166 ; CHECK: [[COPY2:%[0-9]+]]:_(<2 x s64>) = COPY $q2
167 ; CHECK: [[COPY3:%[0-9]+]]:_(<2 x s64>) = COPY $q3
168 ; CHECK: [[ADD:%[0-9]+]]:_(<2 x s64>) = G_ADD [[COPY]], [[COPY1]]
169 ; CHECK: [[ADD1:%[0-9]+]]:_(<2 x s64>) = G_ADD [[COPY1]], [[COPY2]]
170 ; CHECK: [[ADD2:%[0-9]+]]:_(<2 x s64>) = G_ADD [[COPY2]], [[COPY3]]
171 ; CHECK: $q0 = COPY [[ADD]](<2 x s64>)
172 ; CHECK: $q1 = COPY [[ADD1]](<2 x s64>)
173 ; CHECK: $q2 = COPY [[ADD2]](<2 x s64>)
174 %0:_(<2 x s64>) = COPY $q0
175 %1:_(<2 x s64>) = COPY $q1
176 %2:_(<2 x s64>) = COPY $q2
177 %3:_(<2 x s64>) = COPY $q3
178 %4:_(<6 x s64>) = G_CONCAT_VECTORS %0(<2 x s64>), %1(<2 x s64>), %2(<2 x s64>)
179 %5:_(<6 x s64>) = G_CONCAT_VECTORS %1(<2 x s64>), %2(<2 x s64>), %3(<2 x s64>)
180 %6:_(<6 x s64>) = G_ADD %4, %5
181 %7:_(<2 x s64>), %8:_(<2 x s64>), %9:_(<2 x s64>) = G_UNMERGE_VALUES %6(<6 x s64>)
182 $q0 = COPY %7(<2 x s64>)
183 $q1 = COPY %8(<2 x s64>)
184 $q2 = COPY %9(<2 x s64>)
# add_v8i16: <8 x s16> G_ADD is already legal on AArch64 — the checks expect it
# to pass through the legalizer unchanged.
189 tracksRegLiveness: true
190 machineFunctionInfo: {}
195 ; CHECK-LABEL: name: add_v8i16
196 ; CHECK: liveins: $q0, $q1
197 ; CHECK: [[COPY:%[0-9]+]]:_(<8 x s16>) = COPY $q0
198 ; CHECK: [[COPY1:%[0-9]+]]:_(<8 x s16>) = COPY $q1
199 ; CHECK: [[ADD:%[0-9]+]]:_(<8 x s16>) = G_ADD [[COPY]], [[COPY1]]
200 ; CHECK: $q0 = COPY [[ADD]](<8 x s16>)
201 ; CHECK: RET_ReallyLR implicit $q0
202 %0:_(<8 x s16>) = COPY $q0
203 %1:_(<8 x s16>) = COPY $q1
204 %2:_(<8 x s16>) = G_ADD %0, %1
205 $q0 = COPY %2(<8 x s16>)
206 RET_ReallyLR implicit $q0
# add_v16i8: <16 x s8> G_ADD is already legal on AArch64 — the checks expect it
# to pass through the legalizer unchanged.
212 tracksRegLiveness: true
213 machineFunctionInfo: {}
218 ; CHECK-LABEL: name: add_v16i8
219 ; CHECK: liveins: $q0, $q1
220 ; CHECK: [[COPY:%[0-9]+]]:_(<16 x s8>) = COPY $q0
221 ; CHECK: [[COPY1:%[0-9]+]]:_(<16 x s8>) = COPY $q1
222 ; CHECK: [[ADD:%[0-9]+]]:_(<16 x s8>) = G_ADD [[COPY]], [[COPY1]]
223 ; CHECK: $q0 = COPY [[ADD]](<16 x s8>)
224 ; CHECK: RET_ReallyLR implicit $q0
225 %0:_(<16 x s8>) = COPY $q0
226 %1:_(<16 x s8>) = COPY $q1
227 %2:_(<16 x s8>) = G_ADD %0, %1
228 $q0 = COPY %2(<16 x s8>)
229 RET_ReallyLR implicit $q0
# add_v4i16: 64-bit <4 x s16> G_ADD is already legal on AArch64 — the checks
# expect it to pass through the legalizer unchanged (note $d0/$d1 operands).
235 tracksRegLiveness: true
236 machineFunctionInfo: {}
241 ; CHECK-LABEL: name: add_v4i16
242 ; CHECK: liveins: $d0, $d1
243 ; CHECK: [[COPY:%[0-9]+]]:_(<4 x s16>) = COPY $d0
244 ; CHECK: [[COPY1:%[0-9]+]]:_(<4 x s16>) = COPY $d1
245 ; CHECK: [[ADD:%[0-9]+]]:_(<4 x s16>) = G_ADD [[COPY]], [[COPY1]]
246 ; CHECK: $d0 = COPY [[ADD]](<4 x s16>)
247 ; CHECK: RET_ReallyLR implicit $d0
248 %0:_(<4 x s16>) = COPY $d0
249 %1:_(<4 x s16>) = COPY $d1
250 %2:_(<4 x s16>) = G_ADD %0, %1
251 $d0 = COPY %2(<4 x s16>)
252 RET_ReallyLR implicit $d0
# add_v8s8: 64-bit <8 x s8> G_ADD is already legal on AArch64 — the checks
# expect it to pass through the legalizer unchanged.
256 tracksRegLiveness: true
261 ; CHECK-LABEL: name: add_v8s8
262 ; CHECK: liveins: $d0, $d1
263 ; CHECK: [[COPY:%[0-9]+]]:_(<8 x s8>) = COPY $d0
264 ; CHECK: [[COPY1:%[0-9]+]]:_(<8 x s8>) = COPY $d1
265 ; CHECK: [[ADD:%[0-9]+]]:_(<8 x s8>) = G_ADD [[COPY]], [[COPY1]]
266 ; CHECK: $d0 = COPY [[ADD]](<8 x s8>)
267 ; CHECK: RET_ReallyLR implicit $d0
268 %0:_(<8 x s8>) = COPY $d0
269 %1:_(<8 x s8>) = COPY $d1
270 %2:_(<8 x s8>) = G_ADD %0, %1
271 $d0 = COPY %2(<8 x s8>)
272 RET_ReallyLR implicit $d0