# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64 -run-pass=legalizer -global-isel-abort=1 %s -o - | FileCheck %s
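#
# These tests cover legalization of the indexed (pre-/post-incrementing) memory
# operations G_INDEXED_STORE, G_INDEXED_LOAD, G_INDEXED_ZEXTLOAD and
# G_INDEXED_SEXTLOAD. The trailing immediate operand selects the addressing
# mode (1 = pre-indexed, 0 = post-indexed), and each test copies the writeback
# pointer to $x0 so the writeback def is not dead.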
---
name:            post_store_s64
body:             |
  bb.0:
    liveins: $x0, $x1

    ; CHECK-LABEL: name: post_store_s64
    ; CHECK: liveins: $x0, $x1
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %val:_(s64) = COPY $x1
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %writeback:_(p0) = G_INDEXED_STORE %val(s64), %ptr, %offset(s64), 0 :: (store (s64))
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %val:_(s64) = COPY $x1
    %offset:_(s64) = G_CONSTANT i64 8
    %writeback:_(p0) = G_INDEXED_STORE %val, %ptr, %offset, 0 :: (store (s64), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_store_v2s64
body:             |
  bb.0:
    liveins: $x0, $q0

    ; CHECK-LABEL: name: post_store_v2s64
    ; CHECK: liveins: $x0, $q0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %val:_(<2 x s64>) = COPY $q0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %writeback:_(p0) = G_INDEXED_STORE %val(<2 x s64>), %ptr, %offset(s64), 0 :: (store (<2 x s64>), align 8)
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %val:_(<2 x s64>) = COPY $q0
    %offset:_(s64) = G_CONSTANT i64 8
    %writeback:_(p0) = G_INDEXED_STORE %val, %ptr, %offset, 0 :: (store (<2 x s64>), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_load_s64
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: post_load_s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %dst:_(s64), %writeback:_(p0) = G_INDEXED_LOAD %ptr, %offset(s64), 0 :: (load (s64))
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %offset:_(s64) = G_CONSTANT i64 8
    %dst:_(s64), %writeback:_(p0) = G_INDEXED_LOAD %ptr, %offset, 0 :: (load (s64), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_load_v2s64
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: post_load_v2s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %dst:_(<2 x s64>), %writeback:_(p0) = G_INDEXED_LOAD %ptr, %offset(s64), 0 :: (load (<2 x s64>), align 8)
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: $q0 = COPY %dst(<2 x s64>)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0, implicit $q0
    %ptr:_(p0) = COPY $x0
    %offset:_(s64) = G_CONSTANT i64 8
    %dst:_(<2 x s64>), %writeback:_(p0) = G_INDEXED_LOAD %ptr, %offset, 0 :: (load (<2 x s64>), align 8)
    $x0 = COPY %writeback
    $q0 = COPY %dst
    RET_ReallyLR implicit $x0, implicit $q0
...
---
name:            pre_store_s64
body:             |
  bb.0:
    liveins: $x0, $x1

    ; CHECK-LABEL: name: pre_store_s64
    ; CHECK: liveins: $x0, $x1
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %val:_(s64) = COPY $x1
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %writeback:_(p0) = G_INDEXED_STORE %val(s64), %ptr, %offset(s64), 1 :: (store (s64))
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %val:_(s64) = COPY $x1
    %offset:_(s64) = G_CONSTANT i64 8
    %writeback:_(p0) = G_INDEXED_STORE %val, %ptr, %offset, 1 :: (store (s64), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_zextload_s8_to_s64
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: post_zextload_s8_to_s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %dst:_(s64), %writeback:_(p0) = G_INDEXED_ZEXTLOAD %ptr, %offset(s64), 0 :: (load (s8), align 8)
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %offset:_(s64) = G_CONSTANT i64 8
    %dst:_(s64), %writeback:_(p0) = G_INDEXED_ZEXTLOAD %ptr, %offset, 0 :: (load (s8), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_sextload_s8_to_s64
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: post_sextload_s8_to_s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %dst:_(s64), %writeback:_(p0) = G_INDEXED_SEXTLOAD %ptr, %offset(s64), 0 :: (load (s8), align 8)
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %offset:_(s64) = G_CONSTANT i64 8
    %dst:_(s64), %writeback:_(p0) = G_INDEXED_SEXTLOAD %ptr, %offset, 0 :: (load (s8), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_sextload_s32_to_s64
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: post_sextload_s32_to_s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %dst:_(s64), %writeback:_(p0) = G_INDEXED_SEXTLOAD %ptr, %offset(s64), 0 :: (load (s32), align 8)
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %offset:_(s64) = G_CONSTANT i64 8
    %dst:_(s64), %writeback:_(p0) = G_INDEXED_SEXTLOAD %ptr, %offset, 0 :: (load (s32), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...
---
name:            post_zextload_s32_to_s64
body:             |
  bb.0:
    liveins: $x0

    ; CHECK-LABEL: name: post_zextload_s32_to_s64
    ; CHECK: liveins: $x0
    ; CHECK-NEXT: {{  $}}
    ; CHECK-NEXT: %ptr:_(p0) = COPY $x0
    ; CHECK-NEXT: %offset:_(s64) = G_CONSTANT i64 8
    ; CHECK-NEXT: %dst:_(s64), %writeback:_(p0) = G_INDEXED_ZEXTLOAD %ptr, %offset(s64), 0 :: (load (s32), align 8)
    ; CHECK-NEXT: $x0 = COPY %writeback(p0)
    ; CHECK-NEXT: RET_ReallyLR implicit $x0
    %ptr:_(p0) = COPY $x0
    %offset:_(s64) = G_CONSTANT i64 8
    %dst:_(s64), %writeback:_(p0) = G_INDEXED_ZEXTLOAD %ptr, %offset, 0 :: (load (s32), align 8)
    $x0 = COPY %writeback
    RET_ReallyLR implicit $x0
...