# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
#
# Test using the xro addressing mode with immediates. This should be done for
# wide constants which are preferably materialized with a mov rather than an
# add.
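#
# In short: offsets that already fit a load/store immediate form or an add
# immediate (optionally lsl #12) are selected with those forms instead; only
# wider constants justify a separate mov plus register-register (xro)
# addressing. The functions below exercise both sides of that decision.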
...
---
name:            use_xro_cannot_encode_add_lsl
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we use the XRO addressing mode when the constant cannot be
    ; represented using an add + lsl.
    ;
    ; cst = 0000000111000000
    ; cst & 000fffffff000000 != 0
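    ; (4580179968 == 0x111000000, which has bits set above bit 23 and so is
    ; out of reach of an add/add-lsl-#12 immediate.)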
    ;
    ; CHECK-LABEL: name: use_xro_cannot_encode_add_lsl
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %cst:gpr64 = MOVi64imm 4580179968
    ; CHECK: %load:gpr64 = LDRXroX %copy, %cst, 0, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 4580179968
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            use_xro_preferred_mov
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we use the XRO addressing mode when the constant can be
    ; represented using a single movk.
    ;
    ; cst = 000000000000f000
    ; cst & 000fffffff000000 == 0
    ; cst & ffffffffffff0fff == 0
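    ; (61440 == 0xf000 fits entirely in the low 16 bits, so a single 32-bit
    ; mov materializes it.)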
    ;
    ; CHECK-LABEL: name: use_xro_preferred_mov
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: [[MOVi32imm:%[0-9]+]]:gpr32 = MOVi32imm 61440
    ; CHECK: %cst:gpr64 = SUBREG_TO_REG 0, [[MOVi32imm]], %subreg.sub_32
    ; CHECK: %load:gpr64 = LDRXroX %copy, %cst, 0, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 61440
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            use_xro_negative_imm
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that this works even if we have a negative immediate.
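    ; (-61440 == -0xf000: outside every load/store immediate offset range, so
    ; the constant still goes through a mov.)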
    ;
    ; CHECK-LABEL: name: use_xro_negative_imm
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %cst:gpr64 = MOVi64imm -61440
    ; CHECK: %load:gpr64 = LDRXroX %copy, %cst, 0, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 -61440
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            dont_use_xro_selectable_imm
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Immediates that can be encoded in a LDRXui should be skipped.
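    ; (16 == 2 * 8: an unsigned offset of 2 after LDRXui's scaling by the
    ; 8-byte access size.)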
    ;
    ; CHECK-LABEL: name: dont_use_xro_selectable_imm
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDRXui %copy, 2 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 16
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            dont_use_xro_selectable_negative_imm
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Negative immediates that can be encoded in a LDURXi should be skipped.
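    ; (-16 fits the signed 9-bit unscaled offset range of LDURXi, [-256, 255].)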
    ;
    ; CHECK-LABEL: name: dont_use_xro_selectable_negative_imm
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDURXi %copy, -16 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 -16
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            dont_use_xro_zero
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Immediates that can be encoded in a LDRXui should be skipped.
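    ; (A zero offset folds straight into LDRXui with offset 0; no constant is
    ; materialized at all.)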
    ;
    ; CHECK-LABEL: name: dont_use_xro_zero
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDRXui %copy, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 0
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            dont_use_xro_in_range
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we skip constants which can be encoded in an add.
    ; 17 is in [0x0, 0xfff]
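    ; (17 is not a multiple of 8, so the load itself uses the unscaled LDURXi
    ; form rather than LDRXui.)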
    ;
    ; CHECK-LABEL: name: dont_use_xro_in_range
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64sp = COPY $x0
    ; CHECK: %load:gpr64 = LDURXi %copy, 17 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 17
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...
---
name:            dont_use_xro_add_lsl
alignment:       4
legalized:       true
regBankSelected: true
tracksRegLiveness: true
body:             |
  bb.0:
    liveins: $x0
    ; Check that we skip when we have an add with an lsl which cannot be
    ; represented as a movk.
    ;
    ; cst = 0x0000000000111000
    ; cst & 000fffffff000000 == 0
    ; cst & ffffffffff00ffff != 0
    ; cst & ffffffffffff0fff != 0
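    ; (1118208 == 0x111000 == 273 << 12, so ADDXri with shift #12 encodes it,
    ; while no single mov can.)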
    ;
    ; CHECK-LABEL: name: dont_use_xro_add_lsl
    ; CHECK: liveins: $x0
    ; CHECK: %copy:gpr64 = COPY $x0
    ; CHECK: [[COPY:%[0-9]+]]:gpr64common = COPY %copy
    ; CHECK: %addr:gpr64sp = ADDXri [[COPY]], 273, 12
    ; CHECK: %load:gpr64 = LDRXui %addr, 0 :: (volatile load (s64))
    ; CHECK: RET_ReallyLR
    %copy:gpr(p0) = COPY $x0
    %cst:gpr(s64) = G_CONSTANT i64 1118208
    %addr:gpr(p0) = G_PTR_ADD %copy, %cst(s64)
    %load:gpr(s64) = G_LOAD %addr(p0) :: (volatile load (s64))
    RET_ReallyLR
...