1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2 ; RUN: llc -mtriple=arm64-eabi < %s | FileCheck %s
; Hidden external global: @t5 uses its adrp/add-materialized address as the
; base for a register-offset load.
5 @object = external hidden global i64, section "__DATA, __objc_ivar", align 8
; base + small offset: 1*8 = #8 fits the scaled unsigned 12-bit immediate
; field, so it folds straight into the load's addressing mode.
8 define void @t1(ptr %object) {
11 ; CHECK-NEXT: ldr xzr, [x0, #8]
13 %incdec.ptr = getelementptr inbounds i64, ptr %object, i64 1
14 %tmp = load volatile i64, ptr %incdec.ptr, align 8
18 ; base + offset (> imm9)
; -33*8 = -264 is below the signed 9-bit unscaled range [-256, 255] and
; negative offsets cannot use the scaled imm12 form, so the address is
; computed with a separate sub before an offset-less load.
19 define void @t2(ptr %object) {
22 ; CHECK-NEXT: sub x8, x0, #264
23 ; CHECK-NEXT: ldr xzr, [x8]
25 %incdec.ptr = getelementptr inbounds i64, ptr %object, i64 -33
26 %tmp = load volatile i64, ptr %incdec.ptr, align 8
30 ; base + unsigned offset (> imm9 and <= imm12 * size of type in bytes)
; 4095*8 = 32760 is the maximum scaled 12-bit unsigned offset for an
; 8-byte access, so it still folds into the load's immediate.
31 define void @t3(ptr %object) {
34 ; CHECK-NEXT: ldr xzr, [x0, #32760]
36 %incdec.ptr = getelementptr inbounds i64, ptr %object, i64 4095
37 %tmp = load volatile i64, ptr %incdec.ptr, align 8
41 ; base + unsigned offset (> imm12 * size of type in bytes)
; 4096*8 = 32768 exceeds the scaled imm12 range, so the offset is
; materialized in a register and a register-offset load is used.
42 define void @t4(ptr %object) {
45 ; CHECK-NEXT: mov w8, #32768 // =0x8000
46 ; CHECK-NEXT: ldr xzr, [x0, x8]
48 %incdec.ptr = getelementptr inbounds i64, ptr %object, i64 4096
49 %tmp = load volatile i64, ptr %incdec.ptr, align 8
; Global base + variable index: the symbol address is formed with
; adrp/add(:lo12:) and the i64 index becomes a register offset with
; lsl #3 scaling for the 8-byte element size.
54 define void @t5(i64 %a) {
57 ; CHECK-NEXT: adrp x8, object
58 ; CHECK-NEXT: add x8, x8, :lo12:object
59 ; CHECK-NEXT: ldr xzr, [x8, x0, lsl #3]
61 %incdec.ptr = getelementptr inbounds i64, ptr @object, i64 %a
62 %tmp = load volatile i64, ptr %incdec.ptr, align 8
; base + scaled register index + large immediate: the two GEPs lower to
; an add folding the shifted index, while the out-of-range 32768 byte
; offset is materialized and used as a register offset on the load.
67 define void @t6(i64 %a, ptr %object) {
70 ; CHECK-NEXT: add x8, x1, x0, lsl #3
71 ; CHECK-NEXT: mov w9, #32768 // =0x8000
72 ; CHECK-NEXT: ldr xzr, [x8, x9]
74 %tmp1 = getelementptr inbounds i64, ptr %object, i64 %a
75 %incdec.ptr = getelementptr inbounds i64, ptr %tmp1, i64 4096
76 %tmp = load volatile i64, ptr %incdec.ptr, align 8
80 ; Test base + wide immediate
; 0xffff needs a single mov (movz) to materialize; it is then consumed
; as a register offset rather than folded into an add.
81 define void @t7(i64 %a) {
84 ; CHECK-NEXT: mov w8, #65535 // =0xffff
85 ; CHECK-NEXT: ldr xzr, [x0, x8]
87 %1 = add i64 %a, 65535 ;0xffff
88 %2 = inttoptr i64 %1 to ptr
89 %3 = load volatile i64, ptr %2, align 8
; Negative wide immediate: -4662 is outside the add/sub immediate range,
; so it is materialized with a single mov (movn encoding) and used as a
; register offset.
93 define void @t8(i64 %a) {
96 ; CHECK-NEXT: mov x8, #-4662 // =0xffffffffffffedca
97 ; CHECK-NEXT: ldr xzr, [x0, x8]
99 %1 = sub i64 %a, 4662 ;-4662 is 0xffffffffffffedca
100 %2 = inttoptr i64 %1 to ptr
101 %3 = load volatile i64, ptr %2, align 8
; Larger negative wide immediate, still materialized with one mov and
; consumed as a register offset on the load.
105 define void @t9(i64 %a) {
108 ; CHECK-NEXT: mov x8, #-305463297 // =0xffffffffedcaffff
109 ; CHECK-NEXT: ldr xzr, [x0, x8]
111 %1 = add i64 -305463297, %a ;-305463297 is 0xffffffffedcaffff
112 %2 = inttoptr i64 %1 to ptr
113 %3 = load volatile i64, ptr %2, align 8
; Huge immediate that a single shifted movz can encode
; (0x123 << 48 = 0x123000000000000), then used as a register offset.
117 define void @t10(i64 %a) {
120 ; CHECK-NEXT: mov x8, #81909218222800896 // =0x123000000000000
121 ; CHECK-NEXT: ldr xzr, [x0, x8]
123 %1 = add i64 %a, 81909218222800896 ;0x123000000000000
124 %2 = inttoptr i64 %1 to ptr
125 %3 = load volatile i64, ptr %2, align 8
; Immediate spanning two 16-bit chunks (0x1234567): materialized with a
; mov + movk pair, then used as a register offset.
129 define void @t11(i64 %a) {
132 ; CHECK-NEXT: mov w8, #17767 // =0x4567
133 ; CHECK-NEXT: movk w8, #291, lsl #16
134 ; CHECK-NEXT: ldr xzr, [x0, x8]
136 %1 = add i64 %a, 19088743 ;0x1234567
137 %2 = inttoptr i64 %1 to ptr
138 %3 = load volatile i64, ptr %2, align 8
142 ; Test some boundaries that should not use movz/movn/orr
; 4095 = 0xfff is the largest plain add immediate, so the address is
; formed with add and loaded with no offset.
143 define void @t12(i64 %a) {
146 ; CHECK-NEXT: add x8, x0, #4095
147 ; CHECK-NEXT: ldr xzr, [x8]
149 %1 = add i64 %a, 4095 ;0xfff
150 %2 = inttoptr i64 %1 to ptr
151 %3 = load volatile i64, ptr %2, align 8
; Negative counterpart of t12: -4095 maps onto a sub immediate followed
; by an offset-less load.
155 define void @t13(i64 %a) {
158 ; CHECK-NEXT: sub x8, x0, #4095
159 ; CHECK-NEXT: ldr xzr, [x8]
161 %1 = add i64 %a, -4095 ;-0xfff
162 %2 = inttoptr i64 %1 to ptr
163 %3 = load volatile i64, ptr %2, align 8
; 0x123000 fits the shifted form of the add immediate (#291, lsl #12),
; avoiding a mov/movk sequence.
167 define void @t14(i64 %a) {
170 ; CHECK-NEXT: add x8, x0, #291, lsl #12 // =1191936
171 ; CHECK-NEXT: ldr xzr, [x8]
173 %1 = add i64 %a, 1191936 ;0x123000
174 %2 = inttoptr i64 %1 to ptr
175 %3 = load volatile i64, ptr %2, align 8
; Negative counterpart of t14: -0x123000 becomes a shifted sub immediate
; (#291, lsl #12).
179 define void @t15(i64 %a) {
182 ; CHECK-NEXT: sub x8, x0, #291, lsl #12 // =1191936
183 ; CHECK-NEXT: ldr xzr, [x8]
185 %1 = add i64 %a, -1191936 ;0xFFFFFFFFFFEDD000
186 %2 = inttoptr i64 %1 to ptr
187 %3 = load volatile i64, ptr %2, align 8
; 0x7000 = 28672 is 8-byte aligned and within the scaled imm12 range, so
; it folds directly into the load's immediate with no extra instruction.
191 define void @t16(i64 %a) {
194 ; CHECK-NEXT: ldr xzr, [x0, #28672]
196 %1 = add i64 %a, 28672 ;0x7000
197 %2 = inttoptr i64 %1 to ptr
198 %3 = load volatile i64, ptr %2, align 8
; -256 fits the signed 9-bit unscaled-offset form, selecting ldur instead
; of a separate address computation.
202 define void @t17(i64 %a) {
205 ; CHECK-NEXT: ldur xzr, [x0, #-256]
207 %1 = add i64 %a, -256 ;-0x100
208 %2 = inttoptr i64 %1 to ptr
209 %3 = load volatile i64, ptr %2, align 8