1 ; RUN: llc -mtriple=arm64-apple-ios -global-isel -global-isel-abort=1 -verify-machineinstrs -stop-after=aarch64-prelegalizer-combiner -force-legal-indexing %s -o - | FileCheck %s
2 ; RUN: llc -debugify-and-strip-all-safe -mtriple=arm64-apple-ios -global-isel -global-isel-abort=1 -verify-machineinstrs -stop-after=aarch64-prelegalizer-combiner -force-legal-indexing %s -o - | FileCheck %s
; Basic pre-indexed case: the constant-offset GEP should be folded into a
; G_INDEXED_LOAD in pre-increment form (trailing operand 1), with the
; updated pointer produced by the load itself.
4 define ptr @test_simple_load_pre(ptr %ptr) {
5 ; CHECK-LABEL: name: test_simple_load_pre
6 ; CHECK: [[BASE:%.*]]:_(p0) = COPY $x0
7 ; CHECK: [[OFFSET:%.*]]:_(s64) = G_CONSTANT i64 42
9 ; CHECK: {{%.*}}:_(s8), [[NEXT:%.*]]:_(p0) = G_INDEXED_LOAD [[BASE]], [[OFFSET]](s64), 1
10 ; CHECK: $x0 = COPY [[NEXT]](p0)
12 %next = getelementptr i8, ptr %ptr, i32 42
13 load volatile i8, ptr %next
; Same shape as test_simple_load_pre but no indexed load may be formed.
; NOTE(review): the function's ret is outside this view — presumably the
; combined result would be unused, so indexing gains nothing; confirm
; against the full test body.
17 define ptr @test_unused_load_pre(ptr %ptr) {
18 ; CHECK-LABEL: name: test_unused_load_pre
19 ; CHECK-NOT: G_INDEXED_LOAD
21 %next = getelementptr i8, ptr %ptr, i32 42
22 load volatile i8, ptr %next
; The indexed load's updated pointer has several uses (a store and a call)
; in blocks dominated by the load; all of them should be rewritten to use
; the G_INDEXED_LOAD result, leaving no residual G_PTR_ADD.
26 define void @test_load_multiple_dominated(ptr %ptr, i1 %tst, i1 %tst2) {
27 ; CHECK-LABEL: name: test_load_multiple_dominated
28 ; CHECK: [[BASE:%.*]]:_(p0) = COPY $x0
29 ; CHECK: [[OFFSET:%.*]]:_(s64) = G_CONSTANT i64 42
30 ; CHECK-NOT: G_PTR_ADD
31 ; CHECK: {{%.*}}:_(s8), [[NEXT:%.*]]:_(p0) = G_INDEXED_LOAD [[BASE]], [[OFFSET]](s64), 1
32 ; CHECK: $x0 = COPY [[NEXT]](p0)
33 %next = getelementptr i8, ptr %ptr, i32 42
34 br i1 %tst, label %do_load, label %end
37 load volatile i8, ptr %next
38 br i1 %tst2, label %bb1, label %bb2
41 store volatile ptr %next, ptr undef
45 call void @bar(ptr %next)
; Pre-indexed store: the GEP folds into a G_INDEXED_STORE (trailing
; operand 1) whose only result is the updated pointer.
52 define ptr @test_simple_store_pre(ptr %ptr) {
53 ; CHECK-LABEL: name: test_simple_store_pre
54 ; CHECK: [[BASE:%.*]]:_(p0) = COPY $x0
55 ; CHECK: [[VAL:%.*]]:_(s8) = G_CONSTANT i8 0
56 ; CHECK: [[OFFSET:%.*]]:_(s64) = G_CONSTANT i64 42
57 ; CHECK-NOT: G_PTR_ADD
58 ; CHECK: [[NEXT:%.*]]:_(p0) = G_INDEXED_STORE [[VAL]](s8), [[BASE]], [[OFFSET]](s64), 1
59 ; CHECK: $x0 = COPY [[NEXT]](p0)
61 %next = getelementptr i8, ptr %ptr, i32 42
62 store volatile i8 0, ptr %next
66 ; The potentially pre-indexed address is used as the value stored. Converting
67 ; would produce the value too late but only by one instruction.
; (Storing %next to %next means the indexed store would need its own
; result as an input operand, so the combine must not fire.)
68 define ptr @test_store_pre_val_loop(ptr %ptr) {
69 ; CHECK-LABEL: name: test_store_pre_val_loop
73 %next = getelementptr ptr, ptr %ptr, i32 42
74 store volatile ptr %next, ptr %next
78 ; Potentially pre-indexed address is used between GEP computing it and load.
; (The call to @bar consumes %next before the load, so the address must
; already exist at that point; pre-indexing would define it too late.)
79 define ptr @test_load_pre_before(ptr %ptr) {
80 ; CHECK-LABEL: name: test_load_pre_before
85 %next = getelementptr i8, ptr %ptr, i32 42
86 call void @bar(ptr %next)
87 load volatile i8, ptr %next
91 ; Materializing the base into a writable register (from sp/fp) would be just as
92 ; bad as the original GEP.
; Base is an alloca (frame index), so no indexed form is expected.
93 define ptr @test_alloca_load_pre() {
94 ; CHECK-LABEL: name: test_alloca_load_pre
98 %ptr = alloca i8, i32 128
99 %next = getelementptr i8, ptr %ptr, i32 42
100 load volatile i8, ptr %next
104 ; Load does not dominate use of its address. No indexing.
; (%next is also consumed on the use_addr path, which the load in
; do_indexed does not dominate, so the updated pointer cannot replace it.)
105 define ptr @test_pre_nodom(ptr %in, i1 %tst) {
106 ; CHECK-LABEL: name: test_pre_nodom
110 %next = getelementptr i8, ptr %in, i32 16
111 br i1 %tst, label %do_indexed, label %use_addr
114 %val = load i8, ptr %next
115 store i8 %val, ptr @var
116 store ptr %next, ptr @varp8
; Basic post-indexed case: load from the unmodified base, then advance.
; Trailing operand 0 on G_INDEXED_LOAD selects the post-increment form.
123 define ptr @test_simple_load_post(ptr %ptr) {
124 ; CHECK-LABEL: name: test_simple_load_post
125 ; CHECK: [[BASE:%.*]]:_(p0) = COPY $x0
126 ; CHECK: [[OFFSET:%.*]]:_(s64) = G_CONSTANT i64 42
127 ; CHECK-NOT: G_PTR_ADD
128 ; CHECK: {{%.*}}:_(s8), [[NEXT:%.*]]:_(p0) = G_INDEXED_LOAD [[BASE]], [[OFFSET]](s64), 0
129 ; CHECK: $x0 = COPY [[NEXT]](p0)
131 %next = getelementptr i8, ptr %ptr, i32 42
132 load volatile i8, ptr %ptr
; Post-indexed combine where the GEP appears *after* the load in the IR;
; the offset (from @get_offset) still dominates the load, so the combine
; is legal.
136 define ptr @test_simple_load_post_gep_after(ptr %ptr) {
137 ; CHECK-LABEL: name: test_simple_load_post_gep_after
138 ; CHECK: [[BASE:%.*]]:_(p0) = COPY $x0
139 ; CHECK: BL @get_offset
140 ; CHECK: [[OFFSET:%.*]]:_(s64) = COPY $x0
141 ; CHECK: {{%.*}}:_(s8), [[ADDR:%.*]]:_(p0) = G_INDEXED_LOAD [[BASE]], [[OFFSET]](s64), 0
142 ; CHECK: $x0 = COPY [[ADDR]](p0)
144 %offset = call i64 @get_offset()
145 load volatile i8, ptr %ptr
146 %next = getelementptr i8, ptr %ptr, i64 %offset
; The combiner should keep scanning past unrelated intermediate
; instructions (the ptrtoint and the store to @var) and still form a
; post-indexed load.
150 define ptr @test_load_post_keep_looking(ptr %ptr) {
151 ; CHECK-LABEL: name: test_load_post_keep_looking
152 ; CHECK: G_INDEXED_LOAD
154 %offset = call i64 @get_offset()
155 load volatile i8, ptr %ptr
156 %intval = ptrtoint ptr %ptr to i8
157 store i8 %intval, ptr @var
159 %next = getelementptr i8, ptr %ptr, i64 %offset
163 ; Base is frame index. Using indexing would need copy anyway.
164 define ptr @test_load_post_alloca() {
165 ; CHECK-LABEL: name: test_load_post_alloca
169 %ptr = alloca i8, i32 128
170 %next = getelementptr i8, ptr %ptr, i32 42
; Load is from the base, so only the post-indexed form could apply here.
171 load volatile i8, ptr %ptr
175 ; Offset computation does not dominate the load we might be indexing.
; (@get_offset is called after the load, so the offset operand would not
; be available when the indexed load executes — combine must not fire.)
176 define ptr @test_load_post_gep_offset_after(ptr %ptr) {
177 ; CHECK-LABEL: name: test_load_post_gep_offset_after
179 ; CHECK: BL @get_offset
182 load volatile i8, ptr %ptr
183 %offset = call i64 @get_offset()
184 %next = getelementptr i8, ptr %ptr, i64 %offset
; External helpers and globals referenced by the tests above.
; NOTE(review): @var is used (stores above) but its definition is not
; visible in this chunk — presumably declared nearby; confirm.
188 declare void @bar(ptr)
189 declare i64 @get_offset()
191 @varp8 = global ptr null