; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc %s -verify-machineinstrs -mtriple aarch64-apple-darwin -global-isel -o - 2>&1 | FileCheck %s

; There are two things we want to test here:
;  (1) We can tail call musttail calls.
;  (2) We spill and reload all of the arguments around a normal call.

declare i32 @musttail_variadic_callee(i32, ...)
define i32 @test_musttail_variadic(i32 %arg0, ...) {
; CHECK-LABEL: test_musttail_variadic:
; CHECK:       ; %bb.0:
; CHECK-NEXT:    b _musttail_variadic_callee
  %r = musttail call i32 (i32, ...) @musttail_variadic_callee(i32 %arg0, ...)
  ret i32 %r
}

declare [2 x i64] @musttail_variadic_aggret_callee(i32 %arg0, ...)
define [2 x i64] @test_musttail_variadic_aggret(i32 %arg0, ...) {
; CHECK-LABEL: test_musttail_variadic_aggret:
; CHECK:       ; %bb.0:
; CHECK-NEXT:    b _musttail_variadic_aggret_callee
  %r = musttail call [2 x i64] (i32, ...) @musttail_variadic_aggret_callee(i32 %arg0, ...)
  ret [2 x i64] %r
}
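
; Both cases above lower to a single unconditional branch: caller and callee
; share the variadic signature, so every argument register is already in the
; right place, and the [2 x i64] aggregate is returned in x0/x1, so no frame
; has to be set up before the tail call.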

; Test musttailing with a normal call in the block. Test that we spill and
; restore, as a normal call will clobber all argument registers.
@asdf = internal constant [4 x i8] c"asdf"
declare void @puts(ptr)
define i32 @test_musttail_variadic_spill(i32 %arg0, ...) {
; CHECK-LABEL: test_musttail_variadic_spill:
; CHECK:       ; %bb.0:
; CHECK-NEXT:    sub sp, sp, #224
; CHECK-NEXT:    stp x28, x27, [sp, #128] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x26, x25, [sp, #144] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x24, x23, [sp, #160] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x22, x21, [sp, #176] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x20, x19, [sp, #192] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x29, x30, [sp, #208] ; 16-byte Folded Spill
; CHECK-NEXT:    .cfi_def_cfa_offset 224
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    .cfi_offset w19, -24
; CHECK-NEXT:    .cfi_offset w20, -32
; CHECK-NEXT:    .cfi_offset w21, -40
; CHECK-NEXT:    .cfi_offset w22, -48
; CHECK-NEXT:    .cfi_offset w23, -56
; CHECK-NEXT:    .cfi_offset w24, -64
; CHECK-NEXT:    .cfi_offset w25, -72
; CHECK-NEXT:    .cfi_offset w26, -80
; CHECK-NEXT:    .cfi_offset w27, -88
; CHECK-NEXT:    .cfi_offset w28, -96
; CHECK-NEXT:    mov w19, w0
; CHECK-NEXT:  Lloh0:
; CHECK-NEXT:    adrp x0, _asdf@PAGE
; CHECK-NEXT:  Lloh1:
; CHECK-NEXT:    add x0, x0, _asdf@PAGEOFF
; CHECK-NEXT:    mov x20, x1
; CHECK-NEXT:    mov x21, x2
; CHECK-NEXT:    mov x22, x3
; CHECK-NEXT:    mov x23, x4
; CHECK-NEXT:    mov x24, x5
; CHECK-NEXT:    mov x25, x6
; CHECK-NEXT:    mov x26, x7
; CHECK-NEXT:    stp q7, q6, [sp] ; 32-byte Folded Spill
; CHECK-NEXT:    mov x27, x8
; CHECK-NEXT:    stp q5, q4, [sp, #32] ; 32-byte Folded Spill
; CHECK-NEXT:    stp q3, q2, [sp, #64] ; 32-byte Folded Spill
; CHECK-NEXT:    stp q1, q0, [sp, #96] ; 32-byte Folded Spill
; CHECK-NEXT:    bl _puts
; CHECK-NEXT:    ldp q1, q0, [sp, #96] ; 32-byte Folded Reload
; CHECK-NEXT:    mov w0, w19
; CHECK-NEXT:    ldp q3, q2, [sp, #64] ; 32-byte Folded Reload
; CHECK-NEXT:    mov x1, x20
; CHECK-NEXT:    ldp q5, q4, [sp, #32] ; 32-byte Folded Reload
; CHECK-NEXT:    mov x2, x21
; CHECK-NEXT:    ldp q7, q6, [sp] ; 32-byte Folded Reload
; CHECK-NEXT:    mov x3, x22
; CHECK-NEXT:    mov x4, x23
; CHECK-NEXT:    mov x5, x24
; CHECK-NEXT:    mov x6, x25
; CHECK-NEXT:    mov x7, x26
; CHECK-NEXT:    mov x8, x27
; CHECK-NEXT:    ldp x29, x30, [sp, #208] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x20, x19, [sp, #192] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x22, x21, [sp, #176] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x24, x23, [sp, #160] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x26, x25, [sp, #144] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x28, x27, [sp, #128] ; 16-byte Folded Reload
; CHECK-NEXT:    add sp, sp, #224
; CHECK-NEXT:    b _musttail_variadic_callee
; CHECK-NEXT:    .loh AdrpAdd Lloh0, Lloh1
  call void @puts(ptr @asdf)
  %r = musttail call i32 (i32, ...) @musttail_variadic_callee(i32 %arg0, ...)
  ret i32 %r
}
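
; The sequence above keeps every variadic argument live across the call to
; @puts: w0 and x1-x8 are copied into callee-saved registers, q0-q7 are
; spilled to the local frame, and everything is moved back (and the frame
; torn down) before the branch to the musttail callee.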

; Test musttailing with a varargs call in the block. Test that we spill and
; reload all arguments in the variadic argument pack.
declare void @llvm.va_start(ptr) nounwind
declare ptr @get_f(ptr %this)
define void @f_thunk(ptr %this, ...) {
; CHECK-LABEL: f_thunk:
; CHECK:       ; %bb.0:
; CHECK-NEXT:    sub sp, sp, #256
; CHECK-NEXT:    stp x28, x27, [sp, #160] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x26, x25, [sp, #176] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x24, x23, [sp, #192] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x22, x21, [sp, #208] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x20, x19, [sp, #224] ; 16-byte Folded Spill
; CHECK-NEXT:    stp x29, x30, [sp, #240] ; 16-byte Folded Spill
; CHECK-NEXT:    .cfi_def_cfa_offset 256
; CHECK-NEXT:    .cfi_offset w30, -8
; CHECK-NEXT:    .cfi_offset w29, -16
; CHECK-NEXT:    .cfi_offset w19, -24
; CHECK-NEXT:    .cfi_offset w20, -32
; CHECK-NEXT:    .cfi_offset w21, -40
; CHECK-NEXT:    .cfi_offset w22, -48
; CHECK-NEXT:    .cfi_offset w23, -56
; CHECK-NEXT:    .cfi_offset w24, -64
; CHECK-NEXT:    .cfi_offset w25, -72
; CHECK-NEXT:    .cfi_offset w26, -80
; CHECK-NEXT:    .cfi_offset w27, -88
; CHECK-NEXT:    .cfi_offset w28, -96
; CHECK-NEXT:    mov x27, x8
; CHECK-NEXT:    add x8, sp, #128
; CHECK-NEXT:    add x9, sp, #256
; CHECK-NEXT:    mov x19, x0
; CHECK-NEXT:    mov x20, x1
; CHECK-NEXT:    mov x21, x2
; CHECK-NEXT:    mov x22, x3
; CHECK-NEXT:    mov x23, x4
; CHECK-NEXT:    mov x24, x5
; CHECK-NEXT:    mov x25, x6
; CHECK-NEXT:    mov x26, x7
; CHECK-NEXT:    stp q7, q6, [sp] ; 32-byte Folded Spill
; CHECK-NEXT:    stp q5, q4, [sp, #32] ; 32-byte Folded Spill
; CHECK-NEXT:    stp q3, q2, [sp, #64] ; 32-byte Folded Spill
; CHECK-NEXT:    stp q1, q0, [sp, #96] ; 32-byte Folded Spill
; CHECK-NEXT:    str x9, [x8]
; CHECK-NEXT:    bl _get_f
; CHECK-NEXT:    mov x9, x0
; CHECK-NEXT:    ldp q1, q0, [sp, #96] ; 32-byte Folded Reload
; CHECK-NEXT:    ldp q3, q2, [sp, #64] ; 32-byte Folded Reload
; CHECK-NEXT:    mov x0, x19
; CHECK-NEXT:    ldp q5, q4, [sp, #32] ; 32-byte Folded Reload
; CHECK-NEXT:    mov x1, x20
; CHECK-NEXT:    ldp q7, q6, [sp] ; 32-byte Folded Reload
; CHECK-NEXT:    mov x2, x21
; CHECK-NEXT:    mov x3, x22
; CHECK-NEXT:    mov x4, x23
; CHECK-NEXT:    mov x5, x24
; CHECK-NEXT:    mov x6, x25
; CHECK-NEXT:    mov x7, x26
; CHECK-NEXT:    mov x8, x27
; CHECK-NEXT:    ldp x29, x30, [sp, #240] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x20, x19, [sp, #224] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x22, x21, [sp, #208] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x24, x23, [sp, #192] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x26, x25, [sp, #176] ; 16-byte Folded Reload
; CHECK-NEXT:    ldp x28, x27, [sp, #160] ; 16-byte Folded Reload
; CHECK-NEXT:    add sp, sp, #256
; CHECK-NEXT:    br x9
  %ap = alloca [4 x ptr], align 16
  call void @llvm.va_start(ptr %ap)
  %fptr = call ptr(ptr) @get_f(ptr %this)
  musttail call void (ptr, ...) %fptr(ptr %this, ...)
  ret void
}
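
; In @f_thunk the va_list only has to record the start of the incoming
; stack-argument area (on Darwin it is a single pointer): the add/str pair
; above writes sp+#256 (the on-entry stack pointer) into the alloca before
; the call to @get_f, and the forwarded registers are reloaded before the
; indirect branch through x9.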

; We don't need any spills and reloads here, but we should still emit the
; copies in call lowering.
define void @g_thunk(ptr %fptr_i8, ...) {
; CHECK-LABEL: g_thunk:
; CHECK:       ; %bb.0:
; CHECK-NEXT:    br x0
  musttail call void (ptr, ...) %fptr_i8(ptr %fptr_i8, ...)
  ret void
}
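
; The function pointer doubles as the first argument, so the copies emitted
; during call lowering all end up as no-ops and the thunk reduces to a
; single br x0.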

; Test that this works with multiple exits and basic blocks.
%struct.Foo = type { i1, ptr, ptr }
@g = external global i32
define void @h_thunk(ptr %this, ...) {
; CHECK-LABEL: h_thunk:
; CHECK:       ; %bb.0:
; CHECK-NEXT:    ldrb w9, [x0]
; CHECK-NEXT:    tbz w9, #0, LBB5_2
; CHECK-NEXT:  ; %bb.1: ; %then
; CHECK-NEXT:    ldr x9, [x0, #8]
; CHECK-NEXT:    br x9
; CHECK-NEXT:  LBB5_2: ; %else
; CHECK-NEXT:  Lloh2:
; CHECK-NEXT:    adrp x10, _g@GOTPAGE
; CHECK-NEXT:    ldr x9, [x0, #16]
; CHECK-NEXT:    mov w11, #42 ; =0x2a
; CHECK-NEXT:  Lloh3:
; CHECK-NEXT:    ldr x10, [x10, _g@GOTPAGEOFF]
; CHECK-NEXT:  Lloh4:
; CHECK-NEXT:    str w11, [x10]
; CHECK-NEXT:    br x9
; CHECK-NEXT:    .loh AdrpLdrGotStr Lloh2, Lloh3, Lloh4
  %cond = load i1, ptr %this
  br i1 %cond, label %then, label %else

then:
  %a_p = getelementptr %struct.Foo, ptr %this, i32 0, i32 1
  %a_i8 = load ptr, ptr %a_p
  musttail call void (ptr, ...) %a_i8(ptr %this, ...)
  ret void

else:
  %b_p = getelementptr %struct.Foo, ptr %this, i32 0, i32 2
  %b_i8 = load ptr, ptr %b_p
  store i32 42, ptr @g
  musttail call void (ptr, ...) %b_i8(ptr %this, ...)
  ret void
}
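
; Each exit block of @h_thunk ends in its own indirect tail branch through x9;
; on the else path the store of 42 to @g completes before the branch, which is
; why the str appears ahead of the final br in the checks above.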