; RUN: llc -mtriple=riscv32 < %s | FileCheck %s -check-prefix=RV32I
; RUN: llc -mtriple=riscv64 < %s | FileCheck %s -check-prefix=RV64I
; RUN: llc -mtriple=riscv32 -mattr=+save-restore < %s | FileCheck %s -check-prefix=RV32I-SR
; RUN: llc -mtriple=riscv64 -mattr=+save-restore < %s | FileCheck %s -check-prefix=RV64I-SR
; RUN: llc -mtriple=riscv32 -mattr=+f,+save-restore -target-abi=ilp32f < %s | FileCheck %s -check-prefix=RV32I-FP-SR
; RUN: llc -mtriple=riscv64 -mattr=+f,+d,+save-restore -target-abi=lp64d < %s | FileCheck %s -check-prefix=RV64I-FP-SR

; Check that the correct save/restore libcalls are generated.
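; (Background: with +save-restore the prologue/epilogue spills and reloads of
; callee-saved registers are outlined into the __riscv_save_<n> and
; __riscv_restore_<n> runtime routines, trading a small runtime cost for
; reduced code size.)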

@var0 = global [18 x i32] zeroinitializer
@var1 = global [24 x i32] zeroinitializer
@var2 = global [30 x i32] zeroinitializer

define void @callee_saved0() nounwind {
; RV32I-LABEL: callee_saved0:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: callee_saved0:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: callee_saved0:
; RV32I-SR: call t0, __riscv_save_5
; RV32I-SR: tail __riscv_restore_5

; RV64I-SR-LABEL: callee_saved0:
; RV64I-SR: call t0, __riscv_save_5
; RV64I-SR: tail __riscv_restore_5

; RV32I-FP-SR-LABEL: callee_saved0:
; RV32I-FP-SR: call t0, __riscv_save_5
; RV32I-FP-SR: tail __riscv_restore_5

; RV64I-FP-SR-LABEL: callee_saved0:
; RV64I-FP-SR: call t0, __riscv_save_5
; RV64I-FP-SR: tail __riscv_restore_5
  %val = load [18 x i32], ptr @var0
  store volatile [18 x i32] %val, ptr @var0
  ret void
}

define void @callee_saved1() nounwind {
; RV32I-LABEL: callee_saved1:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: callee_saved1:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: callee_saved1:
; RV32I-SR: call t0, __riscv_save_11
; RV32I-SR: tail __riscv_restore_11

; RV64I-SR-LABEL: callee_saved1:
; RV64I-SR: call t0, __riscv_save_11
; RV64I-SR: tail __riscv_restore_11

; RV32I-FP-SR-LABEL: callee_saved1:
; RV32I-FP-SR: call t0, __riscv_save_11
; RV32I-FP-SR: tail __riscv_restore_11

; RV64I-FP-SR-LABEL: callee_saved1:
; RV64I-FP-SR: call t0, __riscv_save_11
; RV64I-FP-SR: tail __riscv_restore_11
  %val = load [24 x i32], ptr @var1
  store volatile [24 x i32] %val, ptr @var1
  ret void
}

define void @callee_saved2() nounwind {
; RV32I-LABEL: callee_saved2:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: callee_saved2:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: callee_saved2:
; RV32I-SR: call t0, __riscv_save_12
; RV32I-SR: tail __riscv_restore_12

; RV64I-SR-LABEL: callee_saved2:
; RV64I-SR: call t0, __riscv_save_12
; RV64I-SR: tail __riscv_restore_12

; RV32I-FP-SR-LABEL: callee_saved2:
; RV32I-FP-SR: call t0, __riscv_save_12
; RV32I-FP-SR: tail __riscv_restore_12

; RV64I-FP-SR-LABEL: callee_saved2:
; RV64I-FP-SR: call t0, __riscv_save_12
; RV64I-FP-SR: tail __riscv_restore_12
  %val = load [30 x i32], ptr @var2
  store volatile [30 x i32] %val, ptr @var2
  ret void
}

; Check that floating point callee saved registers are still manually saved and
; restored.
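; (The save/restore routines only cover ra and the integer callee-saved
; registers, so fs0 below is still spilled and reloaded inline, inside the
; frame set up by the libcalls.)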

define void @callee_saved_fp() nounwind {
; RV32I-LABEL: callee_saved_fp:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: callee_saved_fp:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: callee_saved_fp:
; RV32I-SR: call t0, __riscv_save_7
; RV32I-SR: tail __riscv_restore_7

; RV64I-SR-LABEL: callee_saved_fp:
; RV64I-SR: call t0, __riscv_save_7
; RV64I-SR: tail __riscv_restore_7

; RV32I-FP-SR-LABEL: callee_saved_fp:
; RV32I-FP-SR: call t0, __riscv_save_7
; RV32I-FP-SR-NEXT: addi sp, sp, -16
; RV32I-FP-SR-NEXT: fsw fs0, 12(sp)
; RV32I-FP-SR: flw fs0, 12(sp)
; RV32I-FP-SR-NEXT: addi sp, sp, 16
; RV32I-FP-SR-NEXT: tail __riscv_restore_7

; RV64I-FP-SR-LABEL: callee_saved_fp:
; RV64I-FP-SR: call t0, __riscv_save_7
; RV64I-FP-SR-NEXT: addi sp, sp, -16
; RV64I-FP-SR-NEXT: fsd fs0, 8(sp)
; RV64I-FP-SR: fld fs0, 8(sp)
; RV64I-FP-SR-NEXT: addi sp, sp, 16
; RV64I-FP-SR-NEXT: tail __riscv_restore_7
  call void asm sideeffect "", "~{f8},~{x9},~{x18},~{x19},~{x20},~{x21},~{x22}"()
  ret void
}

; Check that preserving tail calls is preferred over save/restore
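; (The __riscv_restore_<n> routines reload the saved registers and then
; return, so they can only replace a plain epilogue; a function ending in a
; genuine tail call keeps the tail call and skips the libcalls instead.)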

declare i32 @tail_callee(i32 %i)

define i32 @tail_call(i32 %i) nounwind {
; RV32I-LABEL: tail_call:
; RV32I-NOT: call t0, __riscv_save
; RV32I: tail tail_callee
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: tail_call:
; RV64I-NOT: call t0, __riscv_save
; RV64I: tail tail_callee
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: tail_call:
; RV32I-SR-NOT: call t0, __riscv_save
; RV32I-SR: tail tail_callee
; RV32I-SR-NOT: tail __riscv_restore

; RV64I-SR-LABEL: tail_call:
; RV64I-SR-NOT: call t0, __riscv_save
; RV64I-SR: tail tail_callee
; RV64I-SR-NOT: tail __riscv_restore

; RV32I-FP-SR-LABEL: tail_call:
; RV32I-FP-SR-NOT: call t0, __riscv_save
; RV32I-FP-SR: tail tail_callee
; RV32I-FP-SR-NOT: tail __riscv_restore

; RV64I-FP-SR-LABEL: tail_call:
; RV64I-FP-SR-NOT: call t0, __riscv_save
; RV64I-FP-SR: tail tail_callee
; RV64I-FP-SR-NOT: tail __riscv_restore
  %val = load [18 x i32], ptr @var0
  store volatile [18 x i32] %val, ptr @var0
  %r = tail call i32 @tail_callee(i32 %i)
  ret i32 %r
}

; Check that functions with varargs do not use save/restore code

declare void @llvm.va_start(ptr)
declare void @llvm.va_end(ptr)

define i32 @varargs(ptr %fmt, ...) nounwind {
; RV32I-LABEL: varargs:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: varargs:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: varargs:
; RV32I-SR-NOT: call t0, __riscv_save
; RV32I-SR-NOT: tail __riscv_restore

; RV64I-SR-LABEL: varargs:
; RV64I-SR-NOT: call t0, __riscv_save
; RV64I-SR-NOT: tail __riscv_restore

; RV32I-FP-SR-LABEL: varargs:
; RV32I-FP-SR-NOT: call t0, __riscv_save
; RV32I-FP-SR-NOT: tail __riscv_restore

; RV64I-FP-SR-LABEL: varargs:
; RV64I-FP-SR-NOT: call t0, __riscv_save
; RV64I-FP-SR-NOT: tail __riscv_restore
  %va = alloca ptr, align 4
  call void @llvm.va_start(ptr %va)
  %argp.cur = load ptr, ptr %va, align 4
  %argp.next = getelementptr inbounds i8, ptr %argp.cur, i32 4
  store ptr %argp.next, ptr %va, align 4
  %1 = load i32, ptr %argp.cur, align 4
  call void @llvm.va_end(ptr %va)
  ret i32 %1
}

define void @many_args(i32, i32, i32, i32, i32, i32, i32, i32, i32) nounwind {
; RV32I-LABEL: many_args:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: many_args:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: many_args:
; RV32I-SR: call t0, __riscv_save_5
; RV32I-SR: tail __riscv_restore_5

; RV64I-SR-LABEL: many_args:
; RV64I-SR: call t0, __riscv_save_5
; RV64I-SR: tail __riscv_restore_5

; RV32I-FP-SR-LABEL: many_args:
; RV32I-FP-SR: call t0, __riscv_save_5
; RV32I-FP-SR: tail __riscv_restore_5

; RV64I-FP-SR-LABEL: many_args:
; RV64I-FP-SR: call t0, __riscv_save_5
; RV64I-FP-SR: tail __riscv_restore_5
  %val = load [18 x i32], ptr @var0
  store volatile [18 x i32] %val, ptr @var0
  ret void
}

; Check that dynamic allocation calculations remain correct
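; (Dynamic allocas require a frame pointer, so sp is restored from s0 in the
; epilogue; the checks below verify those adjustments still match the frame
; laid out by the save/restore libcalls.)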

declare ptr @llvm.stacksave()
declare void @llvm.stackrestore(ptr)
declare void @notdead(ptr)

define void @alloca(i32 %n) nounwind {
; RV32I-LABEL: alloca:
; RV32I-NOT: call t0, __riscv_save
; RV32I: addi s0, sp, 16
; RV32I: addi sp, s0, -16
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: alloca:
; RV64I-NOT: call t0, __riscv_save
; RV64I: addi s0, sp, 32
; RV64I: addi sp, s0, -32
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: alloca:
; RV32I-SR: call t0, __riscv_save_2
; RV32I-SR: addi s0, sp, 16
; RV32I-SR: addi sp, s0, -16
; RV32I-SR: tail __riscv_restore_2

; RV64I-SR-LABEL: alloca:
; RV64I-SR: call t0, __riscv_save_2
; RV64I-SR: addi s0, sp, 32
; RV64I-SR: addi sp, s0, -32
; RV64I-SR: tail __riscv_restore_2

; RV32I-FP-SR-LABEL: alloca:
; RV32I-FP-SR: call t0, __riscv_save_2
; RV32I-FP-SR: addi s0, sp, 16
; RV32I-FP-SR: addi sp, s0, -16
; RV32I-FP-SR: tail __riscv_restore_2

; RV64I-FP-SR-LABEL: alloca:
; RV64I-FP-SR: call t0, __riscv_save_2
; RV64I-FP-SR: addi s0, sp, 32
; RV64I-FP-SR: addi sp, s0, -32
; RV64I-FP-SR: tail __riscv_restore_2
  %sp = call ptr @llvm.stacksave()
  %addr = alloca i8, i32 %n
  call void @notdead(ptr %addr)
  call void @llvm.stackrestore(ptr %sp)
  ret void
}

; Check that functions with interrupt attribute do not use save/restore code
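; (Interrupt handlers return with mret/sret rather than a plain ret and must
; preserve more than the usual callee-saved set, so the ret-based
; __riscv_restore_<n> routines cannot be used for them.)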

declare i32 @foo(...)

define void @interrupt() nounwind "interrupt"="supervisor" {
; RV32I-LABEL: interrupt:
; RV32I-NOT: call t0, __riscv_save
; RV32I-NOT: tail __riscv_restore

; RV64I-LABEL: interrupt:
; RV64I-NOT: call t0, __riscv_save
; RV64I-NOT: tail __riscv_restore

; RV32I-SR-LABEL: interrupt:
; RV32I-SR-NOT: call t0, __riscv_save
; RV32I-SR-NOT: tail __riscv_restore

; RV64I-SR-LABEL: interrupt:
; RV64I-SR-NOT: call t0, __riscv_save
; RV64I-SR-NOT: tail __riscv_restore

; RV32I-FP-SR-LABEL: interrupt:
; RV32I-FP-SR-NOT: call t0, __riscv_save
; RV32I-FP-SR-NOT: tail __riscv_restore

; RV64I-FP-SR-LABEL: interrupt:
; RV64I-FP-SR-NOT: call t0, __riscv_save
; RV64I-FP-SR-NOT: tail __riscv_restore
  %call = call i32 (...) @foo()
  ret void
}