1 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 2
2 ; RUN: llc -mtriple=riscv32 -mattr=+zcmp -verify-machineinstrs < %s \
3 ; RUN: | FileCheck %s -check-prefixes=RV32IZCMP
4 ; RUN: llc -mtriple=riscv64 -mattr=+zcmp -verify-machineinstrs < %s \
5 ; RUN: | FileCheck %s -check-prefixes=RV64IZCMP
6 ; RUN: llc -mtriple=riscv32 -mattr=+zcmp,+save-restore \
7 ; RUN: -verify-machineinstrs < %s | FileCheck %s -check-prefixes=RV32IZCMP-SR
8 ; RUN: llc -mtriple=riscv64 -mattr=+zcmp,+save-restore \
9 ; RUN: -verify-machineinstrs < %s | FileCheck %s -check-prefixes=RV64IZCMP-SR
10 ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
11 ; RUN: | FileCheck -check-prefixes=RV32I %s
12 ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
13 ; RUN: | FileCheck -check-prefixes=RV64I %s
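; The *IZCMP prefixes test +zcmp alone, the *IZCMP-SR prefixes test +zcmp together with
; +save-restore, and RV32I/RV64I are the baselines without either feature.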
15 declare void @test(ptr)
16 declare void @callee_void(ptr)
17 declare i32 @callee(ptr)
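; foo's 512-byte buffer is too large to fold entirely into cm.push, so the checks below
; expect cm.push {ra}, -64 followed by a separate addi sp, sp, -464 (528 bytes of frame
; in total), with matching CFI updates in the epilogue.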
20 ; RV32IZCMP-LABEL: foo:
22 ; RV32IZCMP-NEXT: cm.push {ra}, -64
23 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 64
24 ; RV32IZCMP-NEXT: .cfi_offset ra, -4
25 ; RV32IZCMP-NEXT: addi sp, sp, -464
26 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 528
27 ; RV32IZCMP-NEXT: mv a0, sp
28 ; RV32IZCMP-NEXT: call test
29 ; RV32IZCMP-NEXT: addi sp, sp, 464
30 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 64
31 ; RV32IZCMP-NEXT: cm.popretz {ra}, 64
33 ; RV64IZCMP-LABEL: foo:
35 ; RV64IZCMP-NEXT: cm.push {ra}, -64
36 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 64
37 ; RV64IZCMP-NEXT: .cfi_offset ra, -8
38 ; RV64IZCMP-NEXT: addi sp, sp, -464
39 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 528
40 ; RV64IZCMP-NEXT: mv a0, sp
41 ; RV64IZCMP-NEXT: call test
42 ; RV64IZCMP-NEXT: addi sp, sp, 464
43 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 64
44 ; RV64IZCMP-NEXT: cm.popretz {ra}, 64
46 ; RV32IZCMP-SR-LABEL: foo:
47 ; RV32IZCMP-SR: # %bb.0:
48 ; RV32IZCMP-SR-NEXT: cm.push {ra}, -64
49 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 64
50 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -4
51 ; RV32IZCMP-SR-NEXT: addi sp, sp, -464
52 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 528
53 ; RV32IZCMP-SR-NEXT: mv a0, sp
54 ; RV32IZCMP-SR-NEXT: call test
55 ; RV32IZCMP-SR-NEXT: addi sp, sp, 464
56 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 64
57 ; RV32IZCMP-SR-NEXT: cm.popretz {ra}, 64
59 ; RV64IZCMP-SR-LABEL: foo:
60 ; RV64IZCMP-SR: # %bb.0:
61 ; RV64IZCMP-SR-NEXT: cm.push {ra}, -64
62 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 64
63 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -8
64 ; RV64IZCMP-SR-NEXT: addi sp, sp, -464
65 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 528
66 ; RV64IZCMP-SR-NEXT: mv a0, sp
67 ; RV64IZCMP-SR-NEXT: call test
68 ; RV64IZCMP-SR-NEXT: addi sp, sp, 464
69 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 64
70 ; RV64IZCMP-SR-NEXT: cm.popretz {ra}, 64
74 ; RV32I-NEXT: addi sp, sp, -528
75 ; RV32I-NEXT: .cfi_def_cfa_offset 528
76 ; RV32I-NEXT: sw ra, 524(sp) # 4-byte Folded Spill
77 ; RV32I-NEXT: .cfi_offset ra, -4
78 ; RV32I-NEXT: addi a0, sp, 12
79 ; RV32I-NEXT: call test
80 ; RV32I-NEXT: li a0, 0
81 ; RV32I-NEXT: lw ra, 524(sp) # 4-byte Folded Reload
82 ; RV32I-NEXT: .cfi_restore ra
83 ; RV32I-NEXT: addi sp, sp, 528
84 ; RV32I-NEXT: .cfi_def_cfa_offset 0
89 ; RV64I-NEXT: addi sp, sp, -528
90 ; RV64I-NEXT: .cfi_def_cfa_offset 528
91 ; RV64I-NEXT: sd ra, 520(sp) # 8-byte Folded Spill
92 ; RV64I-NEXT: .cfi_offset ra, -8
93 ; RV64I-NEXT: addi a0, sp, 8
94 ; RV64I-NEXT: call test
95 ; RV64I-NEXT: li a0, 0
96 ; RV64I-NEXT: ld ra, 520(sp) # 8-byte Folded Reload
97 ; RV64I-NEXT: .cfi_restore ra
98 ; RV64I-NEXT: addi sp, sp, 528
99 ; RV64I-NEXT: .cfi_def_cfa_offset 0
101 %1 = alloca [512 x i8]
102 %2 = getelementptr [512 x i8], ptr %1, i32 0, i32 0
103 call void @test(ptr %2)
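; pushpopret0/1/neg1/2 only differ in the returned constant: returning 0 lets the
; epilogue use cm.popretz (no separate li a0, 0), while the non-zero cases keep an
; explicit li before a plain cm.popret.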
107 define i32 @pushpopret0(i32 signext %size) {
108 ; RV32IZCMP-LABEL: pushpopret0:
109 ; RV32IZCMP: # %bb.0: # %entry
110 ; RV32IZCMP-NEXT: cm.push {ra, s0}, -16
111 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 16
112 ; RV32IZCMP-NEXT: .cfi_offset ra, -8
113 ; RV32IZCMP-NEXT: .cfi_offset s0, -4
114 ; RV32IZCMP-NEXT: addi s0, sp, 16
115 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
116 ; RV32IZCMP-NEXT: addi a0, a0, 15
117 ; RV32IZCMP-NEXT: andi a0, a0, -16
118 ; RV32IZCMP-NEXT: sub a0, sp, a0
119 ; RV32IZCMP-NEXT: mv sp, a0
120 ; RV32IZCMP-NEXT: call callee_void
121 ; RV32IZCMP-NEXT: addi sp, s0, -16
122 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 16
123 ; RV32IZCMP-NEXT: cm.popretz {ra, s0}, 16
125 ; RV64IZCMP-LABEL: pushpopret0:
126 ; RV64IZCMP: # %bb.0: # %entry
127 ; RV64IZCMP-NEXT: cm.push {ra, s0}, -16
128 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 16
129 ; RV64IZCMP-NEXT: .cfi_offset ra, -16
130 ; RV64IZCMP-NEXT: .cfi_offset s0, -8
131 ; RV64IZCMP-NEXT: addi s0, sp, 16
132 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
133 ; RV64IZCMP-NEXT: slli a0, a0, 32
134 ; RV64IZCMP-NEXT: srli a0, a0, 32
135 ; RV64IZCMP-NEXT: addi a0, a0, 15
136 ; RV64IZCMP-NEXT: andi a0, a0, -16
137 ; RV64IZCMP-NEXT: sub a0, sp, a0
138 ; RV64IZCMP-NEXT: mv sp, a0
139 ; RV64IZCMP-NEXT: call callee_void
140 ; RV64IZCMP-NEXT: addi sp, s0, -16
141 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 16
142 ; RV64IZCMP-NEXT: cm.popretz {ra, s0}, 16
144 ; RV32IZCMP-SR-LABEL: pushpopret0:
145 ; RV32IZCMP-SR: # %bb.0: # %entry
146 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0}, -16
147 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
148 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -8
149 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -4
150 ; RV32IZCMP-SR-NEXT: addi s0, sp, 16
151 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
152 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
153 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
154 ; RV32IZCMP-SR-NEXT: sub a0, sp, a0
155 ; RV32IZCMP-SR-NEXT: mv sp, a0
156 ; RV32IZCMP-SR-NEXT: call callee_void
157 ; RV32IZCMP-SR-NEXT: addi sp, s0, -16
158 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
159 ; RV32IZCMP-SR-NEXT: cm.popretz {ra, s0}, 16
161 ; RV64IZCMP-SR-LABEL: pushpopret0:
162 ; RV64IZCMP-SR: # %bb.0: # %entry
163 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0}, -16
164 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
165 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -16
166 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -8
167 ; RV64IZCMP-SR-NEXT: addi s0, sp, 16
168 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
169 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
170 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
171 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
172 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
173 ; RV64IZCMP-SR-NEXT: sub a0, sp, a0
174 ; RV64IZCMP-SR-NEXT: mv sp, a0
175 ; RV64IZCMP-SR-NEXT: call callee_void
176 ; RV64IZCMP-SR-NEXT: addi sp, s0, -16
177 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
178 ; RV64IZCMP-SR-NEXT: cm.popretz {ra, s0}, 16
180 ; RV32I-LABEL: pushpopret0:
181 ; RV32I: # %bb.0: # %entry
182 ; RV32I-NEXT: addi sp, sp, -16
183 ; RV32I-NEXT: .cfi_def_cfa_offset 16
184 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
185 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
186 ; RV32I-NEXT: .cfi_offset ra, -4
187 ; RV32I-NEXT: .cfi_offset s0, -8
188 ; RV32I-NEXT: addi s0, sp, 16
189 ; RV32I-NEXT: .cfi_def_cfa s0, 0
190 ; RV32I-NEXT: addi a0, a0, 15
191 ; RV32I-NEXT: andi a0, a0, -16
192 ; RV32I-NEXT: sub a0, sp, a0
193 ; RV32I-NEXT: mv sp, a0
194 ; RV32I-NEXT: call callee_void
195 ; RV32I-NEXT: li a0, 0
196 ; RV32I-NEXT: addi sp, s0, -16
197 ; RV32I-NEXT: .cfi_def_cfa sp, 16
198 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
199 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
200 ; RV32I-NEXT: .cfi_restore ra
201 ; RV32I-NEXT: .cfi_restore s0
202 ; RV32I-NEXT: addi sp, sp, 16
203 ; RV32I-NEXT: .cfi_def_cfa_offset 0
206 ; RV64I-LABEL: pushpopret0:
207 ; RV64I: # %bb.0: # %entry
208 ; RV64I-NEXT: addi sp, sp, -16
209 ; RV64I-NEXT: .cfi_def_cfa_offset 16
210 ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
211 ; RV64I-NEXT: sd s0, 0(sp) # 8-byte Folded Spill
212 ; RV64I-NEXT: .cfi_offset ra, -8
213 ; RV64I-NEXT: .cfi_offset s0, -16
214 ; RV64I-NEXT: addi s0, sp, 16
215 ; RV64I-NEXT: .cfi_def_cfa s0, 0
216 ; RV64I-NEXT: slli a0, a0, 32
217 ; RV64I-NEXT: srli a0, a0, 32
218 ; RV64I-NEXT: addi a0, a0, 15
219 ; RV64I-NEXT: andi a0, a0, -16
220 ; RV64I-NEXT: sub a0, sp, a0
221 ; RV64I-NEXT: mv sp, a0
222 ; RV64I-NEXT: call callee_void
223 ; RV64I-NEXT: li a0, 0
224 ; RV64I-NEXT: addi sp, s0, -16
225 ; RV64I-NEXT: .cfi_def_cfa sp, 16
226 ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
227 ; RV64I-NEXT: ld s0, 0(sp) # 8-byte Folded Reload
228 ; RV64I-NEXT: .cfi_restore ra
229 ; RV64I-NEXT: .cfi_restore s0
230 ; RV64I-NEXT: addi sp, sp, 16
231 ; RV64I-NEXT: .cfi_def_cfa_offset 0
234 %0 = alloca i8, i32 %size, align 16
235 call void @callee_void(ptr nonnull %0)
239 define i32 @pushpopret1(i32 signext %size) {
240 ; RV32IZCMP-LABEL: pushpopret1:
241 ; RV32IZCMP: # %bb.0: # %entry
242 ; RV32IZCMP-NEXT: cm.push {ra, s0}, -16
243 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 16
244 ; RV32IZCMP-NEXT: .cfi_offset ra, -8
245 ; RV32IZCMP-NEXT: .cfi_offset s0, -4
246 ; RV32IZCMP-NEXT: addi s0, sp, 16
247 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
248 ; RV32IZCMP-NEXT: addi a0, a0, 15
249 ; RV32IZCMP-NEXT: andi a0, a0, -16
250 ; RV32IZCMP-NEXT: sub a0, sp, a0
251 ; RV32IZCMP-NEXT: mv sp, a0
252 ; RV32IZCMP-NEXT: call callee_void
253 ; RV32IZCMP-NEXT: li a0, 1
254 ; RV32IZCMP-NEXT: addi sp, s0, -16
255 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 16
256 ; RV32IZCMP-NEXT: cm.popret {ra, s0}, 16
258 ; RV64IZCMP-LABEL: pushpopret1:
259 ; RV64IZCMP: # %bb.0: # %entry
260 ; RV64IZCMP-NEXT: cm.push {ra, s0}, -16
261 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 16
262 ; RV64IZCMP-NEXT: .cfi_offset ra, -16
263 ; RV64IZCMP-NEXT: .cfi_offset s0, -8
264 ; RV64IZCMP-NEXT: addi s0, sp, 16
265 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
266 ; RV64IZCMP-NEXT: slli a0, a0, 32
267 ; RV64IZCMP-NEXT: srli a0, a0, 32
268 ; RV64IZCMP-NEXT: addi a0, a0, 15
269 ; RV64IZCMP-NEXT: andi a0, a0, -16
270 ; RV64IZCMP-NEXT: sub a0, sp, a0
271 ; RV64IZCMP-NEXT: mv sp, a0
272 ; RV64IZCMP-NEXT: call callee_void
273 ; RV64IZCMP-NEXT: li a0, 1
274 ; RV64IZCMP-NEXT: addi sp, s0, -16
275 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 16
276 ; RV64IZCMP-NEXT: cm.popret {ra, s0}, 16
278 ; RV32IZCMP-SR-LABEL: pushpopret1:
279 ; RV32IZCMP-SR: # %bb.0: # %entry
280 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0}, -16
281 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
282 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -8
283 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -4
284 ; RV32IZCMP-SR-NEXT: addi s0, sp, 16
285 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
286 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
287 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
288 ; RV32IZCMP-SR-NEXT: sub a0, sp, a0
289 ; RV32IZCMP-SR-NEXT: mv sp, a0
290 ; RV32IZCMP-SR-NEXT: call callee_void
291 ; RV32IZCMP-SR-NEXT: li a0, 1
292 ; RV32IZCMP-SR-NEXT: addi sp, s0, -16
293 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
294 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0}, 16
296 ; RV64IZCMP-SR-LABEL: pushpopret1:
297 ; RV64IZCMP-SR: # %bb.0: # %entry
298 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0}, -16
299 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
300 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -16
301 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -8
302 ; RV64IZCMP-SR-NEXT: addi s0, sp, 16
303 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
304 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
305 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
306 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
307 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
308 ; RV64IZCMP-SR-NEXT: sub a0, sp, a0
309 ; RV64IZCMP-SR-NEXT: mv sp, a0
310 ; RV64IZCMP-SR-NEXT: call callee_void
311 ; RV64IZCMP-SR-NEXT: li a0, 1
312 ; RV64IZCMP-SR-NEXT: addi sp, s0, -16
313 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
314 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0}, 16
316 ; RV32I-LABEL: pushpopret1:
317 ; RV32I: # %bb.0: # %entry
318 ; RV32I-NEXT: addi sp, sp, -16
319 ; RV32I-NEXT: .cfi_def_cfa_offset 16
320 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
321 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
322 ; RV32I-NEXT: .cfi_offset ra, -4
323 ; RV32I-NEXT: .cfi_offset s0, -8
324 ; RV32I-NEXT: addi s0, sp, 16
325 ; RV32I-NEXT: .cfi_def_cfa s0, 0
326 ; RV32I-NEXT: addi a0, a0, 15
327 ; RV32I-NEXT: andi a0, a0, -16
328 ; RV32I-NEXT: sub a0, sp, a0
329 ; RV32I-NEXT: mv sp, a0
330 ; RV32I-NEXT: call callee_void
331 ; RV32I-NEXT: li a0, 1
332 ; RV32I-NEXT: addi sp, s0, -16
333 ; RV32I-NEXT: .cfi_def_cfa sp, 16
334 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
335 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
336 ; RV32I-NEXT: .cfi_restore ra
337 ; RV32I-NEXT: .cfi_restore s0
338 ; RV32I-NEXT: addi sp, sp, 16
339 ; RV32I-NEXT: .cfi_def_cfa_offset 0
342 ; RV64I-LABEL: pushpopret1:
343 ; RV64I: # %bb.0: # %entry
344 ; RV64I-NEXT: addi sp, sp, -16
345 ; RV64I-NEXT: .cfi_def_cfa_offset 16
346 ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
347 ; RV64I-NEXT: sd s0, 0(sp) # 8-byte Folded Spill
348 ; RV64I-NEXT: .cfi_offset ra, -8
349 ; RV64I-NEXT: .cfi_offset s0, -16
350 ; RV64I-NEXT: addi s0, sp, 16
351 ; RV64I-NEXT: .cfi_def_cfa s0, 0
352 ; RV64I-NEXT: slli a0, a0, 32
353 ; RV64I-NEXT: srli a0, a0, 32
354 ; RV64I-NEXT: addi a0, a0, 15
355 ; RV64I-NEXT: andi a0, a0, -16
356 ; RV64I-NEXT: sub a0, sp, a0
357 ; RV64I-NEXT: mv sp, a0
358 ; RV64I-NEXT: call callee_void
359 ; RV64I-NEXT: li a0, 1
360 ; RV64I-NEXT: addi sp, s0, -16
361 ; RV64I-NEXT: .cfi_def_cfa sp, 16
362 ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
363 ; RV64I-NEXT: ld s0, 0(sp) # 8-byte Folded Reload
364 ; RV64I-NEXT: .cfi_restore ra
365 ; RV64I-NEXT: .cfi_restore s0
366 ; RV64I-NEXT: addi sp, sp, 16
367 ; RV64I-NEXT: .cfi_def_cfa_offset 0
370 %0 = alloca i8, i32 %size, align 16
371 call void @callee_void(ptr nonnull %0)
375 define i32 @pushpopretneg1(i32 signext %size) {
376 ; RV32IZCMP-LABEL: pushpopretneg1:
377 ; RV32IZCMP: # %bb.0: # %entry
378 ; RV32IZCMP-NEXT: cm.push {ra, s0}, -16
379 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 16
380 ; RV32IZCMP-NEXT: .cfi_offset ra, -8
381 ; RV32IZCMP-NEXT: .cfi_offset s0, -4
382 ; RV32IZCMP-NEXT: addi s0, sp, 16
383 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
384 ; RV32IZCMP-NEXT: addi a0, a0, 15
385 ; RV32IZCMP-NEXT: andi a0, a0, -16
386 ; RV32IZCMP-NEXT: sub a0, sp, a0
387 ; RV32IZCMP-NEXT: mv sp, a0
388 ; RV32IZCMP-NEXT: call callee_void
389 ; RV32IZCMP-NEXT: li a0, -1
390 ; RV32IZCMP-NEXT: addi sp, s0, -16
391 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 16
392 ; RV32IZCMP-NEXT: cm.popret {ra, s0}, 16
394 ; RV64IZCMP-LABEL: pushpopretneg1:
395 ; RV64IZCMP: # %bb.0: # %entry
396 ; RV64IZCMP-NEXT: cm.push {ra, s0}, -16
397 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 16
398 ; RV64IZCMP-NEXT: .cfi_offset ra, -16
399 ; RV64IZCMP-NEXT: .cfi_offset s0, -8
400 ; RV64IZCMP-NEXT: addi s0, sp, 16
401 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
402 ; RV64IZCMP-NEXT: slli a0, a0, 32
403 ; RV64IZCMP-NEXT: srli a0, a0, 32
404 ; RV64IZCMP-NEXT: addi a0, a0, 15
405 ; RV64IZCMP-NEXT: andi a0, a0, -16
406 ; RV64IZCMP-NEXT: sub a0, sp, a0
407 ; RV64IZCMP-NEXT: mv sp, a0
408 ; RV64IZCMP-NEXT: call callee_void
409 ; RV64IZCMP-NEXT: li a0, -1
410 ; RV64IZCMP-NEXT: addi sp, s0, -16
411 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 16
412 ; RV64IZCMP-NEXT: cm.popret {ra, s0}, 16
414 ; RV32IZCMP-SR-LABEL: pushpopretneg1:
415 ; RV32IZCMP-SR: # %bb.0: # %entry
416 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0}, -16
417 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
418 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -8
419 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -4
420 ; RV32IZCMP-SR-NEXT: addi s0, sp, 16
421 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
422 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
423 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
424 ; RV32IZCMP-SR-NEXT: sub a0, sp, a0
425 ; RV32IZCMP-SR-NEXT: mv sp, a0
426 ; RV32IZCMP-SR-NEXT: call callee_void
427 ; RV32IZCMP-SR-NEXT: li a0, -1
428 ; RV32IZCMP-SR-NEXT: addi sp, s0, -16
429 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
430 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0}, 16
432 ; RV64IZCMP-SR-LABEL: pushpopretneg1:
433 ; RV64IZCMP-SR: # %bb.0: # %entry
434 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0}, -16
435 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
436 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -16
437 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -8
438 ; RV64IZCMP-SR-NEXT: addi s0, sp, 16
439 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
440 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
441 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
442 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
443 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
444 ; RV64IZCMP-SR-NEXT: sub a0, sp, a0
445 ; RV64IZCMP-SR-NEXT: mv sp, a0
446 ; RV64IZCMP-SR-NEXT: call callee_void
447 ; RV64IZCMP-SR-NEXT: li a0, -1
448 ; RV64IZCMP-SR-NEXT: addi sp, s0, -16
449 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
450 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0}, 16
452 ; RV32I-LABEL: pushpopretneg1:
453 ; RV32I: # %bb.0: # %entry
454 ; RV32I-NEXT: addi sp, sp, -16
455 ; RV32I-NEXT: .cfi_def_cfa_offset 16
456 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
457 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
458 ; RV32I-NEXT: .cfi_offset ra, -4
459 ; RV32I-NEXT: .cfi_offset s0, -8
460 ; RV32I-NEXT: addi s0, sp, 16
461 ; RV32I-NEXT: .cfi_def_cfa s0, 0
462 ; RV32I-NEXT: addi a0, a0, 15
463 ; RV32I-NEXT: andi a0, a0, -16
464 ; RV32I-NEXT: sub a0, sp, a0
465 ; RV32I-NEXT: mv sp, a0
466 ; RV32I-NEXT: call callee_void
467 ; RV32I-NEXT: li a0, -1
468 ; RV32I-NEXT: addi sp, s0, -16
469 ; RV32I-NEXT: .cfi_def_cfa sp, 16
470 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
471 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
472 ; RV32I-NEXT: .cfi_restore ra
473 ; RV32I-NEXT: .cfi_restore s0
474 ; RV32I-NEXT: addi sp, sp, 16
475 ; RV32I-NEXT: .cfi_def_cfa_offset 0
478 ; RV64I-LABEL: pushpopretneg1:
479 ; RV64I: # %bb.0: # %entry
480 ; RV64I-NEXT: addi sp, sp, -16
481 ; RV64I-NEXT: .cfi_def_cfa_offset 16
482 ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
483 ; RV64I-NEXT: sd s0, 0(sp) # 8-byte Folded Spill
484 ; RV64I-NEXT: .cfi_offset ra, -8
485 ; RV64I-NEXT: .cfi_offset s0, -16
486 ; RV64I-NEXT: addi s0, sp, 16
487 ; RV64I-NEXT: .cfi_def_cfa s0, 0
488 ; RV64I-NEXT: slli a0, a0, 32
489 ; RV64I-NEXT: srli a0, a0, 32
490 ; RV64I-NEXT: addi a0, a0, 15
491 ; RV64I-NEXT: andi a0, a0, -16
492 ; RV64I-NEXT: sub a0, sp, a0
493 ; RV64I-NEXT: mv sp, a0
494 ; RV64I-NEXT: call callee_void
495 ; RV64I-NEXT: li a0, -1
496 ; RV64I-NEXT: addi sp, s0, -16
497 ; RV64I-NEXT: .cfi_def_cfa sp, 16
498 ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
499 ; RV64I-NEXT: ld s0, 0(sp) # 8-byte Folded Reload
500 ; RV64I-NEXT: .cfi_restore ra
501 ; RV64I-NEXT: .cfi_restore s0
502 ; RV64I-NEXT: addi sp, sp, 16
503 ; RV64I-NEXT: .cfi_def_cfa_offset 0
506 %0 = alloca i8, i32 %size, align 16
507 call void @callee_void(ptr nonnull %0)
511 define i32 @pushpopret2(i32 signext %size) {
512 ; RV32IZCMP-LABEL: pushpopret2:
513 ; RV32IZCMP: # %bb.0: # %entry
514 ; RV32IZCMP-NEXT: cm.push {ra, s0}, -16
515 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 16
516 ; RV32IZCMP-NEXT: .cfi_offset ra, -8
517 ; RV32IZCMP-NEXT: .cfi_offset s0, -4
518 ; RV32IZCMP-NEXT: addi s0, sp, 16
519 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
520 ; RV32IZCMP-NEXT: addi a0, a0, 15
521 ; RV32IZCMP-NEXT: andi a0, a0, -16
522 ; RV32IZCMP-NEXT: sub a0, sp, a0
523 ; RV32IZCMP-NEXT: mv sp, a0
524 ; RV32IZCMP-NEXT: call callee_void
525 ; RV32IZCMP-NEXT: li a0, 2
526 ; RV32IZCMP-NEXT: addi sp, s0, -16
527 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 16
528 ; RV32IZCMP-NEXT: cm.popret {ra, s0}, 16
530 ; RV64IZCMP-LABEL: pushpopret2:
531 ; RV64IZCMP: # %bb.0: # %entry
532 ; RV64IZCMP-NEXT: cm.push {ra, s0}, -16
533 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 16
534 ; RV64IZCMP-NEXT: .cfi_offset ra, -16
535 ; RV64IZCMP-NEXT: .cfi_offset s0, -8
536 ; RV64IZCMP-NEXT: addi s0, sp, 16
537 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
538 ; RV64IZCMP-NEXT: slli a0, a0, 32
539 ; RV64IZCMP-NEXT: srli a0, a0, 32
540 ; RV64IZCMP-NEXT: addi a0, a0, 15
541 ; RV64IZCMP-NEXT: andi a0, a0, -16
542 ; RV64IZCMP-NEXT: sub a0, sp, a0
543 ; RV64IZCMP-NEXT: mv sp, a0
544 ; RV64IZCMP-NEXT: call callee_void
545 ; RV64IZCMP-NEXT: li a0, 2
546 ; RV64IZCMP-NEXT: addi sp, s0, -16
547 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 16
548 ; RV64IZCMP-NEXT: cm.popret {ra, s0}, 16
550 ; RV32IZCMP-SR-LABEL: pushpopret2:
551 ; RV32IZCMP-SR: # %bb.0: # %entry
552 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0}, -16
553 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
554 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -8
555 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -4
556 ; RV32IZCMP-SR-NEXT: addi s0, sp, 16
557 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
558 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
559 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
560 ; RV32IZCMP-SR-NEXT: sub a0, sp, a0
561 ; RV32IZCMP-SR-NEXT: mv sp, a0
562 ; RV32IZCMP-SR-NEXT: call callee_void
563 ; RV32IZCMP-SR-NEXT: li a0, 2
564 ; RV32IZCMP-SR-NEXT: addi sp, s0, -16
565 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
566 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0}, 16
568 ; RV64IZCMP-SR-LABEL: pushpopret2:
569 ; RV64IZCMP-SR: # %bb.0: # %entry
570 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0}, -16
571 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
572 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -16
573 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -8
574 ; RV64IZCMP-SR-NEXT: addi s0, sp, 16
575 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
576 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
577 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
578 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
579 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
580 ; RV64IZCMP-SR-NEXT: sub a0, sp, a0
581 ; RV64IZCMP-SR-NEXT: mv sp, a0
582 ; RV64IZCMP-SR-NEXT: call callee_void
583 ; RV64IZCMP-SR-NEXT: li a0, 2
584 ; RV64IZCMP-SR-NEXT: addi sp, s0, -16
585 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
586 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0}, 16
588 ; RV32I-LABEL: pushpopret2:
589 ; RV32I: # %bb.0: # %entry
590 ; RV32I-NEXT: addi sp, sp, -16
591 ; RV32I-NEXT: .cfi_def_cfa_offset 16
592 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
593 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
594 ; RV32I-NEXT: .cfi_offset ra, -4
595 ; RV32I-NEXT: .cfi_offset s0, -8
596 ; RV32I-NEXT: addi s0, sp, 16
597 ; RV32I-NEXT: .cfi_def_cfa s0, 0
598 ; RV32I-NEXT: addi a0, a0, 15
599 ; RV32I-NEXT: andi a0, a0, -16
600 ; RV32I-NEXT: sub a0, sp, a0
601 ; RV32I-NEXT: mv sp, a0
602 ; RV32I-NEXT: call callee_void
603 ; RV32I-NEXT: li a0, 2
604 ; RV32I-NEXT: addi sp, s0, -16
605 ; RV32I-NEXT: .cfi_def_cfa sp, 16
606 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
607 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
608 ; RV32I-NEXT: .cfi_restore ra
609 ; RV32I-NEXT: .cfi_restore s0
610 ; RV32I-NEXT: addi sp, sp, 16
611 ; RV32I-NEXT: .cfi_def_cfa_offset 0
614 ; RV64I-LABEL: pushpopret2:
615 ; RV64I: # %bb.0: # %entry
616 ; RV64I-NEXT: addi sp, sp, -16
617 ; RV64I-NEXT: .cfi_def_cfa_offset 16
618 ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
619 ; RV64I-NEXT: sd s0, 0(sp) # 8-byte Folded Spill
620 ; RV64I-NEXT: .cfi_offset ra, -8
621 ; RV64I-NEXT: .cfi_offset s0, -16
622 ; RV64I-NEXT: addi s0, sp, 16
623 ; RV64I-NEXT: .cfi_def_cfa s0, 0
624 ; RV64I-NEXT: slli a0, a0, 32
625 ; RV64I-NEXT: srli a0, a0, 32
626 ; RV64I-NEXT: addi a0, a0, 15
627 ; RV64I-NEXT: andi a0, a0, -16
628 ; RV64I-NEXT: sub a0, sp, a0
629 ; RV64I-NEXT: mv sp, a0
630 ; RV64I-NEXT: call callee_void
631 ; RV64I-NEXT: li a0, 2
632 ; RV64I-NEXT: addi sp, s0, -16
633 ; RV64I-NEXT: .cfi_def_cfa sp, 16
634 ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
635 ; RV64I-NEXT: ld s0, 0(sp) # 8-byte Folded Reload
636 ; RV64I-NEXT: .cfi_restore ra
637 ; RV64I-NEXT: .cfi_restore s0
638 ; RV64I-NEXT: addi sp, sp, 16
639 ; RV64I-NEXT: .cfi_def_cfa_offset 0
642 %0 = alloca i8, i32 %size, align 16
643 call void @callee_void(ptr nonnull %0)
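; For the tail call below the epilogue cannot fold the return into cm.popret; instead it
; emits cm.pop plus explicit .cfi_restore directives and then the tail pseudo.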
647 define dso_local i32 @tailcall(i32 signext %size) local_unnamed_addr #0 {
648 ; RV32IZCMP-LABEL: tailcall:
649 ; RV32IZCMP: # %bb.0: # %entry
650 ; RV32IZCMP-NEXT: cm.push {ra, s0}, -16
651 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 16
652 ; RV32IZCMP-NEXT: .cfi_offset ra, -8
653 ; RV32IZCMP-NEXT: .cfi_offset s0, -4
654 ; RV32IZCMP-NEXT: addi s0, sp, 16
655 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
656 ; RV32IZCMP-NEXT: addi a0, a0, 15
657 ; RV32IZCMP-NEXT: andi a0, a0, -16
658 ; RV32IZCMP-NEXT: sub a0, sp, a0
659 ; RV32IZCMP-NEXT: mv sp, a0
660 ; RV32IZCMP-NEXT: addi sp, s0, -16
661 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 16
662 ; RV32IZCMP-NEXT: cm.pop {ra, s0}, 16
663 ; RV32IZCMP-NEXT: .cfi_restore ra
664 ; RV32IZCMP-NEXT: .cfi_restore s0
665 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 0
666 ; RV32IZCMP-NEXT: tail callee
668 ; RV64IZCMP-LABEL: tailcall:
669 ; RV64IZCMP: # %bb.0: # %entry
670 ; RV64IZCMP-NEXT: cm.push {ra, s0}, -16
671 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 16
672 ; RV64IZCMP-NEXT: .cfi_offset ra, -16
673 ; RV64IZCMP-NEXT: .cfi_offset s0, -8
674 ; RV64IZCMP-NEXT: addi s0, sp, 16
675 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
676 ; RV64IZCMP-NEXT: slli a0, a0, 32
677 ; RV64IZCMP-NEXT: srli a0, a0, 32
678 ; RV64IZCMP-NEXT: addi a0, a0, 15
679 ; RV64IZCMP-NEXT: andi a0, a0, -16
680 ; RV64IZCMP-NEXT: sub a0, sp, a0
681 ; RV64IZCMP-NEXT: mv sp, a0
682 ; RV64IZCMP-NEXT: addi sp, s0, -16
683 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 16
684 ; RV64IZCMP-NEXT: cm.pop {ra, s0}, 16
685 ; RV64IZCMP-NEXT: .cfi_restore ra
686 ; RV64IZCMP-NEXT: .cfi_restore s0
687 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 0
688 ; RV64IZCMP-NEXT: tail callee
690 ; RV32IZCMP-SR-LABEL: tailcall:
691 ; RV32IZCMP-SR: # %bb.0: # %entry
692 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0}, -16
693 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
694 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -8
695 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -4
696 ; RV32IZCMP-SR-NEXT: addi s0, sp, 16
697 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
698 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
699 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
700 ; RV32IZCMP-SR-NEXT: sub a0, sp, a0
701 ; RV32IZCMP-SR-NEXT: mv sp, a0
702 ; RV32IZCMP-SR-NEXT: addi sp, s0, -16
703 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
704 ; RV32IZCMP-SR-NEXT: cm.pop {ra, s0}, 16
705 ; RV32IZCMP-SR-NEXT: .cfi_restore ra
706 ; RV32IZCMP-SR-NEXT: .cfi_restore s0
707 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 0
708 ; RV32IZCMP-SR-NEXT: tail callee
710 ; RV64IZCMP-SR-LABEL: tailcall:
711 ; RV64IZCMP-SR: # %bb.0: # %entry
712 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0}, -16
713 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 16
714 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -16
715 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -8
716 ; RV64IZCMP-SR-NEXT: addi s0, sp, 16
717 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
718 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
719 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
720 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
721 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
722 ; RV64IZCMP-SR-NEXT: sub a0, sp, a0
723 ; RV64IZCMP-SR-NEXT: mv sp, a0
724 ; RV64IZCMP-SR-NEXT: addi sp, s0, -16
725 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 16
726 ; RV64IZCMP-SR-NEXT: cm.pop {ra, s0}, 16
727 ; RV64IZCMP-SR-NEXT: .cfi_restore ra
728 ; RV64IZCMP-SR-NEXT: .cfi_restore s0
729 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 0
730 ; RV64IZCMP-SR-NEXT: tail callee
732 ; RV32I-LABEL: tailcall:
733 ; RV32I: # %bb.0: # %entry
734 ; RV32I-NEXT: addi sp, sp, -16
735 ; RV32I-NEXT: .cfi_def_cfa_offset 16
736 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
737 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
738 ; RV32I-NEXT: .cfi_offset ra, -4
739 ; RV32I-NEXT: .cfi_offset s0, -8
740 ; RV32I-NEXT: addi s0, sp, 16
741 ; RV32I-NEXT: .cfi_def_cfa s0, 0
742 ; RV32I-NEXT: addi a0, a0, 15
743 ; RV32I-NEXT: andi a0, a0, -16
744 ; RV32I-NEXT: sub a0, sp, a0
745 ; RV32I-NEXT: mv sp, a0
746 ; RV32I-NEXT: addi sp, s0, -16
747 ; RV32I-NEXT: .cfi_def_cfa sp, 16
748 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
749 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
750 ; RV32I-NEXT: .cfi_restore ra
751 ; RV32I-NEXT: .cfi_restore s0
752 ; RV32I-NEXT: addi sp, sp, 16
753 ; RV32I-NEXT: .cfi_def_cfa_offset 0
754 ; RV32I-NEXT: tail callee
756 ; RV64I-LABEL: tailcall:
757 ; RV64I: # %bb.0: # %entry
758 ; RV64I-NEXT: addi sp, sp, -16
759 ; RV64I-NEXT: .cfi_def_cfa_offset 16
760 ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
761 ; RV64I-NEXT: sd s0, 0(sp) # 8-byte Folded Spill
762 ; RV64I-NEXT: .cfi_offset ra, -8
763 ; RV64I-NEXT: .cfi_offset s0, -16
764 ; RV64I-NEXT: addi s0, sp, 16
765 ; RV64I-NEXT: .cfi_def_cfa s0, 0
766 ; RV64I-NEXT: slli a0, a0, 32
767 ; RV64I-NEXT: srli a0, a0, 32
768 ; RV64I-NEXT: addi a0, a0, 15
769 ; RV64I-NEXT: andi a0, a0, -16
770 ; RV64I-NEXT: sub a0, sp, a0
771 ; RV64I-NEXT: mv sp, a0
772 ; RV64I-NEXT: addi sp, s0, -16
773 ; RV64I-NEXT: .cfi_def_cfa sp, 16
774 ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
775 ; RV64I-NEXT: ld s0, 0(sp) # 8-byte Folded Reload
776 ; RV64I-NEXT: .cfi_restore ra
777 ; RV64I-NEXT: .cfi_restore s0
778 ; RV64I-NEXT: addi sp, sp, 16
779 ; RV64I-NEXT: .cfi_def_cfa_offset 0
780 ; RV64I-NEXT: tail callee
782 %0 = alloca i8, i32 %size, align 16
783 %1 = tail call i32 @callee(ptr nonnull %0)
787 @var = global [5 x i32] zeroinitializer
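; nocompress keeps several words of @var live in s-registers across the call, so the
; prologue/epilogue save and restore the full {ra, s0-s8} list with a single
; cm.push/cm.pop pair before tail-calling callee.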
788 define i32 @nocompress(i32 signext %size) {
789 ; RV32IZCMP-LABEL: nocompress:
790 ; RV32IZCMP: # %bb.0: # %entry
791 ; RV32IZCMP-NEXT: cm.push {ra, s0-s8}, -48
792 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 48
793 ; RV32IZCMP-NEXT: .cfi_offset ra, -40
794 ; RV32IZCMP-NEXT: .cfi_offset s0, -36
795 ; RV32IZCMP-NEXT: .cfi_offset s1, -32
796 ; RV32IZCMP-NEXT: .cfi_offset s2, -28
797 ; RV32IZCMP-NEXT: .cfi_offset s3, -24
798 ; RV32IZCMP-NEXT: .cfi_offset s4, -20
799 ; RV32IZCMP-NEXT: .cfi_offset s5, -16
800 ; RV32IZCMP-NEXT: .cfi_offset s6, -12
801 ; RV32IZCMP-NEXT: .cfi_offset s7, -8
802 ; RV32IZCMP-NEXT: .cfi_offset s8, -4
803 ; RV32IZCMP-NEXT: addi s0, sp, 48
804 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
805 ; RV32IZCMP-NEXT: addi a0, a0, 15
806 ; RV32IZCMP-NEXT: andi a0, a0, -16
807 ; RV32IZCMP-NEXT: sub s2, sp, a0
808 ; RV32IZCMP-NEXT: mv sp, s2
809 ; RV32IZCMP-NEXT: lui s1, %hi(var)
810 ; RV32IZCMP-NEXT: lw s3, %lo(var)(s1)
811 ; RV32IZCMP-NEXT: lw s4, %lo(var+4)(s1)
812 ; RV32IZCMP-NEXT: lw s5, %lo(var+8)(s1)
813 ; RV32IZCMP-NEXT: lw s6, %lo(var+12)(s1)
814 ; RV32IZCMP-NEXT: addi s7, s1, %lo(var)
815 ; RV32IZCMP-NEXT: lw s8, 16(s7)
816 ; RV32IZCMP-NEXT: mv a0, s2
817 ; RV32IZCMP-NEXT: call callee_void
818 ; RV32IZCMP-NEXT: sw s8, 16(s7)
819 ; RV32IZCMP-NEXT: sw s6, %lo(var+12)(s1)
820 ; RV32IZCMP-NEXT: sw s5, %lo(var+8)(s1)
821 ; RV32IZCMP-NEXT: sw s4, %lo(var+4)(s1)
822 ; RV32IZCMP-NEXT: sw s3, %lo(var)(s1)
823 ; RV32IZCMP-NEXT: mv a0, s2
824 ; RV32IZCMP-NEXT: addi sp, s0, -48
825 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 48
826 ; RV32IZCMP-NEXT: cm.pop {ra, s0-s8}, 48
827 ; RV32IZCMP-NEXT: .cfi_restore ra
828 ; RV32IZCMP-NEXT: .cfi_restore s0
829 ; RV32IZCMP-NEXT: .cfi_restore s1
830 ; RV32IZCMP-NEXT: .cfi_restore s2
831 ; RV32IZCMP-NEXT: .cfi_restore s3
832 ; RV32IZCMP-NEXT: .cfi_restore s4
833 ; RV32IZCMP-NEXT: .cfi_restore s5
834 ; RV32IZCMP-NEXT: .cfi_restore s6
835 ; RV32IZCMP-NEXT: .cfi_restore s7
836 ; RV32IZCMP-NEXT: .cfi_restore s8
837 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 0
838 ; RV32IZCMP-NEXT: tail callee
840 ; RV64IZCMP-LABEL: nocompress:
841 ; RV64IZCMP: # %bb.0: # %entry
842 ; RV64IZCMP-NEXT: cm.push {ra, s0-s8}, -80
843 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 80
844 ; RV64IZCMP-NEXT: .cfi_offset ra, -80
845 ; RV64IZCMP-NEXT: .cfi_offset s0, -72
846 ; RV64IZCMP-NEXT: .cfi_offset s1, -64
847 ; RV64IZCMP-NEXT: .cfi_offset s2, -56
848 ; RV64IZCMP-NEXT: .cfi_offset s3, -48
849 ; RV64IZCMP-NEXT: .cfi_offset s4, -40
850 ; RV64IZCMP-NEXT: .cfi_offset s5, -32
851 ; RV64IZCMP-NEXT: .cfi_offset s6, -24
852 ; RV64IZCMP-NEXT: .cfi_offset s7, -16
853 ; RV64IZCMP-NEXT: .cfi_offset s8, -8
854 ; RV64IZCMP-NEXT: addi s0, sp, 80
855 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
856 ; RV64IZCMP-NEXT: slli a0, a0, 32
857 ; RV64IZCMP-NEXT: srli a0, a0, 32
858 ; RV64IZCMP-NEXT: addi a0, a0, 15
859 ; RV64IZCMP-NEXT: andi a0, a0, -16
860 ; RV64IZCMP-NEXT: sub s2, sp, a0
861 ; RV64IZCMP-NEXT: mv sp, s2
862 ; RV64IZCMP-NEXT: lui s1, %hi(var)
863 ; RV64IZCMP-NEXT: lw s3, %lo(var)(s1)
864 ; RV64IZCMP-NEXT: lw s4, %lo(var+4)(s1)
865 ; RV64IZCMP-NEXT: lw s5, %lo(var+8)(s1)
866 ; RV64IZCMP-NEXT: lw s6, %lo(var+12)(s1)
867 ; RV64IZCMP-NEXT: addi s7, s1, %lo(var)
868 ; RV64IZCMP-NEXT: lw s8, 16(s7)
869 ; RV64IZCMP-NEXT: mv a0, s2
870 ; RV64IZCMP-NEXT: call callee_void
871 ; RV64IZCMP-NEXT: sw s8, 16(s7)
872 ; RV64IZCMP-NEXT: sw s6, %lo(var+12)(s1)
873 ; RV64IZCMP-NEXT: sw s5, %lo(var+8)(s1)
874 ; RV64IZCMP-NEXT: sw s4, %lo(var+4)(s1)
875 ; RV64IZCMP-NEXT: sw s3, %lo(var)(s1)
876 ; RV64IZCMP-NEXT: mv a0, s2
877 ; RV64IZCMP-NEXT: addi sp, s0, -80
878 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 80
879 ; RV64IZCMP-NEXT: cm.pop {ra, s0-s8}, 80
880 ; RV64IZCMP-NEXT: .cfi_restore ra
881 ; RV64IZCMP-NEXT: .cfi_restore s0
882 ; RV64IZCMP-NEXT: .cfi_restore s1
883 ; RV64IZCMP-NEXT: .cfi_restore s2
884 ; RV64IZCMP-NEXT: .cfi_restore s3
885 ; RV64IZCMP-NEXT: .cfi_restore s4
886 ; RV64IZCMP-NEXT: .cfi_restore s5
887 ; RV64IZCMP-NEXT: .cfi_restore s6
888 ; RV64IZCMP-NEXT: .cfi_restore s7
889 ; RV64IZCMP-NEXT: .cfi_restore s8
890 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 0
891 ; RV64IZCMP-NEXT: tail callee
893 ; RV32IZCMP-SR-LABEL: nocompress:
894 ; RV32IZCMP-SR: # %bb.0: # %entry
895 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s8}, -48
896 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 48
897 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -40
898 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -36
899 ; RV32IZCMP-SR-NEXT: .cfi_offset s1, -32
900 ; RV32IZCMP-SR-NEXT: .cfi_offset s2, -28
901 ; RV32IZCMP-SR-NEXT: .cfi_offset s3, -24
902 ; RV32IZCMP-SR-NEXT: .cfi_offset s4, -20
903 ; RV32IZCMP-SR-NEXT: .cfi_offset s5, -16
904 ; RV32IZCMP-SR-NEXT: .cfi_offset s6, -12
905 ; RV32IZCMP-SR-NEXT: .cfi_offset s7, -8
906 ; RV32IZCMP-SR-NEXT: .cfi_offset s8, -4
907 ; RV32IZCMP-SR-NEXT: addi s0, sp, 48
908 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
909 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
910 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
911 ; RV32IZCMP-SR-NEXT: sub s2, sp, a0
912 ; RV32IZCMP-SR-NEXT: mv sp, s2
913 ; RV32IZCMP-SR-NEXT: lui s1, %hi(var)
914 ; RV32IZCMP-SR-NEXT: lw s3, %lo(var)(s1)
915 ; RV32IZCMP-SR-NEXT: lw s4, %lo(var+4)(s1)
916 ; RV32IZCMP-SR-NEXT: lw s5, %lo(var+8)(s1)
917 ; RV32IZCMP-SR-NEXT: lw s6, %lo(var+12)(s1)
918 ; RV32IZCMP-SR-NEXT: addi s7, s1, %lo(var)
919 ; RV32IZCMP-SR-NEXT: lw s8, 16(s7)
920 ; RV32IZCMP-SR-NEXT: mv a0, s2
921 ; RV32IZCMP-SR-NEXT: call callee_void
922 ; RV32IZCMP-SR-NEXT: sw s8, 16(s7)
923 ; RV32IZCMP-SR-NEXT: sw s6, %lo(var+12)(s1)
924 ; RV32IZCMP-SR-NEXT: sw s5, %lo(var+8)(s1)
925 ; RV32IZCMP-SR-NEXT: sw s4, %lo(var+4)(s1)
926 ; RV32IZCMP-SR-NEXT: sw s3, %lo(var)(s1)
927 ; RV32IZCMP-SR-NEXT: mv a0, s2
928 ; RV32IZCMP-SR-NEXT: addi sp, s0, -48
929 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 48
930 ; RV32IZCMP-SR-NEXT: cm.pop {ra, s0-s8}, 48
931 ; RV32IZCMP-SR-NEXT: .cfi_restore ra
932 ; RV32IZCMP-SR-NEXT: .cfi_restore s0
933 ; RV32IZCMP-SR-NEXT: .cfi_restore s1
934 ; RV32IZCMP-SR-NEXT: .cfi_restore s2
935 ; RV32IZCMP-SR-NEXT: .cfi_restore s3
936 ; RV32IZCMP-SR-NEXT: .cfi_restore s4
937 ; RV32IZCMP-SR-NEXT: .cfi_restore s5
938 ; RV32IZCMP-SR-NEXT: .cfi_restore s6
939 ; RV32IZCMP-SR-NEXT: .cfi_restore s7
940 ; RV32IZCMP-SR-NEXT: .cfi_restore s8
941 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 0
942 ; RV32IZCMP-SR-NEXT: tail callee
944 ; RV64IZCMP-SR-LABEL: nocompress:
945 ; RV64IZCMP-SR: # %bb.0: # %entry
946 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s8}, -80
947 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 80
948 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -80
949 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -72
950 ; RV64IZCMP-SR-NEXT: .cfi_offset s1, -64
951 ; RV64IZCMP-SR-NEXT: .cfi_offset s2, -56
952 ; RV64IZCMP-SR-NEXT: .cfi_offset s3, -48
953 ; RV64IZCMP-SR-NEXT: .cfi_offset s4, -40
954 ; RV64IZCMP-SR-NEXT: .cfi_offset s5, -32
955 ; RV64IZCMP-SR-NEXT: .cfi_offset s6, -24
956 ; RV64IZCMP-SR-NEXT: .cfi_offset s7, -16
957 ; RV64IZCMP-SR-NEXT: .cfi_offset s8, -8
958 ; RV64IZCMP-SR-NEXT: addi s0, sp, 80
959 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
960 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
961 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
962 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
963 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
964 ; RV64IZCMP-SR-NEXT: sub s2, sp, a0
965 ; RV64IZCMP-SR-NEXT: mv sp, s2
966 ; RV64IZCMP-SR-NEXT: lui s1, %hi(var)
967 ; RV64IZCMP-SR-NEXT: lw s3, %lo(var)(s1)
968 ; RV64IZCMP-SR-NEXT: lw s4, %lo(var+4)(s1)
969 ; RV64IZCMP-SR-NEXT: lw s5, %lo(var+8)(s1)
970 ; RV64IZCMP-SR-NEXT: lw s6, %lo(var+12)(s1)
971 ; RV64IZCMP-SR-NEXT: addi s7, s1, %lo(var)
972 ; RV64IZCMP-SR-NEXT: lw s8, 16(s7)
973 ; RV64IZCMP-SR-NEXT: mv a0, s2
974 ; RV64IZCMP-SR-NEXT: call callee_void
975 ; RV64IZCMP-SR-NEXT: sw s8, 16(s7)
976 ; RV64IZCMP-SR-NEXT: sw s6, %lo(var+12)(s1)
977 ; RV64IZCMP-SR-NEXT: sw s5, %lo(var+8)(s1)
978 ; RV64IZCMP-SR-NEXT: sw s4, %lo(var+4)(s1)
979 ; RV64IZCMP-SR-NEXT: sw s3, %lo(var)(s1)
980 ; RV64IZCMP-SR-NEXT: mv a0, s2
981 ; RV64IZCMP-SR-NEXT: addi sp, s0, -80
982 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 80
983 ; RV64IZCMP-SR-NEXT: cm.pop {ra, s0-s8}, 80
984 ; RV64IZCMP-SR-NEXT: .cfi_restore ra
985 ; RV64IZCMP-SR-NEXT: .cfi_restore s0
986 ; RV64IZCMP-SR-NEXT: .cfi_restore s1
987 ; RV64IZCMP-SR-NEXT: .cfi_restore s2
988 ; RV64IZCMP-SR-NEXT: .cfi_restore s3
989 ; RV64IZCMP-SR-NEXT: .cfi_restore s4
990 ; RV64IZCMP-SR-NEXT: .cfi_restore s5
991 ; RV64IZCMP-SR-NEXT: .cfi_restore s6
992 ; RV64IZCMP-SR-NEXT: .cfi_restore s7
993 ; RV64IZCMP-SR-NEXT: .cfi_restore s8
994 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 0
995 ; RV64IZCMP-SR-NEXT: tail callee
997 ; RV32I-LABEL: nocompress:
998 ; RV32I: # %bb.0: # %entry
999 ; RV32I-NEXT: addi sp, sp, -48
1000 ; RV32I-NEXT: .cfi_def_cfa_offset 48
1001 ; RV32I-NEXT: sw ra, 44(sp) # 4-byte Folded Spill
1002 ; RV32I-NEXT: sw s0, 40(sp) # 4-byte Folded Spill
1003 ; RV32I-NEXT: sw s1, 36(sp) # 4-byte Folded Spill
1004 ; RV32I-NEXT: sw s2, 32(sp) # 4-byte Folded Spill
1005 ; RV32I-NEXT: sw s3, 28(sp) # 4-byte Folded Spill
1006 ; RV32I-NEXT: sw s4, 24(sp) # 4-byte Folded Spill
1007 ; RV32I-NEXT: sw s5, 20(sp) # 4-byte Folded Spill
1008 ; RV32I-NEXT: sw s6, 16(sp) # 4-byte Folded Spill
1009 ; RV32I-NEXT: sw s7, 12(sp) # 4-byte Folded Spill
1010 ; RV32I-NEXT: sw s8, 8(sp) # 4-byte Folded Spill
1011 ; RV32I-NEXT: .cfi_offset ra, -4
1012 ; RV32I-NEXT: .cfi_offset s0, -8
1013 ; RV32I-NEXT: .cfi_offset s1, -12
1014 ; RV32I-NEXT: .cfi_offset s2, -16
1015 ; RV32I-NEXT: .cfi_offset s3, -20
1016 ; RV32I-NEXT: .cfi_offset s4, -24
1017 ; RV32I-NEXT: .cfi_offset s5, -28
1018 ; RV32I-NEXT: .cfi_offset s6, -32
1019 ; RV32I-NEXT: .cfi_offset s7, -36
1020 ; RV32I-NEXT: .cfi_offset s8, -40
1021 ; RV32I-NEXT: addi s0, sp, 48
1022 ; RV32I-NEXT: .cfi_def_cfa s0, 0
1023 ; RV32I-NEXT: addi a0, a0, 15
1024 ; RV32I-NEXT: andi a0, a0, -16
1025 ; RV32I-NEXT: sub s1, sp, a0
1026 ; RV32I-NEXT: mv sp, s1
1027 ; RV32I-NEXT: lui s2, %hi(var)
1028 ; RV32I-NEXT: lw s3, %lo(var)(s2)
1029 ; RV32I-NEXT: lw s4, %lo(var+4)(s2)
1030 ; RV32I-NEXT: lw s5, %lo(var+8)(s2)
1031 ; RV32I-NEXT: lw s6, %lo(var+12)(s2)
1032 ; RV32I-NEXT: addi s7, s2, %lo(var)
1033 ; RV32I-NEXT: lw s8, 16(s7)
1034 ; RV32I-NEXT: mv a0, s1
1035 ; RV32I-NEXT: call callee_void
1036 ; RV32I-NEXT: sw s8, 16(s7)
1037 ; RV32I-NEXT: sw s6, %lo(var+12)(s2)
1038 ; RV32I-NEXT: sw s5, %lo(var+8)(s2)
1039 ; RV32I-NEXT: sw s4, %lo(var+4)(s2)
1040 ; RV32I-NEXT: sw s3, %lo(var)(s2)
1041 ; RV32I-NEXT: mv a0, s1
1042 ; RV32I-NEXT: addi sp, s0, -48
1043 ; RV32I-NEXT: .cfi_def_cfa sp, 48
1044 ; RV32I-NEXT: lw ra, 44(sp) # 4-byte Folded Reload
1045 ; RV32I-NEXT: lw s0, 40(sp) # 4-byte Folded Reload
1046 ; RV32I-NEXT: lw s1, 36(sp) # 4-byte Folded Reload
1047 ; RV32I-NEXT: lw s2, 32(sp) # 4-byte Folded Reload
1048 ; RV32I-NEXT: lw s3, 28(sp) # 4-byte Folded Reload
1049 ; RV32I-NEXT: lw s4, 24(sp) # 4-byte Folded Reload
1050 ; RV32I-NEXT: lw s5, 20(sp) # 4-byte Folded Reload
1051 ; RV32I-NEXT: lw s6, 16(sp) # 4-byte Folded Reload
1052 ; RV32I-NEXT: lw s7, 12(sp) # 4-byte Folded Reload
1053 ; RV32I-NEXT: lw s8, 8(sp) # 4-byte Folded Reload
1054 ; RV32I-NEXT: .cfi_restore ra
1055 ; RV32I-NEXT: .cfi_restore s0
1056 ; RV32I-NEXT: .cfi_restore s1
1057 ; RV32I-NEXT: .cfi_restore s2
1058 ; RV32I-NEXT: .cfi_restore s3
1059 ; RV32I-NEXT: .cfi_restore s4
1060 ; RV32I-NEXT: .cfi_restore s5
1061 ; RV32I-NEXT: .cfi_restore s6
1062 ; RV32I-NEXT: .cfi_restore s7
1063 ; RV32I-NEXT: .cfi_restore s8
1064 ; RV32I-NEXT: addi sp, sp, 48
1065 ; RV32I-NEXT: .cfi_def_cfa_offset 0
1066 ; RV32I-NEXT: tail callee
1068 ; RV64I-LABEL: nocompress:
1069 ; RV64I: # %bb.0: # %entry
1070 ; RV64I-NEXT: addi sp, sp, -80
1071 ; RV64I-NEXT: .cfi_def_cfa_offset 80
1072 ; RV64I-NEXT: sd ra, 72(sp) # 8-byte Folded Spill
1073 ; RV64I-NEXT: sd s0, 64(sp) # 8-byte Folded Spill
1074 ; RV64I-NEXT: sd s1, 56(sp) # 8-byte Folded Spill
1075 ; RV64I-NEXT: sd s2, 48(sp) # 8-byte Folded Spill
1076 ; RV64I-NEXT: sd s3, 40(sp) # 8-byte Folded Spill
1077 ; RV64I-NEXT: sd s4, 32(sp) # 8-byte Folded Spill
1078 ; RV64I-NEXT: sd s5, 24(sp) # 8-byte Folded Spill
1079 ; RV64I-NEXT: sd s6, 16(sp) # 8-byte Folded Spill
1080 ; RV64I-NEXT: sd s7, 8(sp) # 8-byte Folded Spill
1081 ; RV64I-NEXT: sd s8, 0(sp) # 8-byte Folded Spill
1082 ; RV64I-NEXT: .cfi_offset ra, -8
1083 ; RV64I-NEXT: .cfi_offset s0, -16
1084 ; RV64I-NEXT: .cfi_offset s1, -24
1085 ; RV64I-NEXT: .cfi_offset s2, -32
1086 ; RV64I-NEXT: .cfi_offset s3, -40
1087 ; RV64I-NEXT: .cfi_offset s4, -48
1088 ; RV64I-NEXT: .cfi_offset s5, -56
1089 ; RV64I-NEXT: .cfi_offset s6, -64
1090 ; RV64I-NEXT: .cfi_offset s7, -72
1091 ; RV64I-NEXT: .cfi_offset s8, -80
1092 ; RV64I-NEXT: addi s0, sp, 80
1093 ; RV64I-NEXT: .cfi_def_cfa s0, 0
1094 ; RV64I-NEXT: slli a0, a0, 32
1095 ; RV64I-NEXT: srli a0, a0, 32
1096 ; RV64I-NEXT: addi a0, a0, 15
1097 ; RV64I-NEXT: andi a0, a0, -16
1098 ; RV64I-NEXT: sub s1, sp, a0
1099 ; RV64I-NEXT: mv sp, s1
1100 ; RV64I-NEXT: lui s2, %hi(var)
1101 ; RV64I-NEXT: lw s3, %lo(var)(s2)
1102 ; RV64I-NEXT: lw s4, %lo(var+4)(s2)
1103 ; RV64I-NEXT: lw s5, %lo(var+8)(s2)
1104 ; RV64I-NEXT: lw s6, %lo(var+12)(s2)
1105 ; RV64I-NEXT: addi s7, s2, %lo(var)
1106 ; RV64I-NEXT: lw s8, 16(s7)
1107 ; RV64I-NEXT: mv a0, s1
1108 ; RV64I-NEXT: call callee_void
1109 ; RV64I-NEXT: sw s8, 16(s7)
1110 ; RV64I-NEXT: sw s6, %lo(var+12)(s2)
1111 ; RV64I-NEXT: sw s5, %lo(var+8)(s2)
1112 ; RV64I-NEXT: sw s4, %lo(var+4)(s2)
1113 ; RV64I-NEXT: sw s3, %lo(var)(s2)
1114 ; RV64I-NEXT: mv a0, s1
1115 ; RV64I-NEXT: addi sp, s0, -80
1116 ; RV64I-NEXT: .cfi_def_cfa sp, 80
1117 ; RV64I-NEXT: ld ra, 72(sp) # 8-byte Folded Reload
1118 ; RV64I-NEXT: ld s0, 64(sp) # 8-byte Folded Reload
1119 ; RV64I-NEXT: ld s1, 56(sp) # 8-byte Folded Reload
1120 ; RV64I-NEXT: ld s2, 48(sp) # 8-byte Folded Reload
1121 ; RV64I-NEXT: ld s3, 40(sp) # 8-byte Folded Reload
1122 ; RV64I-NEXT: ld s4, 32(sp) # 8-byte Folded Reload
1123 ; RV64I-NEXT: ld s5, 24(sp) # 8-byte Folded Reload
1124 ; RV64I-NEXT: ld s6, 16(sp) # 8-byte Folded Reload
1125 ; RV64I-NEXT: ld s7, 8(sp) # 8-byte Folded Reload
1126 ; RV64I-NEXT: ld s8, 0(sp) # 8-byte Folded Reload
1127 ; RV64I-NEXT: .cfi_restore ra
1128 ; RV64I-NEXT: .cfi_restore s0
1129 ; RV64I-NEXT: .cfi_restore s1
1130 ; RV64I-NEXT: .cfi_restore s2
1131 ; RV64I-NEXT: .cfi_restore s3
1132 ; RV64I-NEXT: .cfi_restore s4
1133 ; RV64I-NEXT: .cfi_restore s5
1134 ; RV64I-NEXT: .cfi_restore s6
1135 ; RV64I-NEXT: .cfi_restore s7
1136 ; RV64I-NEXT: .cfi_restore s8
1137 ; RV64I-NEXT: addi sp, sp, 80
1138 ; RV64I-NEXT: .cfi_def_cfa_offset 0
1139 ; RV64I-NEXT: tail callee
1141 %0 = alloca i8, i32 %size, align 16
1142 %val = load [5 x i32], ptr @var
1143 call void @callee_void(ptr nonnull %0)
1144 store volatile [5 x i32] %val, ptr @var
1145 %1 = tail call i32 @callee(ptr nonnull %0)
1149 ; Check that functions with varargs do not use save/restore code
1151 declare void @llvm.va_start(ptr)
1152 declare void @llvm.va_end(ptr)
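; In every configuration below the varargs prologue/epilogue is a plain sp adjustment:
; no cm.push/cm.pop and no save/restore libcalls are expected.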
1154 define i32 @varargs(ptr %fmt, ...) nounwind {
1155 ; RV32IZCMP-LABEL: varargs:
1156 ; RV32IZCMP: # %bb.0:
1157 ; RV32IZCMP-NEXT: addi sp, sp, -48
1158 ; RV32IZCMP-NEXT: mv a0, a1
1159 ; RV32IZCMP-NEXT: sw a5, 36(sp)
1160 ; RV32IZCMP-NEXT: sw a6, 40(sp)
1161 ; RV32IZCMP-NEXT: sw a7, 44(sp)
1162 ; RV32IZCMP-NEXT: sw a1, 20(sp)
1163 ; RV32IZCMP-NEXT: sw a2, 24(sp)
1164 ; RV32IZCMP-NEXT: sw a3, 28(sp)
1165 ; RV32IZCMP-NEXT: sw a4, 32(sp)
1166 ; RV32IZCMP-NEXT: addi a1, sp, 24
1167 ; RV32IZCMP-NEXT: sw a1, 12(sp)
1168 ; RV32IZCMP-NEXT: addi sp, sp, 48
1169 ; RV32IZCMP-NEXT: ret
1171 ; RV64IZCMP-LABEL: varargs:
1172 ; RV64IZCMP: # %bb.0:
1173 ; RV64IZCMP-NEXT: addi sp, sp, -80
1174 ; RV64IZCMP-NEXT: sd a1, 24(sp)
1175 ; RV64IZCMP-NEXT: addi a0, sp, 28
1176 ; RV64IZCMP-NEXT: sd a0, 8(sp)
1177 ; RV64IZCMP-NEXT: lw a0, 24(sp)
1178 ; RV64IZCMP-NEXT: sd a5, 56(sp)
1179 ; RV64IZCMP-NEXT: sd a6, 64(sp)
1180 ; RV64IZCMP-NEXT: sd a7, 72(sp)
1181 ; RV64IZCMP-NEXT: sd a2, 32(sp)
1182 ; RV64IZCMP-NEXT: sd a3, 40(sp)
1183 ; RV64IZCMP-NEXT: sd a4, 48(sp)
1184 ; RV64IZCMP-NEXT: addi sp, sp, 80
1185 ; RV64IZCMP-NEXT: ret
1187 ; RV32IZCMP-SR-LABEL: varargs:
1188 ; RV32IZCMP-SR: # %bb.0:
1189 ; RV32IZCMP-SR-NEXT: addi sp, sp, -48
1190 ; RV32IZCMP-SR-NEXT: mv a0, a1
1191 ; RV32IZCMP-SR-NEXT: sw a5, 36(sp)
1192 ; RV32IZCMP-SR-NEXT: sw a6, 40(sp)
1193 ; RV32IZCMP-SR-NEXT: sw a7, 44(sp)
1194 ; RV32IZCMP-SR-NEXT: sw a1, 20(sp)
1195 ; RV32IZCMP-SR-NEXT: sw a2, 24(sp)
1196 ; RV32IZCMP-SR-NEXT: sw a3, 28(sp)
1197 ; RV32IZCMP-SR-NEXT: sw a4, 32(sp)
1198 ; RV32IZCMP-SR-NEXT: addi a1, sp, 24
1199 ; RV32IZCMP-SR-NEXT: sw a1, 12(sp)
1200 ; RV32IZCMP-SR-NEXT: addi sp, sp, 48
1201 ; RV32IZCMP-SR-NEXT: ret
1203 ; RV64IZCMP-SR-LABEL: varargs:
1204 ; RV64IZCMP-SR: # %bb.0:
1205 ; RV64IZCMP-SR-NEXT: addi sp, sp, -80
1206 ; RV64IZCMP-SR-NEXT: sd a1, 24(sp)
1207 ; RV64IZCMP-SR-NEXT: addi a0, sp, 28
1208 ; RV64IZCMP-SR-NEXT: sd a0, 8(sp)
1209 ; RV64IZCMP-SR-NEXT: lw a0, 24(sp)
1210 ; RV64IZCMP-SR-NEXT: sd a5, 56(sp)
1211 ; RV64IZCMP-SR-NEXT: sd a6, 64(sp)
1212 ; RV64IZCMP-SR-NEXT: sd a7, 72(sp)
1213 ; RV64IZCMP-SR-NEXT: sd a2, 32(sp)
1214 ; RV64IZCMP-SR-NEXT: sd a3, 40(sp)
1215 ; RV64IZCMP-SR-NEXT: sd a4, 48(sp)
1216 ; RV64IZCMP-SR-NEXT: addi sp, sp, 80
1217 ; RV64IZCMP-SR-NEXT: ret
1219 ; RV32I-LABEL: varargs:
1221 ; RV32I-NEXT: addi sp, sp, -48
1222 ; RV32I-NEXT: mv a0, a1
1223 ; RV32I-NEXT: sw a5, 36(sp)
1224 ; RV32I-NEXT: sw a6, 40(sp)
1225 ; RV32I-NEXT: sw a7, 44(sp)
1226 ; RV32I-NEXT: sw a1, 20(sp)
1227 ; RV32I-NEXT: sw a2, 24(sp)
1228 ; RV32I-NEXT: sw a3, 28(sp)
1229 ; RV32I-NEXT: sw a4, 32(sp)
1230 ; RV32I-NEXT: addi a1, sp, 24
1231 ; RV32I-NEXT: sw a1, 12(sp)
1232 ; RV32I-NEXT: addi sp, sp, 48
1235 ; RV64I-LABEL: varargs:
1237 ; RV64I-NEXT: addi sp, sp, -80
1238 ; RV64I-NEXT: sd a1, 24(sp)
1239 ; RV64I-NEXT: addi a0, sp, 28
1240 ; RV64I-NEXT: sd a0, 8(sp)
1241 ; RV64I-NEXT: lw a0, 24(sp)
1242 ; RV64I-NEXT: sd a5, 56(sp)
1243 ; RV64I-NEXT: sd a6, 64(sp)
1244 ; RV64I-NEXT: sd a7, 72(sp)
1245 ; RV64I-NEXT: sd a2, 32(sp)
1246 ; RV64I-NEXT: sd a3, 40(sp)
1247 ; RV64I-NEXT: sd a4, 48(sp)
1248 ; RV64I-NEXT: addi sp, sp, 80
%va = alloca ptr
1251 call void @llvm.va_start(ptr %va)
1252 %argp.cur = load ptr, ptr %va
1253 %argp.next = getelementptr inbounds i8, ptr %argp.cur, i32 4
1254 store ptr %argp.next, ptr %va
1255 %1 = load i32, ptr %argp.cur
1256 call void @llvm.va_end(ptr %va)
1260 @var0 = global [18 x i32] zeroinitializer
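; many_args only needs s0-s4 (plus ra) saved for the @var0 load/store shuffle, so the
; Zcmp configurations use cm.push/cm.popret with the {ra, s0-s4} list while the
; baselines spill s0-s4 individually.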
1262 define void @many_args(i32, i32, i32, i32, i32, i32, i32, i32, i32) nounwind {
1263 ; RV32IZCMP-LABEL: many_args:
1264 ; RV32IZCMP: # %bb.0: # %entry
1265 ; RV32IZCMP-NEXT: cm.push {ra, s0-s4}, -32
1266 ; RV32IZCMP-NEXT: lui a0, %hi(var0)
1267 ; RV32IZCMP-NEXT: lw a6, %lo(var0)(a0)
1268 ; RV32IZCMP-NEXT: lw a7, %lo(var0+4)(a0)
1269 ; RV32IZCMP-NEXT: lw t0, %lo(var0+8)(a0)
1270 ; RV32IZCMP-NEXT: lw t1, %lo(var0+12)(a0)
1271 ; RV32IZCMP-NEXT: addi a5, a0, %lo(var0)
1272 ; RV32IZCMP-NEXT: lw t2, 16(a5)
1273 ; RV32IZCMP-NEXT: lw t3, 20(a5)
1274 ; RV32IZCMP-NEXT: lw t4, 24(a5)
1275 ; RV32IZCMP-NEXT: lw t5, 28(a5)
1276 ; RV32IZCMP-NEXT: lw t6, 48(a5)
1277 ; RV32IZCMP-NEXT: lw s2, 52(a5)
1278 ; RV32IZCMP-NEXT: lw a3, 56(a5)
1279 ; RV32IZCMP-NEXT: lw a4, 60(a5)
1280 ; RV32IZCMP-NEXT: lw a1, 64(a5)
1281 ; RV32IZCMP-NEXT: lw s0, 68(a5)
1282 ; RV32IZCMP-NEXT: lw s3, 32(a5)
1283 ; RV32IZCMP-NEXT: lw s4, 36(a5)
1284 ; RV32IZCMP-NEXT: lw s1, 40(a5)
1285 ; RV32IZCMP-NEXT: lw a2, 44(a5)
1286 ; RV32IZCMP-NEXT: sw s0, 68(a5)
1287 ; RV32IZCMP-NEXT: sw a1, 64(a5)
1288 ; RV32IZCMP-NEXT: sw a4, 60(a5)
1289 ; RV32IZCMP-NEXT: sw a3, 56(a5)
1290 ; RV32IZCMP-NEXT: sw s2, 52(a5)
1291 ; RV32IZCMP-NEXT: sw t6, 48(a5)
1292 ; RV32IZCMP-NEXT: sw a2, 44(a5)
1293 ; RV32IZCMP-NEXT: sw s1, 40(a5)
1294 ; RV32IZCMP-NEXT: sw s4, 36(a5)
1295 ; RV32IZCMP-NEXT: sw s3, 32(a5)
1296 ; RV32IZCMP-NEXT: sw t5, 28(a5)
1297 ; RV32IZCMP-NEXT: sw t4, 24(a5)
1298 ; RV32IZCMP-NEXT: sw t3, 20(a5)
1299 ; RV32IZCMP-NEXT: sw t2, 16(a5)
1300 ; RV32IZCMP-NEXT: sw t1, %lo(var0+12)(a0)
1301 ; RV32IZCMP-NEXT: sw t0, %lo(var0+8)(a0)
1302 ; RV32IZCMP-NEXT: sw a7, %lo(var0+4)(a0)
1303 ; RV32IZCMP-NEXT: sw a6, %lo(var0)(a0)
1304 ; RV32IZCMP-NEXT: cm.popret {ra, s0-s4}, 32
1306 ; RV64IZCMP-LABEL: many_args:
1307 ; RV64IZCMP: # %bb.0: # %entry
1308 ; RV64IZCMP-NEXT: cm.push {ra, s0-s4}, -48
1309 ; RV64IZCMP-NEXT: lui a0, %hi(var0)
1310 ; RV64IZCMP-NEXT: lw a6, %lo(var0)(a0)
1311 ; RV64IZCMP-NEXT: lw a7, %lo(var0+4)(a0)
1312 ; RV64IZCMP-NEXT: lw t0, %lo(var0+8)(a0)
1313 ; RV64IZCMP-NEXT: lw t1, %lo(var0+12)(a0)
1314 ; RV64IZCMP-NEXT: addi a5, a0, %lo(var0)
1315 ; RV64IZCMP-NEXT: lw t2, 16(a5)
1316 ; RV64IZCMP-NEXT: lw t3, 20(a5)
1317 ; RV64IZCMP-NEXT: lw t4, 24(a5)
1318 ; RV64IZCMP-NEXT: lw t5, 28(a5)
1319 ; RV64IZCMP-NEXT: lw t6, 48(a5)
1320 ; RV64IZCMP-NEXT: lw s2, 52(a5)
1321 ; RV64IZCMP-NEXT: lw a3, 56(a5)
1322 ; RV64IZCMP-NEXT: lw a4, 60(a5)
1323 ; RV64IZCMP-NEXT: lw a1, 64(a5)
1324 ; RV64IZCMP-NEXT: lw s0, 68(a5)
1325 ; RV64IZCMP-NEXT: lw s3, 32(a5)
1326 ; RV64IZCMP-NEXT: lw s4, 36(a5)
1327 ; RV64IZCMP-NEXT: lw s1, 40(a5)
1328 ; RV64IZCMP-NEXT: lw a2, 44(a5)
1329 ; RV64IZCMP-NEXT: sw s0, 68(a5)
1330 ; RV64IZCMP-NEXT: sw a1, 64(a5)
1331 ; RV64IZCMP-NEXT: sw a4, 60(a5)
1332 ; RV64IZCMP-NEXT: sw a3, 56(a5)
1333 ; RV64IZCMP-NEXT: sw s2, 52(a5)
1334 ; RV64IZCMP-NEXT: sw t6, 48(a5)
1335 ; RV64IZCMP-NEXT: sw a2, 44(a5)
1336 ; RV64IZCMP-NEXT: sw s1, 40(a5)
1337 ; RV64IZCMP-NEXT: sw s4, 36(a5)
1338 ; RV64IZCMP-NEXT: sw s3, 32(a5)
1339 ; RV64IZCMP-NEXT: sw t5, 28(a5)
1340 ; RV64IZCMP-NEXT: sw t4, 24(a5)
1341 ; RV64IZCMP-NEXT: sw t3, 20(a5)
1342 ; RV64IZCMP-NEXT: sw t2, 16(a5)
1343 ; RV64IZCMP-NEXT: sw t1, %lo(var0+12)(a0)
1344 ; RV64IZCMP-NEXT: sw t0, %lo(var0+8)(a0)
1345 ; RV64IZCMP-NEXT: sw a7, %lo(var0+4)(a0)
1346 ; RV64IZCMP-NEXT: sw a6, %lo(var0)(a0)
1347 ; RV64IZCMP-NEXT: cm.popret {ra, s0-s4}, 48
1349 ; RV32IZCMP-SR-LABEL: many_args:
1350 ; RV32IZCMP-SR: # %bb.0: # %entry
1351 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s4}, -32
1352 ; RV32IZCMP-SR-NEXT: lui a0, %hi(var0)
1353 ; RV32IZCMP-SR-NEXT: lw a6, %lo(var0)(a0)
1354 ; RV32IZCMP-SR-NEXT: lw a7, %lo(var0+4)(a0)
1355 ; RV32IZCMP-SR-NEXT: lw t0, %lo(var0+8)(a0)
1356 ; RV32IZCMP-SR-NEXT: lw t1, %lo(var0+12)(a0)
1357 ; RV32IZCMP-SR-NEXT: addi a5, a0, %lo(var0)
1358 ; RV32IZCMP-SR-NEXT: lw t2, 16(a5)
1359 ; RV32IZCMP-SR-NEXT: lw t3, 20(a5)
1360 ; RV32IZCMP-SR-NEXT: lw t4, 24(a5)
1361 ; RV32IZCMP-SR-NEXT: lw t5, 28(a5)
1362 ; RV32IZCMP-SR-NEXT: lw t6, 48(a5)
1363 ; RV32IZCMP-SR-NEXT: lw s2, 52(a5)
1364 ; RV32IZCMP-SR-NEXT: lw a3, 56(a5)
1365 ; RV32IZCMP-SR-NEXT: lw a4, 60(a5)
1366 ; RV32IZCMP-SR-NEXT: lw a1, 64(a5)
1367 ; RV32IZCMP-SR-NEXT: lw s0, 68(a5)
1368 ; RV32IZCMP-SR-NEXT: lw s3, 32(a5)
1369 ; RV32IZCMP-SR-NEXT: lw s4, 36(a5)
1370 ; RV32IZCMP-SR-NEXT: lw s1, 40(a5)
1371 ; RV32IZCMP-SR-NEXT: lw a2, 44(a5)
1372 ; RV32IZCMP-SR-NEXT: sw s0, 68(a5)
1373 ; RV32IZCMP-SR-NEXT: sw a1, 64(a5)
1374 ; RV32IZCMP-SR-NEXT: sw a4, 60(a5)
1375 ; RV32IZCMP-SR-NEXT: sw a3, 56(a5)
1376 ; RV32IZCMP-SR-NEXT: sw s2, 52(a5)
1377 ; RV32IZCMP-SR-NEXT: sw t6, 48(a5)
1378 ; RV32IZCMP-SR-NEXT: sw a2, 44(a5)
1379 ; RV32IZCMP-SR-NEXT: sw s1, 40(a5)
1380 ; RV32IZCMP-SR-NEXT: sw s4, 36(a5)
1381 ; RV32IZCMP-SR-NEXT: sw s3, 32(a5)
1382 ; RV32IZCMP-SR-NEXT: sw t5, 28(a5)
1383 ; RV32IZCMP-SR-NEXT: sw t4, 24(a5)
1384 ; RV32IZCMP-SR-NEXT: sw t3, 20(a5)
1385 ; RV32IZCMP-SR-NEXT: sw t2, 16(a5)
1386 ; RV32IZCMP-SR-NEXT: sw t1, %lo(var0+12)(a0)
1387 ; RV32IZCMP-SR-NEXT: sw t0, %lo(var0+8)(a0)
1388 ; RV32IZCMP-SR-NEXT: sw a7, %lo(var0+4)(a0)
1389 ; RV32IZCMP-SR-NEXT: sw a6, %lo(var0)(a0)
1390 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0-s4}, 32
1392 ; RV64IZCMP-SR-LABEL: many_args:
1393 ; RV64IZCMP-SR: # %bb.0: # %entry
1394 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s4}, -48
1395 ; RV64IZCMP-SR-NEXT: lui a0, %hi(var0)
1396 ; RV64IZCMP-SR-NEXT: lw a6, %lo(var0)(a0)
1397 ; RV64IZCMP-SR-NEXT: lw a7, %lo(var0+4)(a0)
1398 ; RV64IZCMP-SR-NEXT: lw t0, %lo(var0+8)(a0)
1399 ; RV64IZCMP-SR-NEXT: lw t1, %lo(var0+12)(a0)
1400 ; RV64IZCMP-SR-NEXT: addi a5, a0, %lo(var0)
1401 ; RV64IZCMP-SR-NEXT: lw t2, 16(a5)
1402 ; RV64IZCMP-SR-NEXT: lw t3, 20(a5)
1403 ; RV64IZCMP-SR-NEXT: lw t4, 24(a5)
1404 ; RV64IZCMP-SR-NEXT: lw t5, 28(a5)
1405 ; RV64IZCMP-SR-NEXT: lw t6, 48(a5)
1406 ; RV64IZCMP-SR-NEXT: lw s2, 52(a5)
1407 ; RV64IZCMP-SR-NEXT: lw a3, 56(a5)
1408 ; RV64IZCMP-SR-NEXT: lw a4, 60(a5)
1409 ; RV64IZCMP-SR-NEXT: lw a1, 64(a5)
1410 ; RV64IZCMP-SR-NEXT: lw s0, 68(a5)
1411 ; RV64IZCMP-SR-NEXT: lw s3, 32(a5)
1412 ; RV64IZCMP-SR-NEXT: lw s4, 36(a5)
1413 ; RV64IZCMP-SR-NEXT: lw s1, 40(a5)
1414 ; RV64IZCMP-SR-NEXT: lw a2, 44(a5)
1415 ; RV64IZCMP-SR-NEXT: sw s0, 68(a5)
1416 ; RV64IZCMP-SR-NEXT: sw a1, 64(a5)
1417 ; RV64IZCMP-SR-NEXT: sw a4, 60(a5)
1418 ; RV64IZCMP-SR-NEXT: sw a3, 56(a5)
1419 ; RV64IZCMP-SR-NEXT: sw s2, 52(a5)
1420 ; RV64IZCMP-SR-NEXT: sw t6, 48(a5)
1421 ; RV64IZCMP-SR-NEXT: sw a2, 44(a5)
1422 ; RV64IZCMP-SR-NEXT: sw s1, 40(a5)
1423 ; RV64IZCMP-SR-NEXT: sw s4, 36(a5)
1424 ; RV64IZCMP-SR-NEXT: sw s3, 32(a5)
1425 ; RV64IZCMP-SR-NEXT: sw t5, 28(a5)
1426 ; RV64IZCMP-SR-NEXT: sw t4, 24(a5)
1427 ; RV64IZCMP-SR-NEXT: sw t3, 20(a5)
1428 ; RV64IZCMP-SR-NEXT: sw t2, 16(a5)
1429 ; RV64IZCMP-SR-NEXT: sw t1, %lo(var0+12)(a0)
1430 ; RV64IZCMP-SR-NEXT: sw t0, %lo(var0+8)(a0)
1431 ; RV64IZCMP-SR-NEXT: sw a7, %lo(var0+4)(a0)
1432 ; RV64IZCMP-SR-NEXT: sw a6, %lo(var0)(a0)
1433 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0-s4}, 48
1435 ; RV32I-LABEL: many_args:
1436 ; RV32I: # %bb.0: # %entry
1437 ; RV32I-NEXT: addi sp, sp, -32
1438 ; RV32I-NEXT: sw s0, 28(sp) # 4-byte Folded Spill
1439 ; RV32I-NEXT: sw s1, 24(sp) # 4-byte Folded Spill
1440 ; RV32I-NEXT: sw s2, 20(sp) # 4-byte Folded Spill
1441 ; RV32I-NEXT: sw s3, 16(sp) # 4-byte Folded Spill
1442 ; RV32I-NEXT: sw s4, 12(sp) # 4-byte Folded Spill
1443 ; RV32I-NEXT: lui a0, %hi(var0)
1444 ; RV32I-NEXT: lw a1, %lo(var0)(a0)
1445 ; RV32I-NEXT: lw a2, %lo(var0+4)(a0)
1446 ; RV32I-NEXT: lw a3, %lo(var0+8)(a0)
1447 ; RV32I-NEXT: lw a4, %lo(var0+12)(a0)
1448 ; RV32I-NEXT: addi a5, a0, %lo(var0)
1449 ; RV32I-NEXT: lw a6, 16(a5)
1450 ; RV32I-NEXT: lw a7, 20(a5)
1451 ; RV32I-NEXT: lw t0, 24(a5)
1452 ; RV32I-NEXT: lw t1, 28(a5)
1453 ; RV32I-NEXT: lw t2, 48(a5)
1454 ; RV32I-NEXT: lw t3, 52(a5)
1455 ; RV32I-NEXT: lw t4, 56(a5)
1456 ; RV32I-NEXT: lw t5, 60(a5)
1457 ; RV32I-NEXT: lw t6, 64(a5)
1458 ; RV32I-NEXT: lw s0, 68(a5)
1459 ; RV32I-NEXT: lw s1, 32(a5)
1460 ; RV32I-NEXT: lw s2, 36(a5)
1461 ; RV32I-NEXT: lw s3, 40(a5)
1462 ; RV32I-NEXT: lw s4, 44(a5)
1463 ; RV32I-NEXT: sw s0, 68(a5)
1464 ; RV32I-NEXT: sw t6, 64(a5)
1465 ; RV32I-NEXT: sw t5, 60(a5)
1466 ; RV32I-NEXT: sw t4, 56(a5)
1467 ; RV32I-NEXT: sw t3, 52(a5)
1468 ; RV32I-NEXT: sw t2, 48(a5)
1469 ; RV32I-NEXT: sw s4, 44(a5)
1470 ; RV32I-NEXT: sw s3, 40(a5)
1471 ; RV32I-NEXT: sw s2, 36(a5)
1472 ; RV32I-NEXT: sw s1, 32(a5)
1473 ; RV32I-NEXT: sw t1, 28(a5)
1474 ; RV32I-NEXT: sw t0, 24(a5)
1475 ; RV32I-NEXT: sw a7, 20(a5)
1476 ; RV32I-NEXT: sw a6, 16(a5)
1477 ; RV32I-NEXT: sw a4, %lo(var0+12)(a0)
1478 ; RV32I-NEXT: sw a3, %lo(var0+8)(a0)
1479 ; RV32I-NEXT: sw a2, %lo(var0+4)(a0)
1480 ; RV32I-NEXT: sw a1, %lo(var0)(a0)
1481 ; RV32I-NEXT: lw s0, 28(sp) # 4-byte Folded Reload
1482 ; RV32I-NEXT: lw s1, 24(sp) # 4-byte Folded Reload
1483 ; RV32I-NEXT: lw s2, 20(sp) # 4-byte Folded Reload
1484 ; RV32I-NEXT: lw s3, 16(sp) # 4-byte Folded Reload
1485 ; RV32I-NEXT: lw s4, 12(sp) # 4-byte Folded Reload
1486 ; RV32I-NEXT: addi sp, sp, 32
1489 ; RV64I-LABEL: many_args:
1490 ; RV64I: # %bb.0: # %entry
1491 ; RV64I-NEXT: addi sp, sp, -48
1492 ; RV64I-NEXT: sd s0, 40(sp) # 8-byte Folded Spill
1493 ; RV64I-NEXT: sd s1, 32(sp) # 8-byte Folded Spill
1494 ; RV64I-NEXT: sd s2, 24(sp) # 8-byte Folded Spill
1495 ; RV64I-NEXT: sd s3, 16(sp) # 8-byte Folded Spill
1496 ; RV64I-NEXT: sd s4, 8(sp) # 8-byte Folded Spill
1497 ; RV64I-NEXT: lui a0, %hi(var0)
1498 ; RV64I-NEXT: lw a1, %lo(var0)(a0)
1499 ; RV64I-NEXT: lw a2, %lo(var0+4)(a0)
1500 ; RV64I-NEXT: lw a3, %lo(var0+8)(a0)
1501 ; RV64I-NEXT: lw a4, %lo(var0+12)(a0)
1502 ; RV64I-NEXT: addi a5, a0, %lo(var0)
1503 ; RV64I-NEXT: lw a6, 16(a5)
1504 ; RV64I-NEXT: lw a7, 20(a5)
1505 ; RV64I-NEXT: lw t0, 24(a5)
1506 ; RV64I-NEXT: lw t1, 28(a5)
1507 ; RV64I-NEXT: lw t2, 48(a5)
1508 ; RV64I-NEXT: lw t3, 52(a5)
1509 ; RV64I-NEXT: lw t4, 56(a5)
1510 ; RV64I-NEXT: lw t5, 60(a5)
1511 ; RV64I-NEXT: lw t6, 64(a5)
1512 ; RV64I-NEXT: lw s0, 68(a5)
1513 ; RV64I-NEXT: lw s1, 32(a5)
1514 ; RV64I-NEXT: lw s2, 36(a5)
1515 ; RV64I-NEXT: lw s3, 40(a5)
1516 ; RV64I-NEXT: lw s4, 44(a5)
1517 ; RV64I-NEXT: sw s0, 68(a5)
1518 ; RV64I-NEXT: sw t6, 64(a5)
1519 ; RV64I-NEXT: sw t5, 60(a5)
1520 ; RV64I-NEXT: sw t4, 56(a5)
1521 ; RV64I-NEXT: sw t3, 52(a5)
1522 ; RV64I-NEXT: sw t2, 48(a5)
1523 ; RV64I-NEXT: sw s4, 44(a5)
1524 ; RV64I-NEXT: sw s3, 40(a5)
1525 ; RV64I-NEXT: sw s2, 36(a5)
1526 ; RV64I-NEXT: sw s1, 32(a5)
1527 ; RV64I-NEXT: sw t1, 28(a5)
1528 ; RV64I-NEXT: sw t0, 24(a5)
1529 ; RV64I-NEXT: sw a7, 20(a5)
1530 ; RV64I-NEXT: sw a6, 16(a5)
1531 ; RV64I-NEXT: sw a4, %lo(var0+12)(a0)
1532 ; RV64I-NEXT: sw a3, %lo(var0+8)(a0)
1533 ; RV64I-NEXT: sw a2, %lo(var0+4)(a0)
1534 ; RV64I-NEXT: sw a1, %lo(var0)(a0)
1535 ; RV64I-NEXT: ld s0, 40(sp) # 8-byte Folded Reload
1536 ; RV64I-NEXT: ld s1, 32(sp) # 8-byte Folded Reload
1537 ; RV64I-NEXT: ld s2, 24(sp) # 8-byte Folded Reload
1538 ; RV64I-NEXT: ld s3, 16(sp) # 8-byte Folded Reload
1539 ; RV64I-NEXT: ld s4, 8(sp) # 8-byte Folded Reload
1540 ; RV64I-NEXT: addi sp, sp, 48
1543 %val = load [18 x i32], ptr @var0
1544 store volatile [18 x i32] %val, ptr @var0
1548 ; Check that the stack adjustments for dynamic allocation (alloca with
1549 ; llvm.stacksave/llvm.stackrestore) remain correct when Zcmp push/pop is used.
1550 declare ptr @llvm.stacksave()
1551 declare void @llvm.stackrestore(ptr)
1552 declare void @notdead(ptr)
1554 define void @alloca(i32 %n) nounwind {
1555 ; RV32IZCMP-LABEL: alloca:
1556 ; RV32IZCMP: # %bb.0:
1557 ; RV32IZCMP-NEXT: cm.push {ra, s0-s1}, -16
1558 ; RV32IZCMP-NEXT: addi s0, sp, 16
1559 ; RV32IZCMP-NEXT: mv s1, sp
1560 ; RV32IZCMP-NEXT: addi a0, a0, 15
1561 ; RV32IZCMP-NEXT: andi a0, a0, -16
1562 ; RV32IZCMP-NEXT: sub a0, sp, a0
1563 ; RV32IZCMP-NEXT: mv sp, a0
1564 ; RV32IZCMP-NEXT: call notdead
1565 ; RV32IZCMP-NEXT: mv sp, s1
1566 ; RV32IZCMP-NEXT: addi sp, s0, -16
1567 ; RV32IZCMP-NEXT: cm.popret {ra, s0-s1}, 16
1569 ; RV64IZCMP-LABEL: alloca:
1570 ; RV64IZCMP: # %bb.0:
1571 ; RV64IZCMP-NEXT: cm.push {ra, s0-s1}, -32
1572 ; RV64IZCMP-NEXT: addi s0, sp, 32
1573 ; RV64IZCMP-NEXT: mv s1, sp
1574 ; RV64IZCMP-NEXT: slli a0, a0, 32
1575 ; RV64IZCMP-NEXT: srli a0, a0, 32
1576 ; RV64IZCMP-NEXT: addi a0, a0, 15
1577 ; RV64IZCMP-NEXT: andi a0, a0, -16
1578 ; RV64IZCMP-NEXT: sub a0, sp, a0
1579 ; RV64IZCMP-NEXT: mv sp, a0
1580 ; RV64IZCMP-NEXT: call notdead
1581 ; RV64IZCMP-NEXT: mv sp, s1
1582 ; RV64IZCMP-NEXT: addi sp, s0, -32
1583 ; RV64IZCMP-NEXT: cm.popret {ra, s0-s1}, 32
1585 ; RV32IZCMP-SR-LABEL: alloca:
1586 ; RV32IZCMP-SR: # %bb.0:
1587 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s1}, -16
1588 ; RV32IZCMP-SR-NEXT: addi s0, sp, 16
1589 ; RV32IZCMP-SR-NEXT: mv s1, sp
1590 ; RV32IZCMP-SR-NEXT: addi a0, a0, 15
1591 ; RV32IZCMP-SR-NEXT: andi a0, a0, -16
1592 ; RV32IZCMP-SR-NEXT: sub a0, sp, a0
1593 ; RV32IZCMP-SR-NEXT: mv sp, a0
1594 ; RV32IZCMP-SR-NEXT: call notdead
1595 ; RV32IZCMP-SR-NEXT: mv sp, s1
1596 ; RV32IZCMP-SR-NEXT: addi sp, s0, -16
1597 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0-s1}, 16
1599 ; RV64IZCMP-SR-LABEL: alloca:
1600 ; RV64IZCMP-SR: # %bb.0:
1601 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s1}, -32
1602 ; RV64IZCMP-SR-NEXT: addi s0, sp, 32
1603 ; RV64IZCMP-SR-NEXT: mv s1, sp
1604 ; RV64IZCMP-SR-NEXT: slli a0, a0, 32
1605 ; RV64IZCMP-SR-NEXT: srli a0, a0, 32
1606 ; RV64IZCMP-SR-NEXT: addi a0, a0, 15
1607 ; RV64IZCMP-SR-NEXT: andi a0, a0, -16
1608 ; RV64IZCMP-SR-NEXT: sub a0, sp, a0
1609 ; RV64IZCMP-SR-NEXT: mv sp, a0
1610 ; RV64IZCMP-SR-NEXT: call notdead
1611 ; RV64IZCMP-SR-NEXT: mv sp, s1
1612 ; RV64IZCMP-SR-NEXT: addi sp, s0, -32
1613 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0-s1}, 32
1615 ; RV32I-LABEL: alloca:
1617 ; RV32I-NEXT: addi sp, sp, -16
1618 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1619 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
1620 ; RV32I-NEXT: sw s1, 4(sp) # 4-byte Folded Spill
1621 ; RV32I-NEXT: addi s0, sp, 16
1622 ; RV32I-NEXT: mv s1, sp
1623 ; RV32I-NEXT: addi a0, a0, 15
1624 ; RV32I-NEXT: andi a0, a0, -16
1625 ; RV32I-NEXT: sub a0, sp, a0
1626 ; RV32I-NEXT: mv sp, a0
1627 ; RV32I-NEXT: call notdead
1628 ; RV32I-NEXT: mv sp, s1
1629 ; RV32I-NEXT: addi sp, s0, -16
1630 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1631 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
1632 ; RV32I-NEXT: lw s1, 4(sp) # 4-byte Folded Reload
1633 ; RV32I-NEXT: addi sp, sp, 16
1636 ; RV64I-LABEL: alloca:
1638 ; RV64I-NEXT: addi sp, sp, -32
1639 ; RV64I-NEXT: sd ra, 24(sp) # 8-byte Folded Spill
1640 ; RV64I-NEXT: sd s0, 16(sp) # 8-byte Folded Spill
1641 ; RV64I-NEXT: sd s1, 8(sp) # 8-byte Folded Spill
1642 ; RV64I-NEXT: addi s0, sp, 32
1643 ; RV64I-NEXT: mv s1, sp
1644 ; RV64I-NEXT: slli a0, a0, 32
1645 ; RV64I-NEXT: srli a0, a0, 32
1646 ; RV64I-NEXT: addi a0, a0, 15
1647 ; RV64I-NEXT: andi a0, a0, -16
1648 ; RV64I-NEXT: sub a0, sp, a0
1649 ; RV64I-NEXT: mv sp, a0
1650 ; RV64I-NEXT: call notdead
1651 ; RV64I-NEXT: mv sp, s1
1652 ; RV64I-NEXT: addi sp, s0, -32
1653 ; RV64I-NEXT: ld ra, 24(sp) # 8-byte Folded Reload
1654 ; RV64I-NEXT: ld s0, 16(sp) # 8-byte Folded Reload
1655 ; RV64I-NEXT: ld s1, 8(sp) # 8-byte Folded Reload
1656 ; RV64I-NEXT: addi sp, sp, 32
1658 %sp = call ptr @llvm.stacksave()
1659 %addr = alloca i8, i32 %n
1660 call void @notdead(ptr %addr)
1661 call void @llvm.stackrestore(ptr %sp)
1665 declare i32 @foo_test_irq(...)
1666 @var_test_irq = global [32 x i32] zeroinitializer
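; With the "interrupt"="user" attribute, the caller-saved temporaries
; (t0-t2, a0-a7, t3-t6) must also be preserved across the call, so they are
; spilled to extra stack space in addition to the cm.push/cm.pop of ra, and
; the function returns with mret.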
1668 define void @foo_with_irq() nounwind "interrupt"="user" {
1669 ; RV32IZCMP-LABEL: foo_with_irq:
1670 ; RV32IZCMP: # %bb.0:
1671 ; RV32IZCMP-NEXT: cm.push {ra}, -64
1672 ; RV32IZCMP-NEXT: addi sp, sp, -16
1673 ; RV32IZCMP-NEXT: sw t0, 60(sp) # 4-byte Folded Spill
1674 ; RV32IZCMP-NEXT: sw t1, 56(sp) # 4-byte Folded Spill
1675 ; RV32IZCMP-NEXT: sw t2, 52(sp) # 4-byte Folded Spill
1676 ; RV32IZCMP-NEXT: sw a0, 48(sp) # 4-byte Folded Spill
1677 ; RV32IZCMP-NEXT: sw a1, 44(sp) # 4-byte Folded Spill
1678 ; RV32IZCMP-NEXT: sw a2, 40(sp) # 4-byte Folded Spill
1679 ; RV32IZCMP-NEXT: sw a3, 36(sp) # 4-byte Folded Spill
1680 ; RV32IZCMP-NEXT: sw a4, 32(sp) # 4-byte Folded Spill
1681 ; RV32IZCMP-NEXT: sw a5, 28(sp) # 4-byte Folded Spill
1682 ; RV32IZCMP-NEXT: sw a6, 24(sp) # 4-byte Folded Spill
1683 ; RV32IZCMP-NEXT: sw a7, 20(sp) # 4-byte Folded Spill
1684 ; RV32IZCMP-NEXT: sw t3, 16(sp) # 4-byte Folded Spill
1685 ; RV32IZCMP-NEXT: sw t4, 12(sp) # 4-byte Folded Spill
1686 ; RV32IZCMP-NEXT: sw t5, 8(sp) # 4-byte Folded Spill
1687 ; RV32IZCMP-NEXT: sw t6, 4(sp) # 4-byte Folded Spill
1688 ; RV32IZCMP-NEXT: call foo_test_irq
1689 ; RV32IZCMP-NEXT: lw t0, 60(sp) # 4-byte Folded Reload
1690 ; RV32IZCMP-NEXT: lw t1, 56(sp) # 4-byte Folded Reload
1691 ; RV32IZCMP-NEXT: lw t2, 52(sp) # 4-byte Folded Reload
1692 ; RV32IZCMP-NEXT: lw a0, 48(sp) # 4-byte Folded Reload
1693 ; RV32IZCMP-NEXT: lw a1, 44(sp) # 4-byte Folded Reload
1694 ; RV32IZCMP-NEXT: lw a2, 40(sp) # 4-byte Folded Reload
1695 ; RV32IZCMP-NEXT: lw a3, 36(sp) # 4-byte Folded Reload
1696 ; RV32IZCMP-NEXT: lw a4, 32(sp) # 4-byte Folded Reload
1697 ; RV32IZCMP-NEXT: lw a5, 28(sp) # 4-byte Folded Reload
1698 ; RV32IZCMP-NEXT: lw a6, 24(sp) # 4-byte Folded Reload
1699 ; RV32IZCMP-NEXT: lw a7, 20(sp) # 4-byte Folded Reload
1700 ; RV32IZCMP-NEXT: lw t3, 16(sp) # 4-byte Folded Reload
1701 ; RV32IZCMP-NEXT: lw t4, 12(sp) # 4-byte Folded Reload
1702 ; RV32IZCMP-NEXT: lw t5, 8(sp) # 4-byte Folded Reload
1703 ; RV32IZCMP-NEXT: lw t6, 4(sp) # 4-byte Folded Reload
1704 ; RV32IZCMP-NEXT: addi sp, sp, 16
1705 ; RV32IZCMP-NEXT: cm.pop {ra}, 64
1706 ; RV32IZCMP-NEXT: mret
1708 ; RV64IZCMP-LABEL: foo_with_irq:
1709 ; RV64IZCMP: # %bb.0:
1710 ; RV64IZCMP-NEXT: cm.push {ra}, -64
1711 ; RV64IZCMP-NEXT: addi sp, sp, -80
1712 ; RV64IZCMP-NEXT: sd t0, 120(sp) # 8-byte Folded Spill
1713 ; RV64IZCMP-NEXT: sd t1, 112(sp) # 8-byte Folded Spill
1714 ; RV64IZCMP-NEXT: sd t2, 104(sp) # 8-byte Folded Spill
1715 ; RV64IZCMP-NEXT: sd a0, 96(sp) # 8-byte Folded Spill
1716 ; RV64IZCMP-NEXT: sd a1, 88(sp) # 8-byte Folded Spill
1717 ; RV64IZCMP-NEXT: sd a2, 80(sp) # 8-byte Folded Spill
1718 ; RV64IZCMP-NEXT: sd a3, 72(sp) # 8-byte Folded Spill
1719 ; RV64IZCMP-NEXT: sd a4, 64(sp) # 8-byte Folded Spill
1720 ; RV64IZCMP-NEXT: sd a5, 56(sp) # 8-byte Folded Spill
1721 ; RV64IZCMP-NEXT: sd a6, 48(sp) # 8-byte Folded Spill
1722 ; RV64IZCMP-NEXT: sd a7, 40(sp) # 8-byte Folded Spill
1723 ; RV64IZCMP-NEXT: sd t3, 32(sp) # 8-byte Folded Spill
1724 ; RV64IZCMP-NEXT: sd t4, 24(sp) # 8-byte Folded Spill
1725 ; RV64IZCMP-NEXT: sd t5, 16(sp) # 8-byte Folded Spill
1726 ; RV64IZCMP-NEXT: sd t6, 8(sp) # 8-byte Folded Spill
1727 ; RV64IZCMP-NEXT: call foo_test_irq
1728 ; RV64IZCMP-NEXT: ld t0, 120(sp) # 8-byte Folded Reload
1729 ; RV64IZCMP-NEXT: ld t1, 112(sp) # 8-byte Folded Reload
1730 ; RV64IZCMP-NEXT: ld t2, 104(sp) # 8-byte Folded Reload
1731 ; RV64IZCMP-NEXT: ld a0, 96(sp) # 8-byte Folded Reload
1732 ; RV64IZCMP-NEXT: ld a1, 88(sp) # 8-byte Folded Reload
1733 ; RV64IZCMP-NEXT: ld a2, 80(sp) # 8-byte Folded Reload
1734 ; RV64IZCMP-NEXT: ld a3, 72(sp) # 8-byte Folded Reload
1735 ; RV64IZCMP-NEXT: ld a4, 64(sp) # 8-byte Folded Reload
1736 ; RV64IZCMP-NEXT: ld a5, 56(sp) # 8-byte Folded Reload
1737 ; RV64IZCMP-NEXT: ld a6, 48(sp) # 8-byte Folded Reload
1738 ; RV64IZCMP-NEXT: ld a7, 40(sp) # 8-byte Folded Reload
1739 ; RV64IZCMP-NEXT: ld t3, 32(sp) # 8-byte Folded Reload
1740 ; RV64IZCMP-NEXT: ld t4, 24(sp) # 8-byte Folded Reload
1741 ; RV64IZCMP-NEXT: ld t5, 16(sp) # 8-byte Folded Reload
1742 ; RV64IZCMP-NEXT: ld t6, 8(sp) # 8-byte Folded Reload
1743 ; RV64IZCMP-NEXT: addi sp, sp, 80
1744 ; RV64IZCMP-NEXT: cm.pop {ra}, 64
1745 ; RV64IZCMP-NEXT: mret
1747 ; RV32IZCMP-SR-LABEL: foo_with_irq:
1748 ; RV32IZCMP-SR: # %bb.0:
1749 ; RV32IZCMP-SR-NEXT: cm.push {ra}, -64
1750 ; RV32IZCMP-SR-NEXT: addi sp, sp, -16
1751 ; RV32IZCMP-SR-NEXT: sw t0, 60(sp) # 4-byte Folded Spill
1752 ; RV32IZCMP-SR-NEXT: sw t1, 56(sp) # 4-byte Folded Spill
1753 ; RV32IZCMP-SR-NEXT: sw t2, 52(sp) # 4-byte Folded Spill
1754 ; RV32IZCMP-SR-NEXT: sw a0, 48(sp) # 4-byte Folded Spill
1755 ; RV32IZCMP-SR-NEXT: sw a1, 44(sp) # 4-byte Folded Spill
1756 ; RV32IZCMP-SR-NEXT: sw a2, 40(sp) # 4-byte Folded Spill
1757 ; RV32IZCMP-SR-NEXT: sw a3, 36(sp) # 4-byte Folded Spill
1758 ; RV32IZCMP-SR-NEXT: sw a4, 32(sp) # 4-byte Folded Spill
1759 ; RV32IZCMP-SR-NEXT: sw a5, 28(sp) # 4-byte Folded Spill
1760 ; RV32IZCMP-SR-NEXT: sw a6, 24(sp) # 4-byte Folded Spill
1761 ; RV32IZCMP-SR-NEXT: sw a7, 20(sp) # 4-byte Folded Spill
1762 ; RV32IZCMP-SR-NEXT: sw t3, 16(sp) # 4-byte Folded Spill
1763 ; RV32IZCMP-SR-NEXT: sw t4, 12(sp) # 4-byte Folded Spill
1764 ; RV32IZCMP-SR-NEXT: sw t5, 8(sp) # 4-byte Folded Spill
1765 ; RV32IZCMP-SR-NEXT: sw t6, 4(sp) # 4-byte Folded Spill
1766 ; RV32IZCMP-SR-NEXT: call foo_test_irq
1767 ; RV32IZCMP-SR-NEXT: lw t0, 60(sp) # 4-byte Folded Reload
1768 ; RV32IZCMP-SR-NEXT: lw t1, 56(sp) # 4-byte Folded Reload
1769 ; RV32IZCMP-SR-NEXT: lw t2, 52(sp) # 4-byte Folded Reload
1770 ; RV32IZCMP-SR-NEXT: lw a0, 48(sp) # 4-byte Folded Reload
1771 ; RV32IZCMP-SR-NEXT: lw a1, 44(sp) # 4-byte Folded Reload
1772 ; RV32IZCMP-SR-NEXT: lw a2, 40(sp) # 4-byte Folded Reload
1773 ; RV32IZCMP-SR-NEXT: lw a3, 36(sp) # 4-byte Folded Reload
1774 ; RV32IZCMP-SR-NEXT: lw a4, 32(sp) # 4-byte Folded Reload
1775 ; RV32IZCMP-SR-NEXT: lw a5, 28(sp) # 4-byte Folded Reload
1776 ; RV32IZCMP-SR-NEXT: lw a6, 24(sp) # 4-byte Folded Reload
1777 ; RV32IZCMP-SR-NEXT: lw a7, 20(sp) # 4-byte Folded Reload
1778 ; RV32IZCMP-SR-NEXT: lw t3, 16(sp) # 4-byte Folded Reload
1779 ; RV32IZCMP-SR-NEXT: lw t4, 12(sp) # 4-byte Folded Reload
1780 ; RV32IZCMP-SR-NEXT: lw t5, 8(sp) # 4-byte Folded Reload
1781 ; RV32IZCMP-SR-NEXT: lw t6, 4(sp) # 4-byte Folded Reload
1782 ; RV32IZCMP-SR-NEXT: addi sp, sp, 16
1783 ; RV32IZCMP-SR-NEXT: cm.pop {ra}, 64
1784 ; RV32IZCMP-SR-NEXT: mret
1786 ; RV64IZCMP-SR-LABEL: foo_with_irq:
1787 ; RV64IZCMP-SR: # %bb.0:
1788 ; RV64IZCMP-SR-NEXT: cm.push {ra}, -64
1789 ; RV64IZCMP-SR-NEXT: addi sp, sp, -80
1790 ; RV64IZCMP-SR-NEXT: sd t0, 120(sp) # 8-byte Folded Spill
1791 ; RV64IZCMP-SR-NEXT: sd t1, 112(sp) # 8-byte Folded Spill
1792 ; RV64IZCMP-SR-NEXT: sd t2, 104(sp) # 8-byte Folded Spill
1793 ; RV64IZCMP-SR-NEXT: sd a0, 96(sp) # 8-byte Folded Spill
1794 ; RV64IZCMP-SR-NEXT: sd a1, 88(sp) # 8-byte Folded Spill
1795 ; RV64IZCMP-SR-NEXT: sd a2, 80(sp) # 8-byte Folded Spill
1796 ; RV64IZCMP-SR-NEXT: sd a3, 72(sp) # 8-byte Folded Spill
1797 ; RV64IZCMP-SR-NEXT: sd a4, 64(sp) # 8-byte Folded Spill
1798 ; RV64IZCMP-SR-NEXT: sd a5, 56(sp) # 8-byte Folded Spill
1799 ; RV64IZCMP-SR-NEXT: sd a6, 48(sp) # 8-byte Folded Spill
1800 ; RV64IZCMP-SR-NEXT: sd a7, 40(sp) # 8-byte Folded Spill
1801 ; RV64IZCMP-SR-NEXT: sd t3, 32(sp) # 8-byte Folded Spill
1802 ; RV64IZCMP-SR-NEXT: sd t4, 24(sp) # 8-byte Folded Spill
1803 ; RV64IZCMP-SR-NEXT: sd t5, 16(sp) # 8-byte Folded Spill
1804 ; RV64IZCMP-SR-NEXT: sd t6, 8(sp) # 8-byte Folded Spill
1805 ; RV64IZCMP-SR-NEXT: call foo_test_irq
1806 ; RV64IZCMP-SR-NEXT: ld t0, 120(sp) # 8-byte Folded Reload
1807 ; RV64IZCMP-SR-NEXT: ld t1, 112(sp) # 8-byte Folded Reload
1808 ; RV64IZCMP-SR-NEXT: ld t2, 104(sp) # 8-byte Folded Reload
1809 ; RV64IZCMP-SR-NEXT: ld a0, 96(sp) # 8-byte Folded Reload
1810 ; RV64IZCMP-SR-NEXT: ld a1, 88(sp) # 8-byte Folded Reload
1811 ; RV64IZCMP-SR-NEXT: ld a2, 80(sp) # 8-byte Folded Reload
1812 ; RV64IZCMP-SR-NEXT: ld a3, 72(sp) # 8-byte Folded Reload
1813 ; RV64IZCMP-SR-NEXT: ld a4, 64(sp) # 8-byte Folded Reload
1814 ; RV64IZCMP-SR-NEXT: ld a5, 56(sp) # 8-byte Folded Reload
1815 ; RV64IZCMP-SR-NEXT: ld a6, 48(sp) # 8-byte Folded Reload
1816 ; RV64IZCMP-SR-NEXT: ld a7, 40(sp) # 8-byte Folded Reload
1817 ; RV64IZCMP-SR-NEXT: ld t3, 32(sp) # 8-byte Folded Reload
1818 ; RV64IZCMP-SR-NEXT: ld t4, 24(sp) # 8-byte Folded Reload
1819 ; RV64IZCMP-SR-NEXT: ld t5, 16(sp) # 8-byte Folded Reload
1820 ; RV64IZCMP-SR-NEXT: ld t6, 8(sp) # 8-byte Folded Reload
1821 ; RV64IZCMP-SR-NEXT: addi sp, sp, 80
1822 ; RV64IZCMP-SR-NEXT: cm.pop {ra}, 64
1823 ; RV64IZCMP-SR-NEXT: mret
1825 ; RV32I-LABEL: foo_with_irq:
1827 ; RV32I-NEXT: addi sp, sp, -64
1828 ; RV32I-NEXT: sw ra, 60(sp) # 4-byte Folded Spill
1829 ; RV32I-NEXT: sw t0, 56(sp) # 4-byte Folded Spill
1830 ; RV32I-NEXT: sw t1, 52(sp) # 4-byte Folded Spill
1831 ; RV32I-NEXT: sw t2, 48(sp) # 4-byte Folded Spill
1832 ; RV32I-NEXT: sw a0, 44(sp) # 4-byte Folded Spill
1833 ; RV32I-NEXT: sw a1, 40(sp) # 4-byte Folded Spill
1834 ; RV32I-NEXT: sw a2, 36(sp) # 4-byte Folded Spill
1835 ; RV32I-NEXT: sw a3, 32(sp) # 4-byte Folded Spill
1836 ; RV32I-NEXT: sw a4, 28(sp) # 4-byte Folded Spill
1837 ; RV32I-NEXT: sw a5, 24(sp) # 4-byte Folded Spill
1838 ; RV32I-NEXT: sw a6, 20(sp) # 4-byte Folded Spill
1839 ; RV32I-NEXT: sw a7, 16(sp) # 4-byte Folded Spill
1840 ; RV32I-NEXT: sw t3, 12(sp) # 4-byte Folded Spill
1841 ; RV32I-NEXT: sw t4, 8(sp) # 4-byte Folded Spill
1842 ; RV32I-NEXT: sw t5, 4(sp) # 4-byte Folded Spill
1843 ; RV32I-NEXT: sw t6, 0(sp) # 4-byte Folded Spill
1844 ; RV32I-NEXT: call foo_test_irq
1845 ; RV32I-NEXT: lw ra, 60(sp) # 4-byte Folded Reload
1846 ; RV32I-NEXT: lw t0, 56(sp) # 4-byte Folded Reload
1847 ; RV32I-NEXT: lw t1, 52(sp) # 4-byte Folded Reload
1848 ; RV32I-NEXT: lw t2, 48(sp) # 4-byte Folded Reload
1849 ; RV32I-NEXT: lw a0, 44(sp) # 4-byte Folded Reload
1850 ; RV32I-NEXT: lw a1, 40(sp) # 4-byte Folded Reload
1851 ; RV32I-NEXT: lw a2, 36(sp) # 4-byte Folded Reload
1852 ; RV32I-NEXT: lw a3, 32(sp) # 4-byte Folded Reload
1853 ; RV32I-NEXT: lw a4, 28(sp) # 4-byte Folded Reload
1854 ; RV32I-NEXT: lw a5, 24(sp) # 4-byte Folded Reload
1855 ; RV32I-NEXT: lw a6, 20(sp) # 4-byte Folded Reload
1856 ; RV32I-NEXT: lw a7, 16(sp) # 4-byte Folded Reload
1857 ; RV32I-NEXT: lw t3, 12(sp) # 4-byte Folded Reload
1858 ; RV32I-NEXT: lw t4, 8(sp) # 4-byte Folded Reload
1859 ; RV32I-NEXT: lw t5, 4(sp) # 4-byte Folded Reload
1860 ; RV32I-NEXT: lw t6, 0(sp) # 4-byte Folded Reload
1861 ; RV32I-NEXT: addi sp, sp, 64
1864 ; RV64I-LABEL: foo_with_irq:
1866 ; RV64I-NEXT: addi sp, sp, -128
1867 ; RV64I-NEXT: sd ra, 120(sp) # 8-byte Folded Spill
1868 ; RV64I-NEXT: sd t0, 112(sp) # 8-byte Folded Spill
1869 ; RV64I-NEXT: sd t1, 104(sp) # 8-byte Folded Spill
1870 ; RV64I-NEXT: sd t2, 96(sp) # 8-byte Folded Spill
1871 ; RV64I-NEXT: sd a0, 88(sp) # 8-byte Folded Spill
1872 ; RV64I-NEXT: sd a1, 80(sp) # 8-byte Folded Spill
1873 ; RV64I-NEXT: sd a2, 72(sp) # 8-byte Folded Spill
1874 ; RV64I-NEXT: sd a3, 64(sp) # 8-byte Folded Spill
1875 ; RV64I-NEXT: sd a4, 56(sp) # 8-byte Folded Spill
1876 ; RV64I-NEXT: sd a5, 48(sp) # 8-byte Folded Spill
1877 ; RV64I-NEXT: sd a6, 40(sp) # 8-byte Folded Spill
1878 ; RV64I-NEXT: sd a7, 32(sp) # 8-byte Folded Spill
1879 ; RV64I-NEXT: sd t3, 24(sp) # 8-byte Folded Spill
1880 ; RV64I-NEXT: sd t4, 16(sp) # 8-byte Folded Spill
1881 ; RV64I-NEXT: sd t5, 8(sp) # 8-byte Folded Spill
1882 ; RV64I-NEXT: sd t6, 0(sp) # 8-byte Folded Spill
1883 ; RV64I-NEXT: call foo_test_irq
1884 ; RV64I-NEXT: ld ra, 120(sp) # 8-byte Folded Reload
1885 ; RV64I-NEXT: ld t0, 112(sp) # 8-byte Folded Reload
1886 ; RV64I-NEXT: ld t1, 104(sp) # 8-byte Folded Reload
1887 ; RV64I-NEXT: ld t2, 96(sp) # 8-byte Folded Reload
1888 ; RV64I-NEXT: ld a0, 88(sp) # 8-byte Folded Reload
1889 ; RV64I-NEXT: ld a1, 80(sp) # 8-byte Folded Reload
1890 ; RV64I-NEXT: ld a2, 72(sp) # 8-byte Folded Reload
1891 ; RV64I-NEXT: ld a3, 64(sp) # 8-byte Folded Reload
1892 ; RV64I-NEXT: ld a4, 56(sp) # 8-byte Folded Reload
1893 ; RV64I-NEXT: ld a5, 48(sp) # 8-byte Folded Reload
1894 ; RV64I-NEXT: ld a6, 40(sp) # 8-byte Folded Reload
1895 ; RV64I-NEXT: ld a7, 32(sp) # 8-byte Folded Reload
1896 ; RV64I-NEXT: ld t3, 24(sp) # 8-byte Folded Reload
1897 ; RV64I-NEXT: ld t4, 16(sp) # 8-byte Folded Reload
1898 ; RV64I-NEXT: ld t5, 8(sp) # 8-byte Folded Reload
1899 ; RV64I-NEXT: ld t6, 0(sp) # 8-byte Folded Reload
1900 ; RV64I-NEXT: addi sp, sp, 128
1902 %call = call i32 @foo_test_irq()
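; The same call without the interrupt attribute only needs ra to be saved,
; which cm.push/cm.popret handle with no extra temporary spills.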
1906 define void @foo_no_irq() nounwind {
1907 ; RV32IZCMP-LABEL: foo_no_irq:
1908 ; RV32IZCMP: # %bb.0:
1909 ; RV32IZCMP-NEXT: cm.push {ra}, -16
1910 ; RV32IZCMP-NEXT: call foo_test_irq
1911 ; RV32IZCMP-NEXT: cm.popret {ra}, 16
1913 ; RV64IZCMP-LABEL: foo_no_irq:
1914 ; RV64IZCMP: # %bb.0:
1915 ; RV64IZCMP-NEXT: cm.push {ra}, -16
1916 ; RV64IZCMP-NEXT: call foo_test_irq
1917 ; RV64IZCMP-NEXT: cm.popret {ra}, 16
1919 ; RV32IZCMP-SR-LABEL: foo_no_irq:
1920 ; RV32IZCMP-SR: # %bb.0:
1921 ; RV32IZCMP-SR-NEXT: cm.push {ra}, -16
1922 ; RV32IZCMP-SR-NEXT: call foo_test_irq
1923 ; RV32IZCMP-SR-NEXT: cm.popret {ra}, 16
1925 ; RV64IZCMP-SR-LABEL: foo_no_irq:
1926 ; RV64IZCMP-SR: # %bb.0:
1927 ; RV64IZCMP-SR-NEXT: cm.push {ra}, -16
1928 ; RV64IZCMP-SR-NEXT: call foo_test_irq
1929 ; RV64IZCMP-SR-NEXT: cm.popret {ra}, 16
1931 ; RV32I-LABEL: foo_no_irq:
1933 ; RV32I-NEXT: addi sp, sp, -16
1934 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
1935 ; RV32I-NEXT: call foo_test_irq
1936 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
1937 ; RV32I-NEXT: addi sp, sp, 16
1940 ; RV64I-LABEL: foo_no_irq:
1942 ; RV64I-NEXT: addi sp, sp, -16
1943 ; RV64I-NEXT: sd ra, 8(sp) # 8-byte Folded Spill
1944 ; RV64I-NEXT: call foo_test_irq
1945 ; RV64I-NEXT: ld ra, 8(sp) # 8-byte Folded Reload
1946 ; RV64I-NEXT: addi sp, sp, 16
1948 %call = call i32 @foo_test_irq()
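; Interrupt handler that touches all of var_test_irq: cm.push/cm.pop save ra
; and s0-s11, the temporaries are spilled manually to additional stack space,
; and the function returns with mret.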
1952 define void @callee_with_irq() nounwind "interrupt"="user" {
1953 ; RV32IZCMP-LABEL: callee_with_irq:
1954 ; RV32IZCMP: # %bb.0:
1955 ; RV32IZCMP-NEXT: cm.push {ra, s0-s11}, -112
1956 ; RV32IZCMP-NEXT: addi sp, sp, -48
1957 ; RV32IZCMP-NEXT: sw t0, 92(sp) # 4-byte Folded Spill
1958 ; RV32IZCMP-NEXT: sw t1, 88(sp) # 4-byte Folded Spill
1959 ; RV32IZCMP-NEXT: sw t2, 84(sp) # 4-byte Folded Spill
1960 ; RV32IZCMP-NEXT: sw a0, 80(sp) # 4-byte Folded Spill
1961 ; RV32IZCMP-NEXT: sw a1, 76(sp) # 4-byte Folded Spill
1962 ; RV32IZCMP-NEXT: sw a2, 72(sp) # 4-byte Folded Spill
1963 ; RV32IZCMP-NEXT: sw a3, 68(sp) # 4-byte Folded Spill
1964 ; RV32IZCMP-NEXT: sw a4, 64(sp) # 4-byte Folded Spill
1965 ; RV32IZCMP-NEXT: sw a5, 60(sp) # 4-byte Folded Spill
1966 ; RV32IZCMP-NEXT: sw a6, 56(sp) # 4-byte Folded Spill
1967 ; RV32IZCMP-NEXT: sw a7, 52(sp) # 4-byte Folded Spill
1968 ; RV32IZCMP-NEXT: sw t3, 48(sp) # 4-byte Folded Spill
1969 ; RV32IZCMP-NEXT: sw t4, 44(sp) # 4-byte Folded Spill
1970 ; RV32IZCMP-NEXT: sw t5, 40(sp) # 4-byte Folded Spill
1971 ; RV32IZCMP-NEXT: sw t6, 36(sp) # 4-byte Folded Spill
1972 ; RV32IZCMP-NEXT: lui t0, %hi(var_test_irq)
1973 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq)(t0)
1974 ; RV32IZCMP-NEXT: sw a0, 32(sp) # 4-byte Folded Spill
1975 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq+4)(t0)
1976 ; RV32IZCMP-NEXT: sw a0, 28(sp) # 4-byte Folded Spill
1977 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq+8)(t0)
1978 ; RV32IZCMP-NEXT: sw a0, 24(sp) # 4-byte Folded Spill
1979 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq+12)(t0)
1980 ; RV32IZCMP-NEXT: sw a0, 20(sp) # 4-byte Folded Spill
1981 ; RV32IZCMP-NEXT: addi a5, t0, %lo(var_test_irq)
1982 ; RV32IZCMP-NEXT: lw a0, 16(a5)
1983 ; RV32IZCMP-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
1984 ; RV32IZCMP-NEXT: lw a0, 20(a5)
1985 ; RV32IZCMP-NEXT: sw a0, 12(sp) # 4-byte Folded Spill
1986 ; RV32IZCMP-NEXT: lw t4, 24(a5)
1987 ; RV32IZCMP-NEXT: lw t5, 28(a5)
1988 ; RV32IZCMP-NEXT: lw t6, 32(a5)
1989 ; RV32IZCMP-NEXT: lw s2, 36(a5)
1990 ; RV32IZCMP-NEXT: lw s3, 40(a5)
1991 ; RV32IZCMP-NEXT: lw s4, 44(a5)
1992 ; RV32IZCMP-NEXT: lw s5, 48(a5)
1993 ; RV32IZCMP-NEXT: lw s6, 52(a5)
1994 ; RV32IZCMP-NEXT: lw s7, 56(a5)
1995 ; RV32IZCMP-NEXT: lw s8, 60(a5)
1996 ; RV32IZCMP-NEXT: lw s9, 64(a5)
1997 ; RV32IZCMP-NEXT: lw s10, 68(a5)
1998 ; RV32IZCMP-NEXT: lw s11, 72(a5)
1999 ; RV32IZCMP-NEXT: lw ra, 76(a5)
2000 ; RV32IZCMP-NEXT: lw s1, 80(a5)
2001 ; RV32IZCMP-NEXT: lw t3, 84(a5)
2002 ; RV32IZCMP-NEXT: lw t2, 88(a5)
2003 ; RV32IZCMP-NEXT: lw t1, 92(a5)
2004 ; RV32IZCMP-NEXT: lw a7, 112(a5)
2005 ; RV32IZCMP-NEXT: lw s0, 116(a5)
2006 ; RV32IZCMP-NEXT: lw a3, 120(a5)
2007 ; RV32IZCMP-NEXT: lw a0, 124(a5)
2008 ; RV32IZCMP-NEXT: lw a6, 96(a5)
2009 ; RV32IZCMP-NEXT: lw a4, 100(a5)
2010 ; RV32IZCMP-NEXT: lw a2, 104(a5)
2011 ; RV32IZCMP-NEXT: lw a1, 108(a5)
2012 ; RV32IZCMP-NEXT: sw a0, 124(a5)
2013 ; RV32IZCMP-NEXT: sw a3, 120(a5)
2014 ; RV32IZCMP-NEXT: sw s0, 116(a5)
2015 ; RV32IZCMP-NEXT: sw a7, 112(a5)
2016 ; RV32IZCMP-NEXT: sw a1, 108(a5)
2017 ; RV32IZCMP-NEXT: sw a2, 104(a5)
2018 ; RV32IZCMP-NEXT: sw a4, 100(a5)
2019 ; RV32IZCMP-NEXT: sw a6, 96(a5)
2020 ; RV32IZCMP-NEXT: sw t1, 92(a5)
2021 ; RV32IZCMP-NEXT: sw t2, 88(a5)
2022 ; RV32IZCMP-NEXT: sw t3, 84(a5)
2023 ; RV32IZCMP-NEXT: sw s1, 80(a5)
2024 ; RV32IZCMP-NEXT: sw ra, 76(a5)
2025 ; RV32IZCMP-NEXT: sw s11, 72(a5)
2026 ; RV32IZCMP-NEXT: sw s10, 68(a5)
2027 ; RV32IZCMP-NEXT: sw s9, 64(a5)
2028 ; RV32IZCMP-NEXT: sw s8, 60(a5)
2029 ; RV32IZCMP-NEXT: sw s7, 56(a5)
2030 ; RV32IZCMP-NEXT: sw s6, 52(a5)
2031 ; RV32IZCMP-NEXT: sw s5, 48(a5)
2032 ; RV32IZCMP-NEXT: sw s4, 44(a5)
2033 ; RV32IZCMP-NEXT: sw s3, 40(a5)
2034 ; RV32IZCMP-NEXT: sw s2, 36(a5)
2035 ; RV32IZCMP-NEXT: sw t6, 32(a5)
2036 ; RV32IZCMP-NEXT: sw t5, 28(a5)
2037 ; RV32IZCMP-NEXT: sw t4, 24(a5)
2038 ; RV32IZCMP-NEXT: lw a0, 12(sp) # 4-byte Folded Reload
2039 ; RV32IZCMP-NEXT: sw a0, 20(a5)
2040 ; RV32IZCMP-NEXT: lw a0, 16(sp) # 4-byte Folded Reload
2041 ; RV32IZCMP-NEXT: sw a0, 16(a5)
2042 ; RV32IZCMP-NEXT: lw a0, 20(sp) # 4-byte Folded Reload
2043 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2044 ; RV32IZCMP-NEXT: lw a0, 24(sp) # 4-byte Folded Reload
2045 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2046 ; RV32IZCMP-NEXT: lw a0, 28(sp) # 4-byte Folded Reload
2047 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2048 ; RV32IZCMP-NEXT: lw a0, 32(sp) # 4-byte Folded Reload
2049 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq)(t0)
2050 ; RV32IZCMP-NEXT: lw t0, 92(sp) # 4-byte Folded Reload
2051 ; RV32IZCMP-NEXT: lw t1, 88(sp) # 4-byte Folded Reload
2052 ; RV32IZCMP-NEXT: lw t2, 84(sp) # 4-byte Folded Reload
2053 ; RV32IZCMP-NEXT: lw a0, 80(sp) # 4-byte Folded Reload
2054 ; RV32IZCMP-NEXT: lw a1, 76(sp) # 4-byte Folded Reload
2055 ; RV32IZCMP-NEXT: lw a2, 72(sp) # 4-byte Folded Reload
2056 ; RV32IZCMP-NEXT: lw a3, 68(sp) # 4-byte Folded Reload
2057 ; RV32IZCMP-NEXT: lw a4, 64(sp) # 4-byte Folded Reload
2058 ; RV32IZCMP-NEXT: lw a5, 60(sp) # 4-byte Folded Reload
2059 ; RV32IZCMP-NEXT: lw a6, 56(sp) # 4-byte Folded Reload
2060 ; RV32IZCMP-NEXT: lw a7, 52(sp) # 4-byte Folded Reload
2061 ; RV32IZCMP-NEXT: lw t3, 48(sp) # 4-byte Folded Reload
2062 ; RV32IZCMP-NEXT: lw t4, 44(sp) # 4-byte Folded Reload
2063 ; RV32IZCMP-NEXT: lw t5, 40(sp) # 4-byte Folded Reload
2064 ; RV32IZCMP-NEXT: lw t6, 36(sp) # 4-byte Folded Reload
2065 ; RV32IZCMP-NEXT: addi sp, sp, 48
2066 ; RV32IZCMP-NEXT: cm.pop {ra, s0-s11}, 112
2067 ; RV32IZCMP-NEXT: mret
2069 ; RV64IZCMP-LABEL: callee_with_irq:
2070 ; RV64IZCMP: # %bb.0:
2071 ; RV64IZCMP-NEXT: cm.push {ra, s0-s11}, -160
2072 ; RV64IZCMP-NEXT: addi sp, sp, -128
2073 ; RV64IZCMP-NEXT: sd t0, 168(sp) # 8-byte Folded Spill
2074 ; RV64IZCMP-NEXT: sd t1, 160(sp) # 8-byte Folded Spill
2075 ; RV64IZCMP-NEXT: sd t2, 152(sp) # 8-byte Folded Spill
2076 ; RV64IZCMP-NEXT: sd a0, 144(sp) # 8-byte Folded Spill
2077 ; RV64IZCMP-NEXT: sd a1, 136(sp) # 8-byte Folded Spill
2078 ; RV64IZCMP-NEXT: sd a2, 128(sp) # 8-byte Folded Spill
2079 ; RV64IZCMP-NEXT: sd a3, 120(sp) # 8-byte Folded Spill
2080 ; RV64IZCMP-NEXT: sd a4, 112(sp) # 8-byte Folded Spill
2081 ; RV64IZCMP-NEXT: sd a5, 104(sp) # 8-byte Folded Spill
2082 ; RV64IZCMP-NEXT: sd a6, 96(sp) # 8-byte Folded Spill
2083 ; RV64IZCMP-NEXT: sd a7, 88(sp) # 8-byte Folded Spill
2084 ; RV64IZCMP-NEXT: sd t3, 80(sp) # 8-byte Folded Spill
2085 ; RV64IZCMP-NEXT: sd t4, 72(sp) # 8-byte Folded Spill
2086 ; RV64IZCMP-NEXT: sd t5, 64(sp) # 8-byte Folded Spill
2087 ; RV64IZCMP-NEXT: sd t6, 56(sp) # 8-byte Folded Spill
2088 ; RV64IZCMP-NEXT: lui t0, %hi(var_test_irq)
2089 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq)(t0)
2090 ; RV64IZCMP-NEXT: sd a0, 48(sp) # 8-byte Folded Spill
2091 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2092 ; RV64IZCMP-NEXT: sd a0, 40(sp) # 8-byte Folded Spill
2093 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2094 ; RV64IZCMP-NEXT: sd a0, 32(sp) # 8-byte Folded Spill
2095 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2096 ; RV64IZCMP-NEXT: sd a0, 24(sp) # 8-byte Folded Spill
2097 ; RV64IZCMP-NEXT: addi a5, t0, %lo(var_test_irq)
2098 ; RV64IZCMP-NEXT: lw a0, 16(a5)
2099 ; RV64IZCMP-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
2100 ; RV64IZCMP-NEXT: lw a0, 20(a5)
2101 ; RV64IZCMP-NEXT: sd a0, 8(sp) # 8-byte Folded Spill
2102 ; RV64IZCMP-NEXT: lw t4, 24(a5)
2103 ; RV64IZCMP-NEXT: lw t5, 28(a5)
2104 ; RV64IZCMP-NEXT: lw t6, 32(a5)
2105 ; RV64IZCMP-NEXT: lw s2, 36(a5)
2106 ; RV64IZCMP-NEXT: lw s3, 40(a5)
2107 ; RV64IZCMP-NEXT: lw s4, 44(a5)
2108 ; RV64IZCMP-NEXT: lw s5, 48(a5)
2109 ; RV64IZCMP-NEXT: lw s6, 52(a5)
2110 ; RV64IZCMP-NEXT: lw s7, 56(a5)
2111 ; RV64IZCMP-NEXT: lw s8, 60(a5)
2112 ; RV64IZCMP-NEXT: lw s9, 64(a5)
2113 ; RV64IZCMP-NEXT: lw s10, 68(a5)
2114 ; RV64IZCMP-NEXT: lw s11, 72(a5)
2115 ; RV64IZCMP-NEXT: lw ra, 76(a5)
2116 ; RV64IZCMP-NEXT: lw s1, 80(a5)
2117 ; RV64IZCMP-NEXT: lw t3, 84(a5)
2118 ; RV64IZCMP-NEXT: lw t2, 88(a5)
2119 ; RV64IZCMP-NEXT: lw t1, 92(a5)
2120 ; RV64IZCMP-NEXT: lw a7, 112(a5)
2121 ; RV64IZCMP-NEXT: lw s0, 116(a5)
2122 ; RV64IZCMP-NEXT: lw a3, 120(a5)
2123 ; RV64IZCMP-NEXT: lw a0, 124(a5)
2124 ; RV64IZCMP-NEXT: lw a6, 96(a5)
2125 ; RV64IZCMP-NEXT: lw a4, 100(a5)
2126 ; RV64IZCMP-NEXT: lw a2, 104(a5)
2127 ; RV64IZCMP-NEXT: lw a1, 108(a5)
2128 ; RV64IZCMP-NEXT: sw a0, 124(a5)
2129 ; RV64IZCMP-NEXT: sw a3, 120(a5)
2130 ; RV64IZCMP-NEXT: sw s0, 116(a5)
2131 ; RV64IZCMP-NEXT: sw a7, 112(a5)
2132 ; RV64IZCMP-NEXT: sw a1, 108(a5)
2133 ; RV64IZCMP-NEXT: sw a2, 104(a5)
2134 ; RV64IZCMP-NEXT: sw a4, 100(a5)
2135 ; RV64IZCMP-NEXT: sw a6, 96(a5)
2136 ; RV64IZCMP-NEXT: sw t1, 92(a5)
2137 ; RV64IZCMP-NEXT: sw t2, 88(a5)
2138 ; RV64IZCMP-NEXT: sw t3, 84(a5)
2139 ; RV64IZCMP-NEXT: sw s1, 80(a5)
2140 ; RV64IZCMP-NEXT: sw ra, 76(a5)
2141 ; RV64IZCMP-NEXT: sw s11, 72(a5)
2142 ; RV64IZCMP-NEXT: sw s10, 68(a5)
2143 ; RV64IZCMP-NEXT: sw s9, 64(a5)
2144 ; RV64IZCMP-NEXT: sw s8, 60(a5)
2145 ; RV64IZCMP-NEXT: sw s7, 56(a5)
2146 ; RV64IZCMP-NEXT: sw s6, 52(a5)
2147 ; RV64IZCMP-NEXT: sw s5, 48(a5)
2148 ; RV64IZCMP-NEXT: sw s4, 44(a5)
2149 ; RV64IZCMP-NEXT: sw s3, 40(a5)
2150 ; RV64IZCMP-NEXT: sw s2, 36(a5)
2151 ; RV64IZCMP-NEXT: sw t6, 32(a5)
2152 ; RV64IZCMP-NEXT: sw t5, 28(a5)
2153 ; RV64IZCMP-NEXT: sw t4, 24(a5)
2154 ; RV64IZCMP-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
2155 ; RV64IZCMP-NEXT: sw a0, 20(a5)
2156 ; RV64IZCMP-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
2157 ; RV64IZCMP-NEXT: sw a0, 16(a5)
2158 ; RV64IZCMP-NEXT: ld a0, 24(sp) # 8-byte Folded Reload
2159 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2160 ; RV64IZCMP-NEXT: ld a0, 32(sp) # 8-byte Folded Reload
2161 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2162 ; RV64IZCMP-NEXT: ld a0, 40(sp) # 8-byte Folded Reload
2163 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2164 ; RV64IZCMP-NEXT: ld a0, 48(sp) # 8-byte Folded Reload
2165 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq)(t0)
2166 ; RV64IZCMP-NEXT: ld t0, 168(sp) # 8-byte Folded Reload
2167 ; RV64IZCMP-NEXT: ld t1, 160(sp) # 8-byte Folded Reload
2168 ; RV64IZCMP-NEXT: ld t2, 152(sp) # 8-byte Folded Reload
2169 ; RV64IZCMP-NEXT: ld a0, 144(sp) # 8-byte Folded Reload
2170 ; RV64IZCMP-NEXT: ld a1, 136(sp) # 8-byte Folded Reload
2171 ; RV64IZCMP-NEXT: ld a2, 128(sp) # 8-byte Folded Reload
2172 ; RV64IZCMP-NEXT: ld a3, 120(sp) # 8-byte Folded Reload
2173 ; RV64IZCMP-NEXT: ld a4, 112(sp) # 8-byte Folded Reload
2174 ; RV64IZCMP-NEXT: ld a5, 104(sp) # 8-byte Folded Reload
2175 ; RV64IZCMP-NEXT: ld a6, 96(sp) # 8-byte Folded Reload
2176 ; RV64IZCMP-NEXT: ld a7, 88(sp) # 8-byte Folded Reload
2177 ; RV64IZCMP-NEXT: ld t3, 80(sp) # 8-byte Folded Reload
2178 ; RV64IZCMP-NEXT: ld t4, 72(sp) # 8-byte Folded Reload
2179 ; RV64IZCMP-NEXT: ld t5, 64(sp) # 8-byte Folded Reload
2180 ; RV64IZCMP-NEXT: ld t6, 56(sp) # 8-byte Folded Reload
2181 ; RV64IZCMP-NEXT: addi sp, sp, 128
2182 ; RV64IZCMP-NEXT: cm.pop {ra, s0-s11}, 160
2183 ; RV64IZCMP-NEXT: mret
2185 ; RV32IZCMP-SR-LABEL: callee_with_irq:
2186 ; RV32IZCMP-SR: # %bb.0:
2187 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s11}, -112
2188 ; RV32IZCMP-SR-NEXT: addi sp, sp, -48
2189 ; RV32IZCMP-SR-NEXT: sw t0, 92(sp) # 4-byte Folded Spill
2190 ; RV32IZCMP-SR-NEXT: sw t1, 88(sp) # 4-byte Folded Spill
2191 ; RV32IZCMP-SR-NEXT: sw t2, 84(sp) # 4-byte Folded Spill
2192 ; RV32IZCMP-SR-NEXT: sw a0, 80(sp) # 4-byte Folded Spill
2193 ; RV32IZCMP-SR-NEXT: sw a1, 76(sp) # 4-byte Folded Spill
2194 ; RV32IZCMP-SR-NEXT: sw a2, 72(sp) # 4-byte Folded Spill
2195 ; RV32IZCMP-SR-NEXT: sw a3, 68(sp) # 4-byte Folded Spill
2196 ; RV32IZCMP-SR-NEXT: sw a4, 64(sp) # 4-byte Folded Spill
2197 ; RV32IZCMP-SR-NEXT: sw a5, 60(sp) # 4-byte Folded Spill
2198 ; RV32IZCMP-SR-NEXT: sw a6, 56(sp) # 4-byte Folded Spill
2199 ; RV32IZCMP-SR-NEXT: sw a7, 52(sp) # 4-byte Folded Spill
2200 ; RV32IZCMP-SR-NEXT: sw t3, 48(sp) # 4-byte Folded Spill
2201 ; RV32IZCMP-SR-NEXT: sw t4, 44(sp) # 4-byte Folded Spill
2202 ; RV32IZCMP-SR-NEXT: sw t5, 40(sp) # 4-byte Folded Spill
2203 ; RV32IZCMP-SR-NEXT: sw t6, 36(sp) # 4-byte Folded Spill
2204 ; RV32IZCMP-SR-NEXT: lui t0, %hi(var_test_irq)
2205 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq)(t0)
2206 ; RV32IZCMP-SR-NEXT: sw a0, 32(sp) # 4-byte Folded Spill
2207 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2208 ; RV32IZCMP-SR-NEXT: sw a0, 28(sp) # 4-byte Folded Spill
2209 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2210 ; RV32IZCMP-SR-NEXT: sw a0, 24(sp) # 4-byte Folded Spill
2211 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2212 ; RV32IZCMP-SR-NEXT: sw a0, 20(sp) # 4-byte Folded Spill
2213 ; RV32IZCMP-SR-NEXT: addi a5, t0, %lo(var_test_irq)
2214 ; RV32IZCMP-SR-NEXT: lw a0, 16(a5)
2215 ; RV32IZCMP-SR-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
2216 ; RV32IZCMP-SR-NEXT: lw a0, 20(a5)
2217 ; RV32IZCMP-SR-NEXT: sw a0, 12(sp) # 4-byte Folded Spill
2218 ; RV32IZCMP-SR-NEXT: lw t4, 24(a5)
2219 ; RV32IZCMP-SR-NEXT: lw t5, 28(a5)
2220 ; RV32IZCMP-SR-NEXT: lw t6, 32(a5)
2221 ; RV32IZCMP-SR-NEXT: lw s2, 36(a5)
2222 ; RV32IZCMP-SR-NEXT: lw s3, 40(a5)
2223 ; RV32IZCMP-SR-NEXT: lw s4, 44(a5)
2224 ; RV32IZCMP-SR-NEXT: lw s5, 48(a5)
2225 ; RV32IZCMP-SR-NEXT: lw s6, 52(a5)
2226 ; RV32IZCMP-SR-NEXT: lw s7, 56(a5)
2227 ; RV32IZCMP-SR-NEXT: lw s8, 60(a5)
2228 ; RV32IZCMP-SR-NEXT: lw s9, 64(a5)
2229 ; RV32IZCMP-SR-NEXT: lw s10, 68(a5)
2230 ; RV32IZCMP-SR-NEXT: lw s11, 72(a5)
2231 ; RV32IZCMP-SR-NEXT: lw ra, 76(a5)
2232 ; RV32IZCMP-SR-NEXT: lw s1, 80(a5)
2233 ; RV32IZCMP-SR-NEXT: lw t3, 84(a5)
2234 ; RV32IZCMP-SR-NEXT: lw t2, 88(a5)
2235 ; RV32IZCMP-SR-NEXT: lw t1, 92(a5)
2236 ; RV32IZCMP-SR-NEXT: lw a7, 112(a5)
2237 ; RV32IZCMP-SR-NEXT: lw s0, 116(a5)
2238 ; RV32IZCMP-SR-NEXT: lw a3, 120(a5)
2239 ; RV32IZCMP-SR-NEXT: lw a0, 124(a5)
2240 ; RV32IZCMP-SR-NEXT: lw a6, 96(a5)
2241 ; RV32IZCMP-SR-NEXT: lw a4, 100(a5)
2242 ; RV32IZCMP-SR-NEXT: lw a2, 104(a5)
2243 ; RV32IZCMP-SR-NEXT: lw a1, 108(a5)
2244 ; RV32IZCMP-SR-NEXT: sw a0, 124(a5)
2245 ; RV32IZCMP-SR-NEXT: sw a3, 120(a5)
2246 ; RV32IZCMP-SR-NEXT: sw s0, 116(a5)
2247 ; RV32IZCMP-SR-NEXT: sw a7, 112(a5)
2248 ; RV32IZCMP-SR-NEXT: sw a1, 108(a5)
2249 ; RV32IZCMP-SR-NEXT: sw a2, 104(a5)
2250 ; RV32IZCMP-SR-NEXT: sw a4, 100(a5)
2251 ; RV32IZCMP-SR-NEXT: sw a6, 96(a5)
2252 ; RV32IZCMP-SR-NEXT: sw t1, 92(a5)
2253 ; RV32IZCMP-SR-NEXT: sw t2, 88(a5)
2254 ; RV32IZCMP-SR-NEXT: sw t3, 84(a5)
2255 ; RV32IZCMP-SR-NEXT: sw s1, 80(a5)
2256 ; RV32IZCMP-SR-NEXT: sw ra, 76(a5)
2257 ; RV32IZCMP-SR-NEXT: sw s11, 72(a5)
2258 ; RV32IZCMP-SR-NEXT: sw s10, 68(a5)
2259 ; RV32IZCMP-SR-NEXT: sw s9, 64(a5)
2260 ; RV32IZCMP-SR-NEXT: sw s8, 60(a5)
2261 ; RV32IZCMP-SR-NEXT: sw s7, 56(a5)
2262 ; RV32IZCMP-SR-NEXT: sw s6, 52(a5)
2263 ; RV32IZCMP-SR-NEXT: sw s5, 48(a5)
2264 ; RV32IZCMP-SR-NEXT: sw s4, 44(a5)
2265 ; RV32IZCMP-SR-NEXT: sw s3, 40(a5)
2266 ; RV32IZCMP-SR-NEXT: sw s2, 36(a5)
2267 ; RV32IZCMP-SR-NEXT: sw t6, 32(a5)
2268 ; RV32IZCMP-SR-NEXT: sw t5, 28(a5)
2269 ; RV32IZCMP-SR-NEXT: sw t4, 24(a5)
2270 ; RV32IZCMP-SR-NEXT: lw a0, 12(sp) # 4-byte Folded Reload
2271 ; RV32IZCMP-SR-NEXT: sw a0, 20(a5)
2272 ; RV32IZCMP-SR-NEXT: lw a0, 16(sp) # 4-byte Folded Reload
2273 ; RV32IZCMP-SR-NEXT: sw a0, 16(a5)
2274 ; RV32IZCMP-SR-NEXT: lw a0, 20(sp) # 4-byte Folded Reload
2275 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2276 ; RV32IZCMP-SR-NEXT: lw a0, 24(sp) # 4-byte Folded Reload
2277 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2278 ; RV32IZCMP-SR-NEXT: lw a0, 28(sp) # 4-byte Folded Reload
2279 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2280 ; RV32IZCMP-SR-NEXT: lw a0, 32(sp) # 4-byte Folded Reload
2281 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq)(t0)
2282 ; RV32IZCMP-SR-NEXT: lw t0, 92(sp) # 4-byte Folded Reload
2283 ; RV32IZCMP-SR-NEXT: lw t1, 88(sp) # 4-byte Folded Reload
2284 ; RV32IZCMP-SR-NEXT: lw t2, 84(sp) # 4-byte Folded Reload
2285 ; RV32IZCMP-SR-NEXT: lw a0, 80(sp) # 4-byte Folded Reload
2286 ; RV32IZCMP-SR-NEXT: lw a1, 76(sp) # 4-byte Folded Reload
2287 ; RV32IZCMP-SR-NEXT: lw a2, 72(sp) # 4-byte Folded Reload
2288 ; RV32IZCMP-SR-NEXT: lw a3, 68(sp) # 4-byte Folded Reload
2289 ; RV32IZCMP-SR-NEXT: lw a4, 64(sp) # 4-byte Folded Reload
2290 ; RV32IZCMP-SR-NEXT: lw a5, 60(sp) # 4-byte Folded Reload
2291 ; RV32IZCMP-SR-NEXT: lw a6, 56(sp) # 4-byte Folded Reload
2292 ; RV32IZCMP-SR-NEXT: lw a7, 52(sp) # 4-byte Folded Reload
2293 ; RV32IZCMP-SR-NEXT: lw t3, 48(sp) # 4-byte Folded Reload
2294 ; RV32IZCMP-SR-NEXT: lw t4, 44(sp) # 4-byte Folded Reload
2295 ; RV32IZCMP-SR-NEXT: lw t5, 40(sp) # 4-byte Folded Reload
2296 ; RV32IZCMP-SR-NEXT: lw t6, 36(sp) # 4-byte Folded Reload
2297 ; RV32IZCMP-SR-NEXT: addi sp, sp, 48
2298 ; RV32IZCMP-SR-NEXT: cm.pop {ra, s0-s11}, 112
2299 ; RV32IZCMP-SR-NEXT: mret
2301 ; RV64IZCMP-SR-LABEL: callee_with_irq:
2302 ; RV64IZCMP-SR: # %bb.0:
2303 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s11}, -160
2304 ; RV64IZCMP-SR-NEXT: addi sp, sp, -128
2305 ; RV64IZCMP-SR-NEXT: sd t0, 168(sp) # 8-byte Folded Spill
2306 ; RV64IZCMP-SR-NEXT: sd t1, 160(sp) # 8-byte Folded Spill
2307 ; RV64IZCMP-SR-NEXT: sd t2, 152(sp) # 8-byte Folded Spill
2308 ; RV64IZCMP-SR-NEXT: sd a0, 144(sp) # 8-byte Folded Spill
2309 ; RV64IZCMP-SR-NEXT: sd a1, 136(sp) # 8-byte Folded Spill
2310 ; RV64IZCMP-SR-NEXT: sd a2, 128(sp) # 8-byte Folded Spill
2311 ; RV64IZCMP-SR-NEXT: sd a3, 120(sp) # 8-byte Folded Spill
2312 ; RV64IZCMP-SR-NEXT: sd a4, 112(sp) # 8-byte Folded Spill
2313 ; RV64IZCMP-SR-NEXT: sd a5, 104(sp) # 8-byte Folded Spill
2314 ; RV64IZCMP-SR-NEXT: sd a6, 96(sp) # 8-byte Folded Spill
2315 ; RV64IZCMP-SR-NEXT: sd a7, 88(sp) # 8-byte Folded Spill
2316 ; RV64IZCMP-SR-NEXT: sd t3, 80(sp) # 8-byte Folded Spill
2317 ; RV64IZCMP-SR-NEXT: sd t4, 72(sp) # 8-byte Folded Spill
2318 ; RV64IZCMP-SR-NEXT: sd t5, 64(sp) # 8-byte Folded Spill
2319 ; RV64IZCMP-SR-NEXT: sd t6, 56(sp) # 8-byte Folded Spill
2320 ; RV64IZCMP-SR-NEXT: lui t0, %hi(var_test_irq)
2321 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq)(t0)
2322 ; RV64IZCMP-SR-NEXT: sd a0, 48(sp) # 8-byte Folded Spill
2323 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2324 ; RV64IZCMP-SR-NEXT: sd a0, 40(sp) # 8-byte Folded Spill
2325 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2326 ; RV64IZCMP-SR-NEXT: sd a0, 32(sp) # 8-byte Folded Spill
2327 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2328 ; RV64IZCMP-SR-NEXT: sd a0, 24(sp) # 8-byte Folded Spill
2329 ; RV64IZCMP-SR-NEXT: addi a5, t0, %lo(var_test_irq)
2330 ; RV64IZCMP-SR-NEXT: lw a0, 16(a5)
2331 ; RV64IZCMP-SR-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
2332 ; RV64IZCMP-SR-NEXT: lw a0, 20(a5)
2333 ; RV64IZCMP-SR-NEXT: sd a0, 8(sp) # 8-byte Folded Spill
2334 ; RV64IZCMP-SR-NEXT: lw t4, 24(a5)
2335 ; RV64IZCMP-SR-NEXT: lw t5, 28(a5)
2336 ; RV64IZCMP-SR-NEXT: lw t6, 32(a5)
2337 ; RV64IZCMP-SR-NEXT: lw s2, 36(a5)
2338 ; RV64IZCMP-SR-NEXT: lw s3, 40(a5)
2339 ; RV64IZCMP-SR-NEXT: lw s4, 44(a5)
2340 ; RV64IZCMP-SR-NEXT: lw s5, 48(a5)
2341 ; RV64IZCMP-SR-NEXT: lw s6, 52(a5)
2342 ; RV64IZCMP-SR-NEXT: lw s7, 56(a5)
2343 ; RV64IZCMP-SR-NEXT: lw s8, 60(a5)
2344 ; RV64IZCMP-SR-NEXT: lw s9, 64(a5)
2345 ; RV64IZCMP-SR-NEXT: lw s10, 68(a5)
2346 ; RV64IZCMP-SR-NEXT: lw s11, 72(a5)
2347 ; RV64IZCMP-SR-NEXT: lw ra, 76(a5)
2348 ; RV64IZCMP-SR-NEXT: lw s1, 80(a5)
2349 ; RV64IZCMP-SR-NEXT: lw t3, 84(a5)
2350 ; RV64IZCMP-SR-NEXT: lw t2, 88(a5)
2351 ; RV64IZCMP-SR-NEXT: lw t1, 92(a5)
2352 ; RV64IZCMP-SR-NEXT: lw a7, 112(a5)
2353 ; RV64IZCMP-SR-NEXT: lw s0, 116(a5)
2354 ; RV64IZCMP-SR-NEXT: lw a3, 120(a5)
2355 ; RV64IZCMP-SR-NEXT: lw a0, 124(a5)
2356 ; RV64IZCMP-SR-NEXT: lw a6, 96(a5)
2357 ; RV64IZCMP-SR-NEXT: lw a4, 100(a5)
2358 ; RV64IZCMP-SR-NEXT: lw a2, 104(a5)
2359 ; RV64IZCMP-SR-NEXT: lw a1, 108(a5)
2360 ; RV64IZCMP-SR-NEXT: sw a0, 124(a5)
2361 ; RV64IZCMP-SR-NEXT: sw a3, 120(a5)
2362 ; RV64IZCMP-SR-NEXT: sw s0, 116(a5)
2363 ; RV64IZCMP-SR-NEXT: sw a7, 112(a5)
2364 ; RV64IZCMP-SR-NEXT: sw a1, 108(a5)
2365 ; RV64IZCMP-SR-NEXT: sw a2, 104(a5)
2366 ; RV64IZCMP-SR-NEXT: sw a4, 100(a5)
2367 ; RV64IZCMP-SR-NEXT: sw a6, 96(a5)
2368 ; RV64IZCMP-SR-NEXT: sw t1, 92(a5)
2369 ; RV64IZCMP-SR-NEXT: sw t2, 88(a5)
2370 ; RV64IZCMP-SR-NEXT: sw t3, 84(a5)
2371 ; RV64IZCMP-SR-NEXT: sw s1, 80(a5)
2372 ; RV64IZCMP-SR-NEXT: sw ra, 76(a5)
2373 ; RV64IZCMP-SR-NEXT: sw s11, 72(a5)
2374 ; RV64IZCMP-SR-NEXT: sw s10, 68(a5)
2375 ; RV64IZCMP-SR-NEXT: sw s9, 64(a5)
2376 ; RV64IZCMP-SR-NEXT: sw s8, 60(a5)
2377 ; RV64IZCMP-SR-NEXT: sw s7, 56(a5)
2378 ; RV64IZCMP-SR-NEXT: sw s6, 52(a5)
2379 ; RV64IZCMP-SR-NEXT: sw s5, 48(a5)
2380 ; RV64IZCMP-SR-NEXT: sw s4, 44(a5)
2381 ; RV64IZCMP-SR-NEXT: sw s3, 40(a5)
2382 ; RV64IZCMP-SR-NEXT: sw s2, 36(a5)
2383 ; RV64IZCMP-SR-NEXT: sw t6, 32(a5)
2384 ; RV64IZCMP-SR-NEXT: sw t5, 28(a5)
2385 ; RV64IZCMP-SR-NEXT: sw t4, 24(a5)
2386 ; RV64IZCMP-SR-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
2387 ; RV64IZCMP-SR-NEXT: sw a0, 20(a5)
2388 ; RV64IZCMP-SR-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
2389 ; RV64IZCMP-SR-NEXT: sw a0, 16(a5)
2390 ; RV64IZCMP-SR-NEXT: ld a0, 24(sp) # 8-byte Folded Reload
2391 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2392 ; RV64IZCMP-SR-NEXT: ld a0, 32(sp) # 8-byte Folded Reload
2393 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2394 ; RV64IZCMP-SR-NEXT: ld a0, 40(sp) # 8-byte Folded Reload
2395 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2396 ; RV64IZCMP-SR-NEXT: ld a0, 48(sp) # 8-byte Folded Reload
2397 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq)(t0)
2398 ; RV64IZCMP-SR-NEXT: ld t0, 168(sp) # 8-byte Folded Reload
2399 ; RV64IZCMP-SR-NEXT: ld t1, 160(sp) # 8-byte Folded Reload
2400 ; RV64IZCMP-SR-NEXT: ld t2, 152(sp) # 8-byte Folded Reload
2401 ; RV64IZCMP-SR-NEXT: ld a0, 144(sp) # 8-byte Folded Reload
2402 ; RV64IZCMP-SR-NEXT: ld a1, 136(sp) # 8-byte Folded Reload
2403 ; RV64IZCMP-SR-NEXT: ld a2, 128(sp) # 8-byte Folded Reload
2404 ; RV64IZCMP-SR-NEXT: ld a3, 120(sp) # 8-byte Folded Reload
2405 ; RV64IZCMP-SR-NEXT: ld a4, 112(sp) # 8-byte Folded Reload
2406 ; RV64IZCMP-SR-NEXT: ld a5, 104(sp) # 8-byte Folded Reload
2407 ; RV64IZCMP-SR-NEXT: ld a6, 96(sp) # 8-byte Folded Reload
2408 ; RV64IZCMP-SR-NEXT: ld a7, 88(sp) # 8-byte Folded Reload
2409 ; RV64IZCMP-SR-NEXT: ld t3, 80(sp) # 8-byte Folded Reload
2410 ; RV64IZCMP-SR-NEXT: ld t4, 72(sp) # 8-byte Folded Reload
2411 ; RV64IZCMP-SR-NEXT: ld t5, 64(sp) # 8-byte Folded Reload
2412 ; RV64IZCMP-SR-NEXT: ld t6, 56(sp) # 8-byte Folded Reload
2413 ; RV64IZCMP-SR-NEXT: addi sp, sp, 128
2414 ; RV64IZCMP-SR-NEXT: cm.pop {ra, s0-s11}, 160
2415 ; RV64IZCMP-SR-NEXT: mret
2417 ; RV32I-LABEL: callee_with_irq:
2419 ; RV32I-NEXT: addi sp, sp, -144
2420 ; RV32I-NEXT: sw ra, 140(sp) # 4-byte Folded Spill
2421 ; RV32I-NEXT: sw t0, 136(sp) # 4-byte Folded Spill
2422 ; RV32I-NEXT: sw t1, 132(sp) # 4-byte Folded Spill
2423 ; RV32I-NEXT: sw t2, 128(sp) # 4-byte Folded Spill
2424 ; RV32I-NEXT: sw s0, 124(sp) # 4-byte Folded Spill
2425 ; RV32I-NEXT: sw s1, 120(sp) # 4-byte Folded Spill
2426 ; RV32I-NEXT: sw a0, 116(sp) # 4-byte Folded Spill
2427 ; RV32I-NEXT: sw a1, 112(sp) # 4-byte Folded Spill
2428 ; RV32I-NEXT: sw a2, 108(sp) # 4-byte Folded Spill
2429 ; RV32I-NEXT: sw a3, 104(sp) # 4-byte Folded Spill
2430 ; RV32I-NEXT: sw a4, 100(sp) # 4-byte Folded Spill
2431 ; RV32I-NEXT: sw a5, 96(sp) # 4-byte Folded Spill
2432 ; RV32I-NEXT: sw a6, 92(sp) # 4-byte Folded Spill
2433 ; RV32I-NEXT: sw a7, 88(sp) # 4-byte Folded Spill
2434 ; RV32I-NEXT: sw s2, 84(sp) # 4-byte Folded Spill
2435 ; RV32I-NEXT: sw s3, 80(sp) # 4-byte Folded Spill
2436 ; RV32I-NEXT: sw s4, 76(sp) # 4-byte Folded Spill
2437 ; RV32I-NEXT: sw s5, 72(sp) # 4-byte Folded Spill
2438 ; RV32I-NEXT: sw s6, 68(sp) # 4-byte Folded Spill
2439 ; RV32I-NEXT: sw s7, 64(sp) # 4-byte Folded Spill
2440 ; RV32I-NEXT: sw s8, 60(sp) # 4-byte Folded Spill
2441 ; RV32I-NEXT: sw s9, 56(sp) # 4-byte Folded Spill
2442 ; RV32I-NEXT: sw s10, 52(sp) # 4-byte Folded Spill
2443 ; RV32I-NEXT: sw s11, 48(sp) # 4-byte Folded Spill
2444 ; RV32I-NEXT: sw t3, 44(sp) # 4-byte Folded Spill
2445 ; RV32I-NEXT: sw t4, 40(sp) # 4-byte Folded Spill
2446 ; RV32I-NEXT: sw t5, 36(sp) # 4-byte Folded Spill
2447 ; RV32I-NEXT: sw t6, 32(sp) # 4-byte Folded Spill
2448 ; RV32I-NEXT: lui a7, %hi(var_test_irq)
2449 ; RV32I-NEXT: lw a0, %lo(var_test_irq)(a7)
2450 ; RV32I-NEXT: sw a0, 28(sp) # 4-byte Folded Spill
2451 ; RV32I-NEXT: lw a0, %lo(var_test_irq+4)(a7)
2452 ; RV32I-NEXT: sw a0, 24(sp) # 4-byte Folded Spill
2453 ; RV32I-NEXT: lw a0, %lo(var_test_irq+8)(a7)
2454 ; RV32I-NEXT: sw a0, 20(sp) # 4-byte Folded Spill
2455 ; RV32I-NEXT: lw a0, %lo(var_test_irq+12)(a7)
2456 ; RV32I-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
2457 ; RV32I-NEXT: addi a5, a7, %lo(var_test_irq)
2458 ; RV32I-NEXT: lw a0, 16(a5)
2459 ; RV32I-NEXT: sw a0, 12(sp) # 4-byte Folded Spill
2460 ; RV32I-NEXT: lw a0, 20(a5)
2461 ; RV32I-NEXT: sw a0, 8(sp) # 4-byte Folded Spill
2462 ; RV32I-NEXT: lw t0, 24(a5)
2463 ; RV32I-NEXT: lw t1, 28(a5)
2464 ; RV32I-NEXT: lw t2, 32(a5)
2465 ; RV32I-NEXT: lw t3, 36(a5)
2466 ; RV32I-NEXT: lw t4, 40(a5)
2467 ; RV32I-NEXT: lw t5, 44(a5)
2468 ; RV32I-NEXT: lw t6, 48(a5)
2469 ; RV32I-NEXT: lw s0, 52(a5)
2470 ; RV32I-NEXT: lw s1, 56(a5)
2471 ; RV32I-NEXT: lw s2, 60(a5)
2472 ; RV32I-NEXT: lw s3, 64(a5)
2473 ; RV32I-NEXT: lw s4, 68(a5)
2474 ; RV32I-NEXT: lw s5, 72(a5)
2475 ; RV32I-NEXT: lw s6, 76(a5)
2476 ; RV32I-NEXT: lw s7, 80(a5)
2477 ; RV32I-NEXT: lw s8, 84(a5)
2478 ; RV32I-NEXT: lw s9, 88(a5)
2479 ; RV32I-NEXT: lw s10, 92(a5)
2480 ; RV32I-NEXT: lw s11, 112(a5)
2481 ; RV32I-NEXT: lw ra, 116(a5)
2482 ; RV32I-NEXT: lw a3, 120(a5)
2483 ; RV32I-NEXT: lw a0, 124(a5)
2484 ; RV32I-NEXT: lw a6, 96(a5)
2485 ; RV32I-NEXT: lw a4, 100(a5)
2486 ; RV32I-NEXT: lw a2, 104(a5)
2487 ; RV32I-NEXT: lw a1, 108(a5)
2488 ; RV32I-NEXT: sw a0, 124(a5)
2489 ; RV32I-NEXT: sw a3, 120(a5)
2490 ; RV32I-NEXT: sw ra, 116(a5)
2491 ; RV32I-NEXT: sw s11, 112(a5)
2492 ; RV32I-NEXT: sw a1, 108(a5)
2493 ; RV32I-NEXT: sw a2, 104(a5)
2494 ; RV32I-NEXT: sw a4, 100(a5)
2495 ; RV32I-NEXT: sw a6, 96(a5)
2496 ; RV32I-NEXT: sw s10, 92(a5)
2497 ; RV32I-NEXT: sw s9, 88(a5)
2498 ; RV32I-NEXT: sw s8, 84(a5)
2499 ; RV32I-NEXT: sw s7, 80(a5)
2500 ; RV32I-NEXT: sw s6, 76(a5)
2501 ; RV32I-NEXT: sw s5, 72(a5)
2502 ; RV32I-NEXT: sw s4, 68(a5)
2503 ; RV32I-NEXT: sw s3, 64(a5)
2504 ; RV32I-NEXT: sw s2, 60(a5)
2505 ; RV32I-NEXT: sw s1, 56(a5)
2506 ; RV32I-NEXT: sw s0, 52(a5)
2507 ; RV32I-NEXT: sw t6, 48(a5)
2508 ; RV32I-NEXT: sw t5, 44(a5)
2509 ; RV32I-NEXT: sw t4, 40(a5)
2510 ; RV32I-NEXT: sw t3, 36(a5)
2511 ; RV32I-NEXT: sw t2, 32(a5)
2512 ; RV32I-NEXT: sw t1, 28(a5)
2513 ; RV32I-NEXT: sw t0, 24(a5)
2514 ; RV32I-NEXT: lw a0, 8(sp) # 4-byte Folded Reload
2515 ; RV32I-NEXT: sw a0, 20(a5)
2516 ; RV32I-NEXT: lw a0, 12(sp) # 4-byte Folded Reload
2517 ; RV32I-NEXT: sw a0, 16(a5)
2518 ; RV32I-NEXT: lw a0, 16(sp) # 4-byte Folded Reload
2519 ; RV32I-NEXT: sw a0, %lo(var_test_irq+12)(a7)
2520 ; RV32I-NEXT: lw a0, 20(sp) # 4-byte Folded Reload
2521 ; RV32I-NEXT: sw a0, %lo(var_test_irq+8)(a7)
2522 ; RV32I-NEXT: lw a0, 24(sp) # 4-byte Folded Reload
2523 ; RV32I-NEXT: sw a0, %lo(var_test_irq+4)(a7)
2524 ; RV32I-NEXT: lw a0, 28(sp) # 4-byte Folded Reload
2525 ; RV32I-NEXT: sw a0, %lo(var_test_irq)(a7)
2526 ; RV32I-NEXT: lw ra, 140(sp) # 4-byte Folded Reload
2527 ; RV32I-NEXT: lw t0, 136(sp) # 4-byte Folded Reload
2528 ; RV32I-NEXT: lw t1, 132(sp) # 4-byte Folded Reload
2529 ; RV32I-NEXT: lw t2, 128(sp) # 4-byte Folded Reload
2530 ; RV32I-NEXT: lw s0, 124(sp) # 4-byte Folded Reload
2531 ; RV32I-NEXT: lw s1, 120(sp) # 4-byte Folded Reload
2532 ; RV32I-NEXT: lw a0, 116(sp) # 4-byte Folded Reload
2533 ; RV32I-NEXT: lw a1, 112(sp) # 4-byte Folded Reload
2534 ; RV32I-NEXT: lw a2, 108(sp) # 4-byte Folded Reload
2535 ; RV32I-NEXT: lw a3, 104(sp) # 4-byte Folded Reload
2536 ; RV32I-NEXT: lw a4, 100(sp) # 4-byte Folded Reload
2537 ; RV32I-NEXT: lw a5, 96(sp) # 4-byte Folded Reload
2538 ; RV32I-NEXT: lw a6, 92(sp) # 4-byte Folded Reload
2539 ; RV32I-NEXT: lw a7, 88(sp) # 4-byte Folded Reload
2540 ; RV32I-NEXT: lw s2, 84(sp) # 4-byte Folded Reload
2541 ; RV32I-NEXT: lw s3, 80(sp) # 4-byte Folded Reload
2542 ; RV32I-NEXT: lw s4, 76(sp) # 4-byte Folded Reload
2543 ; RV32I-NEXT: lw s5, 72(sp) # 4-byte Folded Reload
2544 ; RV32I-NEXT: lw s6, 68(sp) # 4-byte Folded Reload
2545 ; RV32I-NEXT: lw s7, 64(sp) # 4-byte Folded Reload
2546 ; RV32I-NEXT: lw s8, 60(sp) # 4-byte Folded Reload
2547 ; RV32I-NEXT: lw s9, 56(sp) # 4-byte Folded Reload
2548 ; RV32I-NEXT: lw s10, 52(sp) # 4-byte Folded Reload
2549 ; RV32I-NEXT: lw s11, 48(sp) # 4-byte Folded Reload
2550 ; RV32I-NEXT: lw t3, 44(sp) # 4-byte Folded Reload
2551 ; RV32I-NEXT: lw t4, 40(sp) # 4-byte Folded Reload
2552 ; RV32I-NEXT: lw t5, 36(sp) # 4-byte Folded Reload
2553 ; RV32I-NEXT: lw t6, 32(sp) # 4-byte Folded Reload
2554 ; RV32I-NEXT: addi sp, sp, 144
2557 ; RV64I-LABEL: callee_with_irq:
2559 ; RV64I-NEXT: addi sp, sp, -272
2560 ; RV64I-NEXT: sd ra, 264(sp) # 8-byte Folded Spill
2561 ; RV64I-NEXT: sd t0, 256(sp) # 8-byte Folded Spill
2562 ; RV64I-NEXT: sd t1, 248(sp) # 8-byte Folded Spill
2563 ; RV64I-NEXT: sd t2, 240(sp) # 8-byte Folded Spill
2564 ; RV64I-NEXT: sd s0, 232(sp) # 8-byte Folded Spill
2565 ; RV64I-NEXT: sd s1, 224(sp) # 8-byte Folded Spill
2566 ; RV64I-NEXT: sd a0, 216(sp) # 8-byte Folded Spill
2567 ; RV64I-NEXT: sd a1, 208(sp) # 8-byte Folded Spill
2568 ; RV64I-NEXT: sd a2, 200(sp) # 8-byte Folded Spill
2569 ; RV64I-NEXT: sd a3, 192(sp) # 8-byte Folded Spill
2570 ; RV64I-NEXT: sd a4, 184(sp) # 8-byte Folded Spill
2571 ; RV64I-NEXT: sd a5, 176(sp) # 8-byte Folded Spill
2572 ; RV64I-NEXT: sd a6, 168(sp) # 8-byte Folded Spill
2573 ; RV64I-NEXT: sd a7, 160(sp) # 8-byte Folded Spill
2574 ; RV64I-NEXT: sd s2, 152(sp) # 8-byte Folded Spill
2575 ; RV64I-NEXT: sd s3, 144(sp) # 8-byte Folded Spill
2576 ; RV64I-NEXT: sd s4, 136(sp) # 8-byte Folded Spill
2577 ; RV64I-NEXT: sd s5, 128(sp) # 8-byte Folded Spill
2578 ; RV64I-NEXT: sd s6, 120(sp) # 8-byte Folded Spill
2579 ; RV64I-NEXT: sd s7, 112(sp) # 8-byte Folded Spill
2580 ; RV64I-NEXT: sd s8, 104(sp) # 8-byte Folded Spill
2581 ; RV64I-NEXT: sd s9, 96(sp) # 8-byte Folded Spill
2582 ; RV64I-NEXT: sd s10, 88(sp) # 8-byte Folded Spill
2583 ; RV64I-NEXT: sd s11, 80(sp) # 8-byte Folded Spill
2584 ; RV64I-NEXT: sd t3, 72(sp) # 8-byte Folded Spill
2585 ; RV64I-NEXT: sd t4, 64(sp) # 8-byte Folded Spill
2586 ; RV64I-NEXT: sd t5, 56(sp) # 8-byte Folded Spill
2587 ; RV64I-NEXT: sd t6, 48(sp) # 8-byte Folded Spill
2588 ; RV64I-NEXT: lui a7, %hi(var_test_irq)
2589 ; RV64I-NEXT: lw a0, %lo(var_test_irq)(a7)
2590 ; RV64I-NEXT: sd a0, 40(sp) # 8-byte Folded Spill
2591 ; RV64I-NEXT: lw a0, %lo(var_test_irq+4)(a7)
2592 ; RV64I-NEXT: sd a0, 32(sp) # 8-byte Folded Spill
2593 ; RV64I-NEXT: lw a0, %lo(var_test_irq+8)(a7)
2594 ; RV64I-NEXT: sd a0, 24(sp) # 8-byte Folded Spill
2595 ; RV64I-NEXT: lw a0, %lo(var_test_irq+12)(a7)
2596 ; RV64I-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
2597 ; RV64I-NEXT: addi a5, a7, %lo(var_test_irq)
2598 ; RV64I-NEXT: lw a0, 16(a5)
2599 ; RV64I-NEXT: sd a0, 8(sp) # 8-byte Folded Spill
2600 ; RV64I-NEXT: lw a0, 20(a5)
2601 ; RV64I-NEXT: sd a0, 0(sp) # 8-byte Folded Spill
2602 ; RV64I-NEXT: lw t0, 24(a5)
2603 ; RV64I-NEXT: lw t1, 28(a5)
2604 ; RV64I-NEXT: lw t2, 32(a5)
2605 ; RV64I-NEXT: lw t3, 36(a5)
2606 ; RV64I-NEXT: lw t4, 40(a5)
2607 ; RV64I-NEXT: lw t5, 44(a5)
2608 ; RV64I-NEXT: lw t6, 48(a5)
2609 ; RV64I-NEXT: lw s0, 52(a5)
2610 ; RV64I-NEXT: lw s1, 56(a5)
2611 ; RV64I-NEXT: lw s2, 60(a5)
2612 ; RV64I-NEXT: lw s3, 64(a5)
2613 ; RV64I-NEXT: lw s4, 68(a5)
2614 ; RV64I-NEXT: lw s5, 72(a5)
2615 ; RV64I-NEXT: lw s6, 76(a5)
2616 ; RV64I-NEXT: lw s7, 80(a5)
2617 ; RV64I-NEXT: lw s8, 84(a5)
2618 ; RV64I-NEXT: lw s9, 88(a5)
2619 ; RV64I-NEXT: lw s10, 92(a5)
2620 ; RV64I-NEXT: lw s11, 112(a5)
2621 ; RV64I-NEXT: lw ra, 116(a5)
2622 ; RV64I-NEXT: lw a3, 120(a5)
2623 ; RV64I-NEXT: lw a0, 124(a5)
2624 ; RV64I-NEXT: lw a6, 96(a5)
2625 ; RV64I-NEXT: lw a4, 100(a5)
2626 ; RV64I-NEXT: lw a2, 104(a5)
2627 ; RV64I-NEXT: lw a1, 108(a5)
2628 ; RV64I-NEXT: sw a0, 124(a5)
2629 ; RV64I-NEXT: sw a3, 120(a5)
2630 ; RV64I-NEXT: sw ra, 116(a5)
2631 ; RV64I-NEXT: sw s11, 112(a5)
2632 ; RV64I-NEXT: sw a1, 108(a5)
2633 ; RV64I-NEXT: sw a2, 104(a5)
2634 ; RV64I-NEXT: sw a4, 100(a5)
2635 ; RV64I-NEXT: sw a6, 96(a5)
2636 ; RV64I-NEXT: sw s10, 92(a5)
2637 ; RV64I-NEXT: sw s9, 88(a5)
2638 ; RV64I-NEXT: sw s8, 84(a5)
2639 ; RV64I-NEXT: sw s7, 80(a5)
2640 ; RV64I-NEXT: sw s6, 76(a5)
2641 ; RV64I-NEXT: sw s5, 72(a5)
2642 ; RV64I-NEXT: sw s4, 68(a5)
2643 ; RV64I-NEXT: sw s3, 64(a5)
2644 ; RV64I-NEXT: sw s2, 60(a5)
2645 ; RV64I-NEXT: sw s1, 56(a5)
2646 ; RV64I-NEXT: sw s0, 52(a5)
2647 ; RV64I-NEXT: sw t6, 48(a5)
2648 ; RV64I-NEXT: sw t5, 44(a5)
2649 ; RV64I-NEXT: sw t4, 40(a5)
2650 ; RV64I-NEXT: sw t3, 36(a5)
2651 ; RV64I-NEXT: sw t2, 32(a5)
2652 ; RV64I-NEXT: sw t1, 28(a5)
2653 ; RV64I-NEXT: sw t0, 24(a5)
2654 ; RV64I-NEXT: ld a0, 0(sp) # 8-byte Folded Reload
2655 ; RV64I-NEXT: sw a0, 20(a5)
2656 ; RV64I-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
2657 ; RV64I-NEXT: sw a0, 16(a5)
2658 ; RV64I-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
2659 ; RV64I-NEXT: sw a0, %lo(var_test_irq+12)(a7)
2660 ; RV64I-NEXT: ld a0, 24(sp) # 8-byte Folded Reload
2661 ; RV64I-NEXT: sw a0, %lo(var_test_irq+8)(a7)
2662 ; RV64I-NEXT: ld a0, 32(sp) # 8-byte Folded Reload
2663 ; RV64I-NEXT: sw a0, %lo(var_test_irq+4)(a7)
2664 ; RV64I-NEXT: ld a0, 40(sp) # 8-byte Folded Reload
2665 ; RV64I-NEXT: sw a0, %lo(var_test_irq)(a7)
2666 ; RV64I-NEXT: ld ra, 264(sp) # 8-byte Folded Reload
2667 ; RV64I-NEXT: ld t0, 256(sp) # 8-byte Folded Reload
2668 ; RV64I-NEXT: ld t1, 248(sp) # 8-byte Folded Reload
2669 ; RV64I-NEXT: ld t2, 240(sp) # 8-byte Folded Reload
2670 ; RV64I-NEXT: ld s0, 232(sp) # 8-byte Folded Reload
2671 ; RV64I-NEXT: ld s1, 224(sp) # 8-byte Folded Reload
2672 ; RV64I-NEXT: ld a0, 216(sp) # 8-byte Folded Reload
2673 ; RV64I-NEXT: ld a1, 208(sp) # 8-byte Folded Reload
2674 ; RV64I-NEXT: ld a2, 200(sp) # 8-byte Folded Reload
2675 ; RV64I-NEXT: ld a3, 192(sp) # 8-byte Folded Reload
2676 ; RV64I-NEXT: ld a4, 184(sp) # 8-byte Folded Reload
2677 ; RV64I-NEXT: ld a5, 176(sp) # 8-byte Folded Reload
2678 ; RV64I-NEXT: ld a6, 168(sp) # 8-byte Folded Reload
2679 ; RV64I-NEXT: ld a7, 160(sp) # 8-byte Folded Reload
2680 ; RV64I-NEXT: ld s2, 152(sp) # 8-byte Folded Reload
2681 ; RV64I-NEXT: ld s3, 144(sp) # 8-byte Folded Reload
2682 ; RV64I-NEXT: ld s4, 136(sp) # 8-byte Folded Reload
2683 ; RV64I-NEXT: ld s5, 128(sp) # 8-byte Folded Reload
2684 ; RV64I-NEXT: ld s6, 120(sp) # 8-byte Folded Reload
2685 ; RV64I-NEXT: ld s7, 112(sp) # 8-byte Folded Reload
2686 ; RV64I-NEXT: ld s8, 104(sp) # 8-byte Folded Reload
2687 ; RV64I-NEXT: ld s9, 96(sp) # 8-byte Folded Reload
2688 ; RV64I-NEXT: ld s10, 88(sp) # 8-byte Folded Reload
2689 ; RV64I-NEXT: ld s11, 80(sp) # 8-byte Folded Reload
2690 ; RV64I-NEXT: ld t3, 72(sp) # 8-byte Folded Reload
2691 ; RV64I-NEXT: ld t4, 64(sp) # 8-byte Folded Reload
2692 ; RV64I-NEXT: ld t5, 56(sp) # 8-byte Folded Reload
2693 ; RV64I-NEXT: ld t6, 48(sp) # 8-byte Folded Reload
2694 ; RV64I-NEXT: addi sp, sp, 272
2696 %val = load [32 x i32], ptr @var_test_irq
2697 store volatile [32 x i32] %val, ptr @var_test_irq
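; Same register pressure without the interrupt attribute: only the
; callee-saved registers need preserving, so cm.push/cm.popret cover
; everything and no temporaries are spilled.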
2701 define void @callee_no_irq() nounwind {
2702 ; RV32IZCMP-LABEL: callee_no_irq:
2703 ; RV32IZCMP: # %bb.0:
2704 ; RV32IZCMP-NEXT: cm.push {ra, s0-s11}, -96
2705 ; RV32IZCMP-NEXT: lui t0, %hi(var_test_irq)
2706 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq)(t0)
2707 ; RV32IZCMP-NEXT: sw a0, 28(sp) # 4-byte Folded Spill
2708 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2709 ; RV32IZCMP-NEXT: sw a0, 24(sp) # 4-byte Folded Spill
2710 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2711 ; RV32IZCMP-NEXT: sw a0, 20(sp) # 4-byte Folded Spill
2712 ; RV32IZCMP-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2713 ; RV32IZCMP-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
2714 ; RV32IZCMP-NEXT: addi a5, t0, %lo(var_test_irq)
2715 ; RV32IZCMP-NEXT: lw a0, 16(a5)
2716 ; RV32IZCMP-NEXT: sw a0, 12(sp) # 4-byte Folded Spill
2717 ; RV32IZCMP-NEXT: lw a0, 20(a5)
2718 ; RV32IZCMP-NEXT: sw a0, 8(sp) # 4-byte Folded Spill
2719 ; RV32IZCMP-NEXT: lw t4, 24(a5)
2720 ; RV32IZCMP-NEXT: lw t5, 28(a5)
2721 ; RV32IZCMP-NEXT: lw t6, 32(a5)
2722 ; RV32IZCMP-NEXT: lw s2, 36(a5)
2723 ; RV32IZCMP-NEXT: lw s3, 40(a5)
2724 ; RV32IZCMP-NEXT: lw s4, 44(a5)
2725 ; RV32IZCMP-NEXT: lw s5, 48(a5)
2726 ; RV32IZCMP-NEXT: lw s6, 52(a5)
2727 ; RV32IZCMP-NEXT: lw s7, 56(a5)
2728 ; RV32IZCMP-NEXT: lw s8, 60(a5)
2729 ; RV32IZCMP-NEXT: lw s9, 64(a5)
2730 ; RV32IZCMP-NEXT: lw s10, 68(a5)
2731 ; RV32IZCMP-NEXT: lw s11, 72(a5)
2732 ; RV32IZCMP-NEXT: lw ra, 76(a5)
2733 ; RV32IZCMP-NEXT: lw s1, 80(a5)
2734 ; RV32IZCMP-NEXT: lw t3, 84(a5)
2735 ; RV32IZCMP-NEXT: lw t2, 88(a5)
2736 ; RV32IZCMP-NEXT: lw t1, 92(a5)
2737 ; RV32IZCMP-NEXT: lw a7, 112(a5)
2738 ; RV32IZCMP-NEXT: lw s0, 116(a5)
2739 ; RV32IZCMP-NEXT: lw a3, 120(a5)
2740 ; RV32IZCMP-NEXT: lw a0, 124(a5)
2741 ; RV32IZCMP-NEXT: lw a6, 96(a5)
2742 ; RV32IZCMP-NEXT: lw a4, 100(a5)
2743 ; RV32IZCMP-NEXT: lw a2, 104(a5)
2744 ; RV32IZCMP-NEXT: lw a1, 108(a5)
2745 ; RV32IZCMP-NEXT: sw a0, 124(a5)
2746 ; RV32IZCMP-NEXT: sw a3, 120(a5)
2747 ; RV32IZCMP-NEXT: sw s0, 116(a5)
2748 ; RV32IZCMP-NEXT: sw a7, 112(a5)
2749 ; RV32IZCMP-NEXT: sw a1, 108(a5)
2750 ; RV32IZCMP-NEXT: sw a2, 104(a5)
2751 ; RV32IZCMP-NEXT: sw a4, 100(a5)
2752 ; RV32IZCMP-NEXT: sw a6, 96(a5)
2753 ; RV32IZCMP-NEXT: sw t1, 92(a5)
2754 ; RV32IZCMP-NEXT: sw t2, 88(a5)
2755 ; RV32IZCMP-NEXT: sw t3, 84(a5)
2756 ; RV32IZCMP-NEXT: sw s1, 80(a5)
2757 ; RV32IZCMP-NEXT: sw ra, 76(a5)
2758 ; RV32IZCMP-NEXT: sw s11, 72(a5)
2759 ; RV32IZCMP-NEXT: sw s10, 68(a5)
2760 ; RV32IZCMP-NEXT: sw s9, 64(a5)
2761 ; RV32IZCMP-NEXT: sw s8, 60(a5)
2762 ; RV32IZCMP-NEXT: sw s7, 56(a5)
2763 ; RV32IZCMP-NEXT: sw s6, 52(a5)
2764 ; RV32IZCMP-NEXT: sw s5, 48(a5)
2765 ; RV32IZCMP-NEXT: sw s4, 44(a5)
2766 ; RV32IZCMP-NEXT: sw s3, 40(a5)
2767 ; RV32IZCMP-NEXT: sw s2, 36(a5)
2768 ; RV32IZCMP-NEXT: sw t6, 32(a5)
2769 ; RV32IZCMP-NEXT: sw t5, 28(a5)
2770 ; RV32IZCMP-NEXT: sw t4, 24(a5)
2771 ; RV32IZCMP-NEXT: lw a0, 8(sp) # 4-byte Folded Reload
2772 ; RV32IZCMP-NEXT: sw a0, 20(a5)
2773 ; RV32IZCMP-NEXT: lw a0, 12(sp) # 4-byte Folded Reload
2774 ; RV32IZCMP-NEXT: sw a0, 16(a5)
2775 ; RV32IZCMP-NEXT: lw a0, 16(sp) # 4-byte Folded Reload
2776 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2777 ; RV32IZCMP-NEXT: lw a0, 20(sp) # 4-byte Folded Reload
2778 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2779 ; RV32IZCMP-NEXT: lw a0, 24(sp) # 4-byte Folded Reload
2780 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2781 ; RV32IZCMP-NEXT: lw a0, 28(sp) # 4-byte Folded Reload
2782 ; RV32IZCMP-NEXT: sw a0, %lo(var_test_irq)(t0)
2783 ; RV32IZCMP-NEXT: cm.popret {ra, s0-s11}, 96
2785 ; RV64IZCMP-LABEL: callee_no_irq:
2786 ; RV64IZCMP: # %bb.0:
2787 ; RV64IZCMP-NEXT: cm.push {ra, s0-s11}, -160
2788 ; RV64IZCMP-NEXT: lui t0, %hi(var_test_irq)
2789 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq)(t0)
2790 ; RV64IZCMP-NEXT: sd a0, 40(sp) # 8-byte Folded Spill
2791 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2792 ; RV64IZCMP-NEXT: sd a0, 32(sp) # 8-byte Folded Spill
2793 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2794 ; RV64IZCMP-NEXT: sd a0, 24(sp) # 8-byte Folded Spill
2795 ; RV64IZCMP-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2796 ; RV64IZCMP-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
2797 ; RV64IZCMP-NEXT: addi a5, t0, %lo(var_test_irq)
2798 ; RV64IZCMP-NEXT: lw a0, 16(a5)
2799 ; RV64IZCMP-NEXT: sd a0, 8(sp) # 8-byte Folded Spill
2800 ; RV64IZCMP-NEXT: lw a0, 20(a5)
2801 ; RV64IZCMP-NEXT: sd a0, 0(sp) # 8-byte Folded Spill
2802 ; RV64IZCMP-NEXT: lw t4, 24(a5)
2803 ; RV64IZCMP-NEXT: lw t5, 28(a5)
2804 ; RV64IZCMP-NEXT: lw t6, 32(a5)
2805 ; RV64IZCMP-NEXT: lw s2, 36(a5)
2806 ; RV64IZCMP-NEXT: lw s3, 40(a5)
2807 ; RV64IZCMP-NEXT: lw s4, 44(a5)
2808 ; RV64IZCMP-NEXT: lw s5, 48(a5)
2809 ; RV64IZCMP-NEXT: lw s6, 52(a5)
2810 ; RV64IZCMP-NEXT: lw s7, 56(a5)
2811 ; RV64IZCMP-NEXT: lw s8, 60(a5)
2812 ; RV64IZCMP-NEXT: lw s9, 64(a5)
2813 ; RV64IZCMP-NEXT: lw s10, 68(a5)
2814 ; RV64IZCMP-NEXT: lw s11, 72(a5)
2815 ; RV64IZCMP-NEXT: lw ra, 76(a5)
2816 ; RV64IZCMP-NEXT: lw s1, 80(a5)
2817 ; RV64IZCMP-NEXT: lw t3, 84(a5)
2818 ; RV64IZCMP-NEXT: lw t2, 88(a5)
2819 ; RV64IZCMP-NEXT: lw t1, 92(a5)
2820 ; RV64IZCMP-NEXT: lw a7, 112(a5)
2821 ; RV64IZCMP-NEXT: lw s0, 116(a5)
2822 ; RV64IZCMP-NEXT: lw a3, 120(a5)
2823 ; RV64IZCMP-NEXT: lw a0, 124(a5)
2824 ; RV64IZCMP-NEXT: lw a6, 96(a5)
2825 ; RV64IZCMP-NEXT: lw a4, 100(a5)
2826 ; RV64IZCMP-NEXT: lw a2, 104(a5)
2827 ; RV64IZCMP-NEXT: lw a1, 108(a5)
2828 ; RV64IZCMP-NEXT: sw a0, 124(a5)
2829 ; RV64IZCMP-NEXT: sw a3, 120(a5)
2830 ; RV64IZCMP-NEXT: sw s0, 116(a5)
2831 ; RV64IZCMP-NEXT: sw a7, 112(a5)
2832 ; RV64IZCMP-NEXT: sw a1, 108(a5)
2833 ; RV64IZCMP-NEXT: sw a2, 104(a5)
2834 ; RV64IZCMP-NEXT: sw a4, 100(a5)
2835 ; RV64IZCMP-NEXT: sw a6, 96(a5)
2836 ; RV64IZCMP-NEXT: sw t1, 92(a5)
2837 ; RV64IZCMP-NEXT: sw t2, 88(a5)
2838 ; RV64IZCMP-NEXT: sw t3, 84(a5)
2839 ; RV64IZCMP-NEXT: sw s1, 80(a5)
2840 ; RV64IZCMP-NEXT: sw ra, 76(a5)
2841 ; RV64IZCMP-NEXT: sw s11, 72(a5)
2842 ; RV64IZCMP-NEXT: sw s10, 68(a5)
2843 ; RV64IZCMP-NEXT: sw s9, 64(a5)
2844 ; RV64IZCMP-NEXT: sw s8, 60(a5)
2845 ; RV64IZCMP-NEXT: sw s7, 56(a5)
2846 ; RV64IZCMP-NEXT: sw s6, 52(a5)
2847 ; RV64IZCMP-NEXT: sw s5, 48(a5)
2848 ; RV64IZCMP-NEXT: sw s4, 44(a5)
2849 ; RV64IZCMP-NEXT: sw s3, 40(a5)
2850 ; RV64IZCMP-NEXT: sw s2, 36(a5)
2851 ; RV64IZCMP-NEXT: sw t6, 32(a5)
2852 ; RV64IZCMP-NEXT: sw t5, 28(a5)
2853 ; RV64IZCMP-NEXT: sw t4, 24(a5)
2854 ; RV64IZCMP-NEXT: ld a0, 0(sp) # 8-byte Folded Reload
2855 ; RV64IZCMP-NEXT: sw a0, 20(a5)
2856 ; RV64IZCMP-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
2857 ; RV64IZCMP-NEXT: sw a0, 16(a5)
2858 ; RV64IZCMP-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
2859 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2860 ; RV64IZCMP-NEXT: ld a0, 24(sp) # 8-byte Folded Reload
2861 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2862 ; RV64IZCMP-NEXT: ld a0, 32(sp) # 8-byte Folded Reload
2863 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2864 ; RV64IZCMP-NEXT: ld a0, 40(sp) # 8-byte Folded Reload
2865 ; RV64IZCMP-NEXT: sw a0, %lo(var_test_irq)(t0)
2866 ; RV64IZCMP-NEXT: cm.popret {ra, s0-s11}, 160
2868 ; RV32IZCMP-SR-LABEL: callee_no_irq:
2869 ; RV32IZCMP-SR: # %bb.0:
2870 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s11}, -96
2871 ; RV32IZCMP-SR-NEXT: lui t0, %hi(var_test_irq)
2872 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq)(t0)
2873 ; RV32IZCMP-SR-NEXT: sw a0, 28(sp) # 4-byte Folded Spill
2874 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2875 ; RV32IZCMP-SR-NEXT: sw a0, 24(sp) # 4-byte Folded Spill
2876 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2877 ; RV32IZCMP-SR-NEXT: sw a0, 20(sp) # 4-byte Folded Spill
2878 ; RV32IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2879 ; RV32IZCMP-SR-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
2880 ; RV32IZCMP-SR-NEXT: addi a5, t0, %lo(var_test_irq)
2881 ; RV32IZCMP-SR-NEXT: lw a0, 16(a5)
2882 ; RV32IZCMP-SR-NEXT: sw a0, 12(sp) # 4-byte Folded Spill
2883 ; RV32IZCMP-SR-NEXT: lw a0, 20(a5)
2884 ; RV32IZCMP-SR-NEXT: sw a0, 8(sp) # 4-byte Folded Spill
2885 ; RV32IZCMP-SR-NEXT: lw t4, 24(a5)
2886 ; RV32IZCMP-SR-NEXT: lw t5, 28(a5)
2887 ; RV32IZCMP-SR-NEXT: lw t6, 32(a5)
2888 ; RV32IZCMP-SR-NEXT: lw s2, 36(a5)
2889 ; RV32IZCMP-SR-NEXT: lw s3, 40(a5)
2890 ; RV32IZCMP-SR-NEXT: lw s4, 44(a5)
2891 ; RV32IZCMP-SR-NEXT: lw s5, 48(a5)
2892 ; RV32IZCMP-SR-NEXT: lw s6, 52(a5)
2893 ; RV32IZCMP-SR-NEXT: lw s7, 56(a5)
2894 ; RV32IZCMP-SR-NEXT: lw s8, 60(a5)
2895 ; RV32IZCMP-SR-NEXT: lw s9, 64(a5)
2896 ; RV32IZCMP-SR-NEXT: lw s10, 68(a5)
2897 ; RV32IZCMP-SR-NEXT: lw s11, 72(a5)
2898 ; RV32IZCMP-SR-NEXT: lw ra, 76(a5)
2899 ; RV32IZCMP-SR-NEXT: lw s1, 80(a5)
2900 ; RV32IZCMP-SR-NEXT: lw t3, 84(a5)
2901 ; RV32IZCMP-SR-NEXT: lw t2, 88(a5)
2902 ; RV32IZCMP-SR-NEXT: lw t1, 92(a5)
2903 ; RV32IZCMP-SR-NEXT: lw a7, 112(a5)
2904 ; RV32IZCMP-SR-NEXT: lw s0, 116(a5)
2905 ; RV32IZCMP-SR-NEXT: lw a3, 120(a5)
2906 ; RV32IZCMP-SR-NEXT: lw a0, 124(a5)
2907 ; RV32IZCMP-SR-NEXT: lw a6, 96(a5)
2908 ; RV32IZCMP-SR-NEXT: lw a4, 100(a5)
2909 ; RV32IZCMP-SR-NEXT: lw a2, 104(a5)
2910 ; RV32IZCMP-SR-NEXT: lw a1, 108(a5)
2911 ; RV32IZCMP-SR-NEXT: sw a0, 124(a5)
2912 ; RV32IZCMP-SR-NEXT: sw a3, 120(a5)
2913 ; RV32IZCMP-SR-NEXT: sw s0, 116(a5)
2914 ; RV32IZCMP-SR-NEXT: sw a7, 112(a5)
2915 ; RV32IZCMP-SR-NEXT: sw a1, 108(a5)
2916 ; RV32IZCMP-SR-NEXT: sw a2, 104(a5)
2917 ; RV32IZCMP-SR-NEXT: sw a4, 100(a5)
2918 ; RV32IZCMP-SR-NEXT: sw a6, 96(a5)
2919 ; RV32IZCMP-SR-NEXT: sw t1, 92(a5)
2920 ; RV32IZCMP-SR-NEXT: sw t2, 88(a5)
2921 ; RV32IZCMP-SR-NEXT: sw t3, 84(a5)
2922 ; RV32IZCMP-SR-NEXT: sw s1, 80(a5)
2923 ; RV32IZCMP-SR-NEXT: sw ra, 76(a5)
2924 ; RV32IZCMP-SR-NEXT: sw s11, 72(a5)
2925 ; RV32IZCMP-SR-NEXT: sw s10, 68(a5)
2926 ; RV32IZCMP-SR-NEXT: sw s9, 64(a5)
2927 ; RV32IZCMP-SR-NEXT: sw s8, 60(a5)
2928 ; RV32IZCMP-SR-NEXT: sw s7, 56(a5)
2929 ; RV32IZCMP-SR-NEXT: sw s6, 52(a5)
2930 ; RV32IZCMP-SR-NEXT: sw s5, 48(a5)
2931 ; RV32IZCMP-SR-NEXT: sw s4, 44(a5)
2932 ; RV32IZCMP-SR-NEXT: sw s3, 40(a5)
2933 ; RV32IZCMP-SR-NEXT: sw s2, 36(a5)
2934 ; RV32IZCMP-SR-NEXT: sw t6, 32(a5)
2935 ; RV32IZCMP-SR-NEXT: sw t5, 28(a5)
2936 ; RV32IZCMP-SR-NEXT: sw t4, 24(a5)
2937 ; RV32IZCMP-SR-NEXT: lw a0, 8(sp) # 4-byte Folded Reload
2938 ; RV32IZCMP-SR-NEXT: sw a0, 20(a5)
2939 ; RV32IZCMP-SR-NEXT: lw a0, 12(sp) # 4-byte Folded Reload
2940 ; RV32IZCMP-SR-NEXT: sw a0, 16(a5)
2941 ; RV32IZCMP-SR-NEXT: lw a0, 16(sp) # 4-byte Folded Reload
2942 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+12)(t0)
2943 ; RV32IZCMP-SR-NEXT: lw a0, 20(sp) # 4-byte Folded Reload
2944 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+8)(t0)
2945 ; RV32IZCMP-SR-NEXT: lw a0, 24(sp) # 4-byte Folded Reload
2946 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+4)(t0)
2947 ; RV32IZCMP-SR-NEXT: lw a0, 28(sp) # 4-byte Folded Reload
2948 ; RV32IZCMP-SR-NEXT: sw a0, %lo(var_test_irq)(t0)
2949 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0-s11}, 96
2951 ; RV64IZCMP-SR-LABEL: callee_no_irq:
2952 ; RV64IZCMP-SR: # %bb.0:
2953 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s11}, -160
2954 ; RV64IZCMP-SR-NEXT: lui t0, %hi(var_test_irq)
2955 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq)(t0)
2956 ; RV64IZCMP-SR-NEXT: sd a0, 40(sp) # 8-byte Folded Spill
2957 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+4)(t0)
2958 ; RV64IZCMP-SR-NEXT: sd a0, 32(sp) # 8-byte Folded Spill
2959 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+8)(t0)
2960 ; RV64IZCMP-SR-NEXT: sd a0, 24(sp) # 8-byte Folded Spill
2961 ; RV64IZCMP-SR-NEXT: lw a0, %lo(var_test_irq+12)(t0)
2962 ; RV64IZCMP-SR-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
2963 ; RV64IZCMP-SR-NEXT: addi a5, t0, %lo(var_test_irq)
2964 ; RV64IZCMP-SR-NEXT: lw a0, 16(a5)
2965 ; RV64IZCMP-SR-NEXT: sd a0, 8(sp) # 8-byte Folded Spill
2966 ; RV64IZCMP-SR-NEXT: lw a0, 20(a5)
2967 ; RV64IZCMP-SR-NEXT: sd a0, 0(sp) # 8-byte Folded Spill
2968 ; RV64IZCMP-SR-NEXT: lw t4, 24(a5)
2969 ; RV64IZCMP-SR-NEXT: lw t5, 28(a5)
2970 ; RV64IZCMP-SR-NEXT: lw t6, 32(a5)
2971 ; RV64IZCMP-SR-NEXT: lw s2, 36(a5)
2972 ; RV64IZCMP-SR-NEXT: lw s3, 40(a5)
2973 ; RV64IZCMP-SR-NEXT: lw s4, 44(a5)
2974 ; RV64IZCMP-SR-NEXT: lw s5, 48(a5)
2975 ; RV64IZCMP-SR-NEXT: lw s6, 52(a5)
2976 ; RV64IZCMP-SR-NEXT: lw s7, 56(a5)
2977 ; RV64IZCMP-SR-NEXT: lw s8, 60(a5)
2978 ; RV64IZCMP-SR-NEXT: lw s9, 64(a5)
2979 ; RV64IZCMP-SR-NEXT: lw s10, 68(a5)
2980 ; RV64IZCMP-SR-NEXT: lw s11, 72(a5)
2981 ; RV64IZCMP-SR-NEXT: lw ra, 76(a5)
2982 ; RV64IZCMP-SR-NEXT: lw s1, 80(a5)
2983 ; RV64IZCMP-SR-NEXT: lw t3, 84(a5)
2984 ; RV64IZCMP-SR-NEXT: lw t2, 88(a5)
2985 ; RV64IZCMP-SR-NEXT: lw t1, 92(a5)
2986 ; RV64IZCMP-SR-NEXT: lw a7, 112(a5)
2987 ; RV64IZCMP-SR-NEXT: lw s0, 116(a5)
2988 ; RV64IZCMP-SR-NEXT: lw a3, 120(a5)
2989 ; RV64IZCMP-SR-NEXT: lw a0, 124(a5)
2990 ; RV64IZCMP-SR-NEXT: lw a6, 96(a5)
2991 ; RV64IZCMP-SR-NEXT: lw a4, 100(a5)
2992 ; RV64IZCMP-SR-NEXT: lw a2, 104(a5)
2993 ; RV64IZCMP-SR-NEXT: lw a1, 108(a5)
2994 ; RV64IZCMP-SR-NEXT: sw a0, 124(a5)
2995 ; RV64IZCMP-SR-NEXT: sw a3, 120(a5)
2996 ; RV64IZCMP-SR-NEXT: sw s0, 116(a5)
2997 ; RV64IZCMP-SR-NEXT: sw a7, 112(a5)
2998 ; RV64IZCMP-SR-NEXT: sw a1, 108(a5)
2999 ; RV64IZCMP-SR-NEXT: sw a2, 104(a5)
3000 ; RV64IZCMP-SR-NEXT: sw a4, 100(a5)
3001 ; RV64IZCMP-SR-NEXT: sw a6, 96(a5)
3002 ; RV64IZCMP-SR-NEXT: sw t1, 92(a5)
3003 ; RV64IZCMP-SR-NEXT: sw t2, 88(a5)
3004 ; RV64IZCMP-SR-NEXT: sw t3, 84(a5)
3005 ; RV64IZCMP-SR-NEXT: sw s1, 80(a5)
3006 ; RV64IZCMP-SR-NEXT: sw ra, 76(a5)
3007 ; RV64IZCMP-SR-NEXT: sw s11, 72(a5)
3008 ; RV64IZCMP-SR-NEXT: sw s10, 68(a5)
3009 ; RV64IZCMP-SR-NEXT: sw s9, 64(a5)
3010 ; RV64IZCMP-SR-NEXT: sw s8, 60(a5)
3011 ; RV64IZCMP-SR-NEXT: sw s7, 56(a5)
3012 ; RV64IZCMP-SR-NEXT: sw s6, 52(a5)
3013 ; RV64IZCMP-SR-NEXT: sw s5, 48(a5)
3014 ; RV64IZCMP-SR-NEXT: sw s4, 44(a5)
3015 ; RV64IZCMP-SR-NEXT: sw s3, 40(a5)
3016 ; RV64IZCMP-SR-NEXT: sw s2, 36(a5)
3017 ; RV64IZCMP-SR-NEXT: sw t6, 32(a5)
3018 ; RV64IZCMP-SR-NEXT: sw t5, 28(a5)
3019 ; RV64IZCMP-SR-NEXT: sw t4, 24(a5)
3020 ; RV64IZCMP-SR-NEXT: ld a0, 0(sp) # 8-byte Folded Reload
3021 ; RV64IZCMP-SR-NEXT: sw a0, 20(a5)
3022 ; RV64IZCMP-SR-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
3023 ; RV64IZCMP-SR-NEXT: sw a0, 16(a5)
3024 ; RV64IZCMP-SR-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
3025 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+12)(t0)
3026 ; RV64IZCMP-SR-NEXT: ld a0, 24(sp) # 8-byte Folded Reload
3027 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+8)(t0)
3028 ; RV64IZCMP-SR-NEXT: ld a0, 32(sp) # 8-byte Folded Reload
3029 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq+4)(t0)
3030 ; RV64IZCMP-SR-NEXT: ld a0, 40(sp) # 8-byte Folded Reload
3031 ; RV64IZCMP-SR-NEXT: sw a0, %lo(var_test_irq)(t0)
3032 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0-s11}, 160
3034 ; RV32I-LABEL: callee_no_irq:
3035 ; RV32I: # %bb.0:
3036 ; RV32I-NEXT: addi sp, sp, -80
3037 ; RV32I-NEXT: sw ra, 76(sp) # 4-byte Folded Spill
3038 ; RV32I-NEXT: sw s0, 72(sp) # 4-byte Folded Spill
3039 ; RV32I-NEXT: sw s1, 68(sp) # 4-byte Folded Spill
3040 ; RV32I-NEXT: sw s2, 64(sp) # 4-byte Folded Spill
3041 ; RV32I-NEXT: sw s3, 60(sp) # 4-byte Folded Spill
3042 ; RV32I-NEXT: sw s4, 56(sp) # 4-byte Folded Spill
3043 ; RV32I-NEXT: sw s5, 52(sp) # 4-byte Folded Spill
3044 ; RV32I-NEXT: sw s6, 48(sp) # 4-byte Folded Spill
3045 ; RV32I-NEXT: sw s7, 44(sp) # 4-byte Folded Spill
3046 ; RV32I-NEXT: sw s8, 40(sp) # 4-byte Folded Spill
3047 ; RV32I-NEXT: sw s9, 36(sp) # 4-byte Folded Spill
3048 ; RV32I-NEXT: sw s10, 32(sp) # 4-byte Folded Spill
3049 ; RV32I-NEXT: sw s11, 28(sp) # 4-byte Folded Spill
3050 ; RV32I-NEXT: lui a7, %hi(var_test_irq)
3051 ; RV32I-NEXT: lw a0, %lo(var_test_irq)(a7)
3052 ; RV32I-NEXT: sw a0, 24(sp) # 4-byte Folded Spill
3053 ; RV32I-NEXT: lw a0, %lo(var_test_irq+4)(a7)
3054 ; RV32I-NEXT: sw a0, 20(sp) # 4-byte Folded Spill
3055 ; RV32I-NEXT: lw a0, %lo(var_test_irq+8)(a7)
3056 ; RV32I-NEXT: sw a0, 16(sp) # 4-byte Folded Spill
3057 ; RV32I-NEXT: lw a0, %lo(var_test_irq+12)(a7)
3058 ; RV32I-NEXT: sw a0, 12(sp) # 4-byte Folded Spill
3059 ; RV32I-NEXT: addi a5, a7, %lo(var_test_irq)
3060 ; RV32I-NEXT: lw a0, 16(a5)
3061 ; RV32I-NEXT: sw a0, 8(sp) # 4-byte Folded Spill
3062 ; RV32I-NEXT: lw a0, 20(a5)
3063 ; RV32I-NEXT: sw a0, 4(sp) # 4-byte Folded Spill
3064 ; RV32I-NEXT: lw t0, 24(a5)
3065 ; RV32I-NEXT: lw t1, 28(a5)
3066 ; RV32I-NEXT: lw t2, 32(a5)
3067 ; RV32I-NEXT: lw t3, 36(a5)
3068 ; RV32I-NEXT: lw t4, 40(a5)
3069 ; RV32I-NEXT: lw t5, 44(a5)
3070 ; RV32I-NEXT: lw t6, 48(a5)
3071 ; RV32I-NEXT: lw s0, 52(a5)
3072 ; RV32I-NEXT: lw s1, 56(a5)
3073 ; RV32I-NEXT: lw s2, 60(a5)
3074 ; RV32I-NEXT: lw s3, 64(a5)
3075 ; RV32I-NEXT: lw s4, 68(a5)
3076 ; RV32I-NEXT: lw s5, 72(a5)
3077 ; RV32I-NEXT: lw s6, 76(a5)
3078 ; RV32I-NEXT: lw s7, 80(a5)
3079 ; RV32I-NEXT: lw s8, 84(a5)
3080 ; RV32I-NEXT: lw s9, 88(a5)
3081 ; RV32I-NEXT: lw s10, 92(a5)
3082 ; RV32I-NEXT: lw s11, 112(a5)
3083 ; RV32I-NEXT: lw ra, 116(a5)
3084 ; RV32I-NEXT: lw a3, 120(a5)
3085 ; RV32I-NEXT: lw a0, 124(a5)
3086 ; RV32I-NEXT: lw a6, 96(a5)
3087 ; RV32I-NEXT: lw a4, 100(a5)
3088 ; RV32I-NEXT: lw a2, 104(a5)
3089 ; RV32I-NEXT: lw a1, 108(a5)
3090 ; RV32I-NEXT: sw a0, 124(a5)
3091 ; RV32I-NEXT: sw a3, 120(a5)
3092 ; RV32I-NEXT: sw ra, 116(a5)
3093 ; RV32I-NEXT: sw s11, 112(a5)
3094 ; RV32I-NEXT: sw a1, 108(a5)
3095 ; RV32I-NEXT: sw a2, 104(a5)
3096 ; RV32I-NEXT: sw a4, 100(a5)
3097 ; RV32I-NEXT: sw a6, 96(a5)
3098 ; RV32I-NEXT: sw s10, 92(a5)
3099 ; RV32I-NEXT: sw s9, 88(a5)
3100 ; RV32I-NEXT: sw s8, 84(a5)
3101 ; RV32I-NEXT: sw s7, 80(a5)
3102 ; RV32I-NEXT: sw s6, 76(a5)
3103 ; RV32I-NEXT: sw s5, 72(a5)
3104 ; RV32I-NEXT: sw s4, 68(a5)
3105 ; RV32I-NEXT: sw s3, 64(a5)
3106 ; RV32I-NEXT: sw s2, 60(a5)
3107 ; RV32I-NEXT: sw s1, 56(a5)
3108 ; RV32I-NEXT: sw s0, 52(a5)
3109 ; RV32I-NEXT: sw t6, 48(a5)
3110 ; RV32I-NEXT: sw t5, 44(a5)
3111 ; RV32I-NEXT: sw t4, 40(a5)
3112 ; RV32I-NEXT: sw t3, 36(a5)
3113 ; RV32I-NEXT: sw t2, 32(a5)
3114 ; RV32I-NEXT: sw t1, 28(a5)
3115 ; RV32I-NEXT: sw t0, 24(a5)
3116 ; RV32I-NEXT: lw a0, 4(sp) # 4-byte Folded Reload
3117 ; RV32I-NEXT: sw a0, 20(a5)
3118 ; RV32I-NEXT: lw a0, 8(sp) # 4-byte Folded Reload
3119 ; RV32I-NEXT: sw a0, 16(a5)
3120 ; RV32I-NEXT: lw a0, 12(sp) # 4-byte Folded Reload
3121 ; RV32I-NEXT: sw a0, %lo(var_test_irq+12)(a7)
3122 ; RV32I-NEXT: lw a0, 16(sp) # 4-byte Folded Reload
3123 ; RV32I-NEXT: sw a0, %lo(var_test_irq+8)(a7)
3124 ; RV32I-NEXT: lw a0, 20(sp) # 4-byte Folded Reload
3125 ; RV32I-NEXT: sw a0, %lo(var_test_irq+4)(a7)
3126 ; RV32I-NEXT: lw a0, 24(sp) # 4-byte Folded Reload
3127 ; RV32I-NEXT: sw a0, %lo(var_test_irq)(a7)
3128 ; RV32I-NEXT: lw ra, 76(sp) # 4-byte Folded Reload
3129 ; RV32I-NEXT: lw s0, 72(sp) # 4-byte Folded Reload
3130 ; RV32I-NEXT: lw s1, 68(sp) # 4-byte Folded Reload
3131 ; RV32I-NEXT: lw s2, 64(sp) # 4-byte Folded Reload
3132 ; RV32I-NEXT: lw s3, 60(sp) # 4-byte Folded Reload
3133 ; RV32I-NEXT: lw s4, 56(sp) # 4-byte Folded Reload
3134 ; RV32I-NEXT: lw s5, 52(sp) # 4-byte Folded Reload
3135 ; RV32I-NEXT: lw s6, 48(sp) # 4-byte Folded Reload
3136 ; RV32I-NEXT: lw s7, 44(sp) # 4-byte Folded Reload
3137 ; RV32I-NEXT: lw s8, 40(sp) # 4-byte Folded Reload
3138 ; RV32I-NEXT: lw s9, 36(sp) # 4-byte Folded Reload
3139 ; RV32I-NEXT: lw s10, 32(sp) # 4-byte Folded Reload
3140 ; RV32I-NEXT: lw s11, 28(sp) # 4-byte Folded Reload
3141 ; RV32I-NEXT: addi sp, sp, 80
3142 ; RV32I-NEXT: ret
3143 ;
3144 ; RV64I-LABEL: callee_no_irq:
3145 ; RV64I: # %bb.0:
3146 ; RV64I-NEXT: addi sp, sp, -160
3147 ; RV64I-NEXT: sd ra, 152(sp) # 8-byte Folded Spill
3148 ; RV64I-NEXT: sd s0, 144(sp) # 8-byte Folded Spill
3149 ; RV64I-NEXT: sd s1, 136(sp) # 8-byte Folded Spill
3150 ; RV64I-NEXT: sd s2, 128(sp) # 8-byte Folded Spill
3151 ; RV64I-NEXT: sd s3, 120(sp) # 8-byte Folded Spill
3152 ; RV64I-NEXT: sd s4, 112(sp) # 8-byte Folded Spill
3153 ; RV64I-NEXT: sd s5, 104(sp) # 8-byte Folded Spill
3154 ; RV64I-NEXT: sd s6, 96(sp) # 8-byte Folded Spill
3155 ; RV64I-NEXT: sd s7, 88(sp) # 8-byte Folded Spill
3156 ; RV64I-NEXT: sd s8, 80(sp) # 8-byte Folded Spill
3157 ; RV64I-NEXT: sd s9, 72(sp) # 8-byte Folded Spill
3158 ; RV64I-NEXT: sd s10, 64(sp) # 8-byte Folded Spill
3159 ; RV64I-NEXT: sd s11, 56(sp) # 8-byte Folded Spill
3160 ; RV64I-NEXT: lui a7, %hi(var_test_irq)
3161 ; RV64I-NEXT: lw a0, %lo(var_test_irq)(a7)
3162 ; RV64I-NEXT: sd a0, 48(sp) # 8-byte Folded Spill
3163 ; RV64I-NEXT: lw a0, %lo(var_test_irq+4)(a7)
3164 ; RV64I-NEXT: sd a0, 40(sp) # 8-byte Folded Spill
3165 ; RV64I-NEXT: lw a0, %lo(var_test_irq+8)(a7)
3166 ; RV64I-NEXT: sd a0, 32(sp) # 8-byte Folded Spill
3167 ; RV64I-NEXT: lw a0, %lo(var_test_irq+12)(a7)
3168 ; RV64I-NEXT: sd a0, 24(sp) # 8-byte Folded Spill
3169 ; RV64I-NEXT: addi a5, a7, %lo(var_test_irq)
3170 ; RV64I-NEXT: lw a0, 16(a5)
3171 ; RV64I-NEXT: sd a0, 16(sp) # 8-byte Folded Spill
3172 ; RV64I-NEXT: lw a0, 20(a5)
3173 ; RV64I-NEXT: sd a0, 8(sp) # 8-byte Folded Spill
3174 ; RV64I-NEXT: lw t0, 24(a5)
3175 ; RV64I-NEXT: lw t1, 28(a5)
3176 ; RV64I-NEXT: lw t2, 32(a5)
3177 ; RV64I-NEXT: lw t3, 36(a5)
3178 ; RV64I-NEXT: lw t4, 40(a5)
3179 ; RV64I-NEXT: lw t5, 44(a5)
3180 ; RV64I-NEXT: lw t6, 48(a5)
3181 ; RV64I-NEXT: lw s0, 52(a5)
3182 ; RV64I-NEXT: lw s1, 56(a5)
3183 ; RV64I-NEXT: lw s2, 60(a5)
3184 ; RV64I-NEXT: lw s3, 64(a5)
3185 ; RV64I-NEXT: lw s4, 68(a5)
3186 ; RV64I-NEXT: lw s5, 72(a5)
3187 ; RV64I-NEXT: lw s6, 76(a5)
3188 ; RV64I-NEXT: lw s7, 80(a5)
3189 ; RV64I-NEXT: lw s8, 84(a5)
3190 ; RV64I-NEXT: lw s9, 88(a5)
3191 ; RV64I-NEXT: lw s10, 92(a5)
3192 ; RV64I-NEXT: lw s11, 112(a5)
3193 ; RV64I-NEXT: lw ra, 116(a5)
3194 ; RV64I-NEXT: lw a3, 120(a5)
3195 ; RV64I-NEXT: lw a0, 124(a5)
3196 ; RV64I-NEXT: lw a6, 96(a5)
3197 ; RV64I-NEXT: lw a4, 100(a5)
3198 ; RV64I-NEXT: lw a2, 104(a5)
3199 ; RV64I-NEXT: lw a1, 108(a5)
3200 ; RV64I-NEXT: sw a0, 124(a5)
3201 ; RV64I-NEXT: sw a3, 120(a5)
3202 ; RV64I-NEXT: sw ra, 116(a5)
3203 ; RV64I-NEXT: sw s11, 112(a5)
3204 ; RV64I-NEXT: sw a1, 108(a5)
3205 ; RV64I-NEXT: sw a2, 104(a5)
3206 ; RV64I-NEXT: sw a4, 100(a5)
3207 ; RV64I-NEXT: sw a6, 96(a5)
3208 ; RV64I-NEXT: sw s10, 92(a5)
3209 ; RV64I-NEXT: sw s9, 88(a5)
3210 ; RV64I-NEXT: sw s8, 84(a5)
3211 ; RV64I-NEXT: sw s7, 80(a5)
3212 ; RV64I-NEXT: sw s6, 76(a5)
3213 ; RV64I-NEXT: sw s5, 72(a5)
3214 ; RV64I-NEXT: sw s4, 68(a5)
3215 ; RV64I-NEXT: sw s3, 64(a5)
3216 ; RV64I-NEXT: sw s2, 60(a5)
3217 ; RV64I-NEXT: sw s1, 56(a5)
3218 ; RV64I-NEXT: sw s0, 52(a5)
3219 ; RV64I-NEXT: sw t6, 48(a5)
3220 ; RV64I-NEXT: sw t5, 44(a5)
3221 ; RV64I-NEXT: sw t4, 40(a5)
3222 ; RV64I-NEXT: sw t3, 36(a5)
3223 ; RV64I-NEXT: sw t2, 32(a5)
3224 ; RV64I-NEXT: sw t1, 28(a5)
3225 ; RV64I-NEXT: sw t0, 24(a5)
3226 ; RV64I-NEXT: ld a0, 8(sp) # 8-byte Folded Reload
3227 ; RV64I-NEXT: sw a0, 20(a5)
3228 ; RV64I-NEXT: ld a0, 16(sp) # 8-byte Folded Reload
3229 ; RV64I-NEXT: sw a0, 16(a5)
3230 ; RV64I-NEXT: ld a0, 24(sp) # 8-byte Folded Reload
3231 ; RV64I-NEXT: sw a0, %lo(var_test_irq+12)(a7)
3232 ; RV64I-NEXT: ld a0, 32(sp) # 8-byte Folded Reload
3233 ; RV64I-NEXT: sw a0, %lo(var_test_irq+8)(a7)
3234 ; RV64I-NEXT: ld a0, 40(sp) # 8-byte Folded Reload
3235 ; RV64I-NEXT: sw a0, %lo(var_test_irq+4)(a7)
3236 ; RV64I-NEXT: ld a0, 48(sp) # 8-byte Folded Reload
3237 ; RV64I-NEXT: sw a0, %lo(var_test_irq)(a7)
3238 ; RV64I-NEXT: ld ra, 152(sp) # 8-byte Folded Reload
3239 ; RV64I-NEXT: ld s0, 144(sp) # 8-byte Folded Reload
3240 ; RV64I-NEXT: ld s1, 136(sp) # 8-byte Folded Reload
3241 ; RV64I-NEXT: ld s2, 128(sp) # 8-byte Folded Reload
3242 ; RV64I-NEXT: ld s3, 120(sp) # 8-byte Folded Reload
3243 ; RV64I-NEXT: ld s4, 112(sp) # 8-byte Folded Reload
3244 ; RV64I-NEXT: ld s5, 104(sp) # 8-byte Folded Reload
3245 ; RV64I-NEXT: ld s6, 96(sp) # 8-byte Folded Reload
3246 ; RV64I-NEXT: ld s7, 88(sp) # 8-byte Folded Reload
3247 ; RV64I-NEXT: ld s8, 80(sp) # 8-byte Folded Reload
3248 ; RV64I-NEXT: ld s9, 72(sp) # 8-byte Folded Reload
3249 ; RV64I-NEXT: ld s10, 64(sp) # 8-byte Folded Reload
3250 ; RV64I-NEXT: ld s11, 56(sp) # 8-byte Folded Reload
3251 ; RV64I-NEXT: addi sp, sp, 160
3252 ; RV64I-NEXT: ret
3253 %val = load [32 x i32], ptr @var_test_irq
3254 store volatile [32 x i32] %val, ptr @var_test_irq
3255 ret void
3256 }
3258 declare void @bar(ptr, ptr)
3259 declare ptr @llvm.frameaddress.p0(i32 immarg)
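; use_fp calls @llvm.frameaddress, which forces a frame pointer: s0 is established as the
; frame base and the CFA is temporarily redefined relative to s0, so the checks below also
; cover the .cfi_def_cfa bookkeeping around cm.push/cm.popret.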
3261 define i32 @use_fp(i32 %x) {
3262 ; RV32IZCMP-LABEL: use_fp:
3263 ; RV32IZCMP: # %bb.0: # %entry
3264 ; RV32IZCMP-NEXT: cm.push {ra, s0-s1}, -32
3265 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 32
3266 ; RV32IZCMP-NEXT: .cfi_offset ra, -12
3267 ; RV32IZCMP-NEXT: .cfi_offset s0, -8
3268 ; RV32IZCMP-NEXT: .cfi_offset s1, -4
3269 ; RV32IZCMP-NEXT: addi s0, sp, 32
3270 ; RV32IZCMP-NEXT: .cfi_def_cfa s0, 0
3271 ; RV32IZCMP-NEXT: mv s1, a0
3272 ; RV32IZCMP-NEXT: addi a1, s0, -20
3273 ; RV32IZCMP-NEXT: mv a0, s0
3274 ; RV32IZCMP-NEXT: call bar
3275 ; RV32IZCMP-NEXT: mv a0, s1
3276 ; RV32IZCMP-NEXT: .cfi_def_cfa sp, 32
3277 ; RV32IZCMP-NEXT: cm.popret {ra, s0-s1}, 32
3279 ; RV64IZCMP-LABEL: use_fp:
3280 ; RV64IZCMP: # %bb.0: # %entry
3281 ; RV64IZCMP-NEXT: cm.push {ra, s0-s1}, -48
3282 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 48
3283 ; RV64IZCMP-NEXT: .cfi_offset ra, -24
3284 ; RV64IZCMP-NEXT: .cfi_offset s0, -16
3285 ; RV64IZCMP-NEXT: .cfi_offset s1, -8
3286 ; RV64IZCMP-NEXT: addi s0, sp, 48
3287 ; RV64IZCMP-NEXT: .cfi_def_cfa s0, 0
3288 ; RV64IZCMP-NEXT: mv s1, a0
3289 ; RV64IZCMP-NEXT: addi a1, s0, -36
3290 ; RV64IZCMP-NEXT: mv a0, s0
3291 ; RV64IZCMP-NEXT: call bar
3292 ; RV64IZCMP-NEXT: mv a0, s1
3293 ; RV64IZCMP-NEXT: .cfi_def_cfa sp, 48
3294 ; RV64IZCMP-NEXT: cm.popret {ra, s0-s1}, 48
3296 ; RV32IZCMP-SR-LABEL: use_fp:
3297 ; RV32IZCMP-SR: # %bb.0: # %entry
3298 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s1}, -32
3299 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 32
3300 ; RV32IZCMP-SR-NEXT: .cfi_offset ra, -12
3301 ; RV32IZCMP-SR-NEXT: .cfi_offset s0, -8
3302 ; RV32IZCMP-SR-NEXT: .cfi_offset s1, -4
3303 ; RV32IZCMP-SR-NEXT: addi s0, sp, 32
3304 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
3305 ; RV32IZCMP-SR-NEXT: mv s1, a0
3306 ; RV32IZCMP-SR-NEXT: addi a1, s0, -20
3307 ; RV32IZCMP-SR-NEXT: mv a0, s0
3308 ; RV32IZCMP-SR-NEXT: call bar
3309 ; RV32IZCMP-SR-NEXT: mv a0, s1
3310 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa sp, 32
3311 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0-s1}, 32
3313 ; RV64IZCMP-SR-LABEL: use_fp:
3314 ; RV64IZCMP-SR: # %bb.0: # %entry
3315 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s1}, -48
3316 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 48
3317 ; RV64IZCMP-SR-NEXT: .cfi_offset ra, -24
3318 ; RV64IZCMP-SR-NEXT: .cfi_offset s0, -16
3319 ; RV64IZCMP-SR-NEXT: .cfi_offset s1, -8
3320 ; RV64IZCMP-SR-NEXT: addi s0, sp, 48
3321 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa s0, 0
3322 ; RV64IZCMP-SR-NEXT: mv s1, a0
3323 ; RV64IZCMP-SR-NEXT: addi a1, s0, -36
3324 ; RV64IZCMP-SR-NEXT: mv a0, s0
3325 ; RV64IZCMP-SR-NEXT: call bar
3326 ; RV64IZCMP-SR-NEXT: mv a0, s1
3327 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa sp, 48
3328 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0-s1}, 48
3330 ; RV32I-LABEL: use_fp:
3331 ; RV32I: # %bb.0: # %entry
3332 ; RV32I-NEXT: addi sp, sp, -16
3333 ; RV32I-NEXT: .cfi_def_cfa_offset 16
3334 ; RV32I-NEXT: sw ra, 12(sp) # 4-byte Folded Spill
3335 ; RV32I-NEXT: sw s0, 8(sp) # 4-byte Folded Spill
3336 ; RV32I-NEXT: sw s1, 4(sp) # 4-byte Folded Spill
3337 ; RV32I-NEXT: .cfi_offset ra, -4
3338 ; RV32I-NEXT: .cfi_offset s0, -8
3339 ; RV32I-NEXT: .cfi_offset s1, -12
3340 ; RV32I-NEXT: addi s0, sp, 16
3341 ; RV32I-NEXT: .cfi_def_cfa s0, 0
3342 ; RV32I-NEXT: mv s1, a0
3343 ; RV32I-NEXT: addi a1, s0, -16
3344 ; RV32I-NEXT: mv a0, s0
3345 ; RV32I-NEXT: call bar
3346 ; RV32I-NEXT: mv a0, s1
3347 ; RV32I-NEXT: .cfi_def_cfa sp, 16
3348 ; RV32I-NEXT: lw ra, 12(sp) # 4-byte Folded Reload
3349 ; RV32I-NEXT: lw s0, 8(sp) # 4-byte Folded Reload
3350 ; RV32I-NEXT: lw s1, 4(sp) # 4-byte Folded Reload
3351 ; RV32I-NEXT: .cfi_restore ra
3352 ; RV32I-NEXT: .cfi_restore s0
3353 ; RV32I-NEXT: .cfi_restore s1
3354 ; RV32I-NEXT: addi sp, sp, 16
3355 ; RV32I-NEXT: .cfi_def_cfa_offset 0
3356 ; RV32I-NEXT: ret
3357 ;
3358 ; RV64I-LABEL: use_fp:
3359 ; RV64I: # %bb.0: # %entry
3360 ; RV64I-NEXT: addi sp, sp, -32
3361 ; RV64I-NEXT: .cfi_def_cfa_offset 32
3362 ; RV64I-NEXT: sd ra, 24(sp) # 8-byte Folded Spill
3363 ; RV64I-NEXT: sd s0, 16(sp) # 8-byte Folded Spill
3364 ; RV64I-NEXT: sd s1, 8(sp) # 8-byte Folded Spill
3365 ; RV64I-NEXT: .cfi_offset ra, -8
3366 ; RV64I-NEXT: .cfi_offset s0, -16
3367 ; RV64I-NEXT: .cfi_offset s1, -24
3368 ; RV64I-NEXT: addi s0, sp, 32
3369 ; RV64I-NEXT: .cfi_def_cfa s0, 0
3370 ; RV64I-NEXT: mv s1, a0
3371 ; RV64I-NEXT: addi a1, s0, -28
3372 ; RV64I-NEXT: mv a0, s0
3373 ; RV64I-NEXT: call bar
3374 ; RV64I-NEXT: mv a0, s1
3375 ; RV64I-NEXT: .cfi_def_cfa sp, 32
3376 ; RV64I-NEXT: ld ra, 24(sp) # 8-byte Folded Reload
3377 ; RV64I-NEXT: ld s0, 16(sp) # 8-byte Folded Reload
3378 ; RV64I-NEXT: ld s1, 8(sp) # 8-byte Folded Reload
3379 ; RV64I-NEXT: .cfi_restore ra
3380 ; RV64I-NEXT: .cfi_restore s0
3381 ; RV64I-NEXT: .cfi_restore s1
3382 ; RV64I-NEXT: addi sp, sp, 32
3383 ; RV64I-NEXT: .cfi_def_cfa_offset 0
3384 ; RV64I-NEXT: ret
3385 entry:
3386 %var = alloca i32, align 4
3387 %0 = tail call ptr @llvm.frameaddress.p0(i32 0)
3388 call void @bar(ptr %0, ptr %var)
3389 ret i32 %x
3390 }
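; spill_x10 clobbers only s10 (x26) via inline asm. Zcmp register lists must be contiguous
; from {ra, s0}, so cm.push/cm.popret end up saving the whole {ra, s0-s11} set, whereas the
; base ISA spills just s10.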
3392 define void @spill_x10() {
3393 ; RV32IZCMP-LABEL: spill_x10:
3394 ; RV32IZCMP: # %bb.0: # %entry
3395 ; RV32IZCMP-NEXT: cm.push {ra, s0-s11}, -64
3396 ; RV32IZCMP-NEXT: .cfi_def_cfa_offset 64
3397 ; RV32IZCMP-NEXT: .cfi_offset s10, -8
3398 ; RV32IZCMP-NEXT: .cfi_offset s11, -4
3399 ; RV32IZCMP-NEXT: #APP
3400 ; RV32IZCMP-NEXT: li s10, 0
3401 ; RV32IZCMP-NEXT: #NO_APP
3402 ; RV32IZCMP-NEXT: cm.popret {ra, s0-s11}, 64
3404 ; RV64IZCMP-LABEL: spill_x10:
3405 ; RV64IZCMP: # %bb.0: # %entry
3406 ; RV64IZCMP-NEXT: cm.push {ra, s0-s11}, -112
3407 ; RV64IZCMP-NEXT: .cfi_def_cfa_offset 112
3408 ; RV64IZCMP-NEXT: .cfi_offset s10, -16
3409 ; RV64IZCMP-NEXT: .cfi_offset s11, -8
3410 ; RV64IZCMP-NEXT: #APP
3411 ; RV64IZCMP-NEXT: li s10, 0
3412 ; RV64IZCMP-NEXT: #NO_APP
3413 ; RV64IZCMP-NEXT: cm.popret {ra, s0-s11}, 112
3415 ; RV32IZCMP-SR-LABEL: spill_x10:
3416 ; RV32IZCMP-SR: # %bb.0: # %entry
3417 ; RV32IZCMP-SR-NEXT: cm.push {ra, s0-s11}, -64
3418 ; RV32IZCMP-SR-NEXT: .cfi_def_cfa_offset 64
3419 ; RV32IZCMP-SR-NEXT: .cfi_offset s10, -8
3420 ; RV32IZCMP-SR-NEXT: .cfi_offset s11, -4
3421 ; RV32IZCMP-SR-NEXT: #APP
3422 ; RV32IZCMP-SR-NEXT: li s10, 0
3423 ; RV32IZCMP-SR-NEXT: #NO_APP
3424 ; RV32IZCMP-SR-NEXT: cm.popret {ra, s0-s11}, 64
3426 ; RV64IZCMP-SR-LABEL: spill_x10:
3427 ; RV64IZCMP-SR: # %bb.0: # %entry
3428 ; RV64IZCMP-SR-NEXT: cm.push {ra, s0-s11}, -112
3429 ; RV64IZCMP-SR-NEXT: .cfi_def_cfa_offset 112
3430 ; RV64IZCMP-SR-NEXT: .cfi_offset s10, -16
3431 ; RV64IZCMP-SR-NEXT: .cfi_offset s11, -8
3432 ; RV64IZCMP-SR-NEXT: #APP
3433 ; RV64IZCMP-SR-NEXT: li s10, 0
3434 ; RV64IZCMP-SR-NEXT: #NO_APP
3435 ; RV64IZCMP-SR-NEXT: cm.popret {ra, s0-s11}, 112
3437 ; RV32I-LABEL: spill_x10:
3438 ; RV32I: # %bb.0: # %entry
3439 ; RV32I-NEXT: addi sp, sp, -16
3440 ; RV32I-NEXT: .cfi_def_cfa_offset 16
3441 ; RV32I-NEXT: sw s10, 12(sp) # 4-byte Folded Spill
3442 ; RV32I-NEXT: .cfi_offset s10, -4
3443 ; RV32I-NEXT: #APP
3444 ; RV32I-NEXT: li s10, 0
3445 ; RV32I-NEXT: #NO_APP
3446 ; RV32I-NEXT: lw s10, 12(sp) # 4-byte Folded Reload
3447 ; RV32I-NEXT: .cfi_restore s10
3448 ; RV32I-NEXT: addi sp, sp, 16
3449 ; RV32I-NEXT: .cfi_def_cfa_offset 0
3450 ; RV32I-NEXT: ret
3451 ;
3452 ; RV64I-LABEL: spill_x10:
3453 ; RV64I: # %bb.0: # %entry
3454 ; RV64I-NEXT: addi sp, sp, -16
3455 ; RV64I-NEXT: .cfi_def_cfa_offset 16
3456 ; RV64I-NEXT: sd s10, 8(sp) # 8-byte Folded Spill
3457 ; RV64I-NEXT: .cfi_offset s10, -8
3458 ; RV64I-NEXT: #APP
3459 ; RV64I-NEXT: li s10, 0
3460 ; RV64I-NEXT: #NO_APP
3461 ; RV64I-NEXT: ld s10, 8(sp) # 8-byte Folded Reload
3462 ; RV64I-NEXT: .cfi_restore s10
3463 ; RV64I-NEXT: addi sp, sp, 16
3464 ; RV64I-NEXT: .cfi_def_cfa_offset 0
3465 ; RV64I-NEXT: ret
3466 entry:
3467 tail call void asm sideeffect "li s10, 0", "~{s10}"()