; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
; RUN: llc < %s -mtriple arm64-apple-darwin -global-isel -stop-after=irtranslator -verify-machineinstrs | FileCheck %s
; Check that we don't try to tail-call with a non-forwarded sret parameter.
declare void @test_explicit_sret(ptr sret(i64))

; Forwarded explicit sret pointer => we can tail call.
define void @can_tail_call_forwarded_explicit_sret_ptr(ptr sret(i64) %arg) {
  ; CHECK-LABEL: name: can_tail_call_forwarded_explicit_sret_ptr
  ; CHECK: bb.1 (%ir-block.0):
  ; CHECK: liveins: $x8
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x8
  ; CHECK: $x8 = COPY [[COPY]](p0)
  ; CHECK: TCRETURNdi @test_explicit_sret, 0, csr_darwin_aarch64_aapcs, implicit $sp, implicit $x8
  tail call void @test_explicit_sret(ptr %arg)
  ret void
}

; Not marked as tail, so don't tail call.
define void @test_call_explicit_sret(ptr sret(i64) %arg) {
  ; CHECK-LABEL: name: test_call_explicit_sret
  ; CHECK: bb.1 (%ir-block.0):
  ; CHECK: liveins: $x8
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x8
  ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: $x8 = COPY [[COPY]](p0)
  ; CHECK: BL @test_explicit_sret, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x8
  ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: RET_ReallyLR
  call void @test_explicit_sret(ptr %arg)
  ret void
}

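; The sret pointer is a local alloca rather than the caller's own incoming sret
; argument, so it cannot be forwarded and the call must not be tail-called.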
define void @dont_tail_call_explicit_sret_alloca_unused() {
  ; CHECK-LABEL: name: dont_tail_call_explicit_sret_alloca_unused
  ; CHECK: bb.1 (%ir-block.0):
  ; CHECK: [[FRAME_INDEX:%[0-9]+]]:_(p0) = G_FRAME_INDEX %stack.0.l
  ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: $x8 = COPY [[FRAME_INDEX]](p0)
  ; CHECK: BL @test_explicit_sret, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x8
  ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: RET_ReallyLR
  %l = alloca i64, align 8
  tail call void @test_explicit_sret(ptr %l)
  ret void
}

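; As above, but the local alloca also has other users (a store fed by a load);
; it is still not the forwarded incoming sret argument, so no tail call.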
define void @dont_tail_call_explicit_sret_alloca_dummyusers(ptr %ptr) {
  ; CHECK-LABEL: name: dont_tail_call_explicit_sret_alloca_dummyusers
  ; CHECK: bb.1 (%ir-block.0):
  ; CHECK: liveins: $x0
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[FRAME_INDEX:%[0-9]+]]:_(p0) = G_FRAME_INDEX %stack.0.l
  ; CHECK: [[LOAD:%[0-9]+]]:_(s64) = G_LOAD [[COPY]](p0) :: (load (s64) from %ir.ptr)
  ; CHECK: G_STORE [[LOAD]](s64), [[FRAME_INDEX]](p0) :: (store (s64) into %ir.l)
  ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: $x8 = COPY [[FRAME_INDEX]](p0)
  ; CHECK: BL @test_explicit_sret, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x8
  ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: RET_ReallyLR
  %l = alloca i64, align 8
  %r = load i64, ptr %ptr, align 8
  store i64 %r, ptr %l, align 8
  tail call void @test_explicit_sret(ptr %l)
  ret void
}

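; The sret pointer is derived from the incoming pointer via a GEP, so it is not
; the forwarded sret argument itself and the call must not be tail-called.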
define void @dont_tail_call_tailcall_explicit_sret_gep(ptr %ptr) {
  ; CHECK-LABEL: name: dont_tail_call_tailcall_explicit_sret_gep
  ; CHECK: bb.1 (%ir-block.0):
  ; CHECK: liveins: $x0
  ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
  ; CHECK: [[C:%[0-9]+]]:_(s64) = G_CONSTANT i64 8
  ; CHECK: [[PTR_ADD:%[0-9]+]]:_(p0) = G_PTR_ADD [[COPY]], [[C]](s64)
  ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: $x8 = COPY [[PTR_ADD]](p0)
  ; CHECK: BL @test_explicit_sret, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x8
  ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: RET_ReallyLR
  %ptr2 = getelementptr i64, ptr %ptr, i32 1
  tail call void @test_explicit_sret(ptr %ptr2)
  ret void
}

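; The sret result is loaded from the local alloca and returned after the call,
; so the call must not be tail-called.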
define i64 @dont_tail_call_sret_alloca_returned() {
  ; CHECK-LABEL: name: dont_tail_call_sret_alloca_returned
  ; CHECK: bb.1 (%ir-block.0):
  ; CHECK: [[FRAME_INDEX:%[0-9]+]]:_(p0) = G_FRAME_INDEX %stack.0.l
  ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: $x8 = COPY [[FRAME_INDEX]](p0)
  ; CHECK: BL @test_explicit_sret, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x8
  ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
  ; CHECK: [[LOAD:%[0-9]+]]:_(s64) = G_LOAD [[FRAME_INDEX]](p0) :: (dereferenceable load (s64) from %ir.l)
  ; CHECK: $x0 = COPY [[LOAD]](s64)
  ; CHECK: RET_ReallyLR implicit $x0
  %l = alloca i64, align 8
  tail call void @test_explicit_sret(ptr %l)
  %r = load i64, ptr %l, align 8
  ret i64 %r
}