; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 -verify-machineinstrs < %s | FileCheck --check-prefix=GCN %s
; RUN: llc -global-isel -amdgpu-fixed-function-abi -mtriple=amdgcn-amd-amdhsa -mcpu=gfx900 -verify-machineinstrs < %s | FileCheck --check-prefix=GCN %s
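
; Note: both RUN lines (SelectionDAG and GlobalISel with the fixed function
; ABI) share the GCN check prefix, so llvm.returnaddress is expected to lower
; to the same code on both instruction-selection paths.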

; GCN-LABEL: {{^}}func1
; GCN: v_mov_b32_e32 v0, s30
; GCN: v_mov_b32_e32 v1, s31
; GCN: s_setpc_b64 s[30:31]
define i8* @func1() nounwind {
entry:
  %0 = tail call i8* @llvm.returnaddress(i32 0)
  ret i8* %0
}
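
; func1: for a depth of 0 in a callable function, the return address is the
; caller-provided s[30:31] pair, so it is simply copied into the v0/v1 return
; value.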

; Test with non-zero frame
; GCN-LABEL: {{^}}func2
; GCN: v_mov_b32_e32 v0, 0
; GCN: v_mov_b32_e32 v1, 0
; GCN: s_setpc_b64 s[30:31]
define i8* @func2() nounwind {
entry:
  %0 = tail call i8* @llvm.returnaddress(i32 1)
  ret i8* %0
}
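
; func2: a non-zero depth cannot be recovered (there is no frame walk on
; amdgcn), so the intrinsic folds to a null pointer and v0/v1 are zeroed.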

; Test with amdgpu_kernel
; GCN-LABEL: {{^}}func3
; GCN: v_mov_b32_e32 v0, 0
; GCN: v_mov_b32_e32 v1, {{v0|0}}
define amdgpu_kernel void @func3(i8** %out) nounwind {
entry:
  %tmp = tail call i8* @llvm.returnaddress(i32 0)
  store i8* %tmp, i8** %out, align 4
  ret void
}
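
; func3: an amdgpu_kernel entry point has no caller return address, so even a
; depth-0 query folds to null before being stored to %out.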

; Test with use outside the entry-block
; GCN-LABEL: {{^}}func4
; GCN: v_mov_b32_e32 v0, 0
; GCN: v_mov_b32_e32 v1, {{v0|0}}
define amdgpu_kernel void @func4(i8** %out, i32 %val) nounwind {
entry:
  %cmp = icmp ne i32 %val, 0
  br i1 %cmp, label %store, label %exit

store:
  %tmp = tail call i8* @llvm.returnaddress(i32 1)
  store i8* %tmp, i8** %out, align 4
  ret void

exit:
  ret void
}
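
; func4: the same null folding applies when the only use of the return address
; sits in a non-entry block reached through a branch.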

; Test ending in unreachable
; GCN-LABEL: {{^}}func5
; GCN: v_mov_b32_e32 v0, 0
define void @func5() nounwind {
entry:
  %tmp = tail call i8* @llvm.returnaddress(i32 2)
  store volatile i32 0, i32 addrspace(3)* undef, align 4
  unreachable
}
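
; func5: a depth of 2 likewise folds to zero, even in a function that ends in
; unreachable.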

declare void @callee()

; GCN-LABEL: {{^}}multi_use:
; GCN-DAG: v_mov_b32_e32 v[[LO:4[0-9]+]], s30
; GCN-DAG: v_mov_b32_e32 v[[HI:4[0-9]+]], s31
; GCN: global_store_dwordx2 v{{\[[0-9]+:[0-9]+\]}}, v{{\[}}[[LO]]:[[HI]]{{\]}}
; GCN: s_swappc_b64
; GCN: global_store_dwordx2 v{{\[[0-9]+:[0-9]+\]}}, v{{\[}}[[LO]]:[[HI]]{{\]}}
define void @multi_use() nounwind {
entry:
  %ret0 = tail call i8* @llvm.returnaddress(i32 0)
  store volatile i8* %ret0, i8* addrspace(1)* undef
  call void @callee()
  %ret1 = tail call i8* @llvm.returnaddress(i32 0)
  store volatile i8* %ret1, i8* addrspace(1)* undef
  ret void
}
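
; multi_use: both depth-0 queries must observe the same return address, so the
; s[30:31] copy held in [[LO]]/[[HI]] has to survive the call to @callee
; between the two stores.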

declare i8* @llvm.returnaddress(i32) nounwind readnone