; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py UTC_ARGS: --version 5
; RUN: llc --mtriple=x86_64-- < %s | FileCheck --check-prefix=X64 %s
; RUN: llc --mtriple=i386-- < %s | FileCheck --check-prefix=X86 %s

; This test ensures that rbp/rbx/ebp/esi are correctly saved and restored before being clobbered when IPRA is enabled.

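; callee_clobber_rbp clobbers the frame pointer via inline asm; rbp (ebp on
; i386) must be pushed before the asm block and popped after it.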
define internal void @callee_clobber_rbp() nounwind norecurse {
; X64-LABEL: callee_clobber_rbp:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    #APP
; X64-NEXT:    xorl %ebp, %ebp
; X64-NEXT:    #NO_APP
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
;
; X86-LABEL: callee_clobber_rbp:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    #APP
; X86-NEXT:    xorl %ebp, %ebp
; X86-NEXT:    #NO_APP
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
  call void asm sideeffect "xor %ebp, %ebp", "~{ebp}"()
  ret void
}

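; callee_clobber_rbx clobbers rbx, which caller_use_rbx below uses as the base
; pointer for its realigned stack frame.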
define internal void @callee_clobber_rbx(ptr %addr) nounwind norecurse {
; X64-LABEL: callee_clobber_rbx:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbx
; X64-NEXT:    #APP
; X64-NEXT:    xorl %ebx, %ebx
; X64-NEXT:    #NO_APP
; X64-NEXT:    popq %rbx
; X64-NEXT:    retq
  call void asm sideeffect "xor %ebx, %ebx", "~{ebx}"()
  ret void
}

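; callee_clobber_esi is the i386 counterpart: esi serves as the base pointer
; register in caller_use_esi below.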
define internal void @callee_clobber_esi(ptr %addr) nounwind norecurse {
; X86-LABEL: callee_clobber_esi:
; X86:       # %bb.0:
; X86-NEXT:    pushl %esi
; X86-NEXT:    #APP
; X86-NEXT:    xorl %esi, %esi
; X86-NEXT:    #NO_APP
; X86-NEXT:    popl %esi
; X86-NEXT:    retl
  call void asm sideeffect "xor %esi, %esi", "~{esi}"()
  ret void
}

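; With "frame-pointer"="all" the caller stores to a frame-pointer-relative
; slot after the call, so the store to -4(%rbp)/-4(%ebp) is only correct if
; the callee preserved the frame pointer.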
define void @caller_use_rbp() "frame-pointer"="all" nounwind {
; X64-LABEL: caller_use_rbp:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    movq %rsp, %rbp
; X64-NEXT:    subq $16, %rsp
; X64-NEXT:    callq callee_clobber_rbp
; X64-NEXT:    movl $5, -4(%rbp)
; X64-NEXT:    addq $16, %rsp
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
;
; X86-LABEL: caller_use_rbp:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll callee_clobber_rbp
; X86-NEXT:    movl $5, -4(%ebp)
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
  call void @callee_clobber_rbp()
  %addr = alloca i32, align 4
  store i32 5, ptr %addr, align 4
  ret void
}

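; The overaligned alloca forces stack realignment and the dynamic alloca
; forces a base pointer (rbx); the stack protector cookie is reloaded from
; 32(%rbx) after the call, so the callee must preserve rbx.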
define void @caller_use_rbx(i32 %X) nounwind ssp {
; X64-LABEL: caller_use_rbx:
; X64:       # %bb.0:
; X64-NEXT:    pushq %rbp
; X64-NEXT:    movq %rsp, %rbp
; X64-NEXT:    pushq %rbx
; X64-NEXT:    andq $-32, %rsp
; X64-NEXT:    subq $64, %rsp
; X64-NEXT:    movq %rsp, %rbx
; X64-NEXT:    movq __stack_chk_guard(%rip), %rax
; X64-NEXT:    movq %rax, 32(%rbx)
; X64-NEXT:    movq %rsp, %rax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    leaq 15(,%rcx,4), %rcx
; X64-NEXT:    andq $-16, %rcx
; X64-NEXT:    subq %rcx, %rax
; X64-NEXT:    movq %rax, %rsp
; X64-NEXT:    movq %rbx, %rdi
; X64-NEXT:    callq callee_clobber_rbx
; X64-NEXT:    movq __stack_chk_guard(%rip), %rax
; X64-NEXT:    cmpq 32(%rbx), %rax
; X64-NEXT:    jne .LBB4_2
; X64-NEXT:  # %bb.1:
; X64-NEXT:    leaq -8(%rbp), %rsp
; X64-NEXT:    popq %rbx
; X64-NEXT:    popq %rbp
; X64-NEXT:    retq
; X64-NEXT:  .LBB4_2:
; X64-NEXT:    callq __stack_chk_fail@PLT
  %realign = alloca i32, align 32
  %addr = alloca i32, i32 %X
  call void @callee_clobber_rbx(ptr %realign)
  ret void
}

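; Same pattern on i386: esi is the base pointer and the stack protector
; cookie is reloaded from 16(%esi) after the call, so the callee must
; preserve esi.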
define void @caller_use_esi(i32 %X) nounwind ssp {
; X86-LABEL: caller_use_esi:
; X86:       # %bb.0:
; X86-NEXT:    pushl %ebp
; X86-NEXT:    movl %esp, %ebp
; X86-NEXT:    pushl %esi
; X86-NEXT:    andl $-32, %esp
; X86-NEXT:    subl $32, %esp
; X86-NEXT:    movl %esp, %esi
; X86-NEXT:    movl 8(%ebp), %eax
; X86-NEXT:    movl __stack_chk_guard, %ecx
; X86-NEXT:    movl %ecx, 16(%esi)
; X86-NEXT:    movl %esp, %ecx
; X86-NEXT:    shll $2, %eax
; X86-NEXT:    subl %eax, %ecx
; X86-NEXT:    movl %ecx, %esp
; X86-NEXT:    movl %esi, %eax
; X86-NEXT:    pushl %eax
; X86-NEXT:    calll callee_clobber_esi
; X86-NEXT:    addl $4, %esp
; X86-NEXT:    movl __stack_chk_guard, %eax
; X86-NEXT:    cmpl 16(%esi), %eax
; X86-NEXT:    jne .LBB5_2
; X86-NEXT:  # %bb.1:
; X86-NEXT:    leal -4(%ebp), %esp
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %ebp
; X86-NEXT:    retl
; X86-NEXT:  .LBB5_2:
; X86-NEXT:    calll __stack_chk_fail
  %realign = alloca i32, align 32
  %addr = alloca i32, i32 %X
  call void @callee_clobber_esi(ptr %realign)
  ret void
}