1 ; RUN: llc -mtriple=x86_64-pc-linux-gnu -start-before=stack-protector \
2 ; RUN: -stop-after=stack-protector -o - < %s | FileCheck %s
3 ; Bugs 42238/43308: Test some additional situations not caught previously.
; Storing a stack object's address to memory (%a into %j) is a pointer
; capture, so under sspstrong the pass must create the guard slot, emit
; @llvm.stackprotector at entry, and re-check the guard before return.
5 define void @store_captures() #0 {
6 ; CHECK-LABEL: @store_captures(
8 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca ptr
9 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
10 ; CHECK-NEXT: call void @llvm.stackprotector(ptr [[STACKGUARD]], ptr [[STACKGUARDSLOT]])
11 ; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
12 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
13 ; CHECK-NEXT: [[J:%.*]] = alloca ptr, align 8
14 ; CHECK-NEXT: store i32 0, ptr [[RETVAL]]
15 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, ptr [[A]], align 4
16 ; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
17 ; CHECK-NEXT: store i32 [[ADD]], ptr [[A]], align 4
18 ; CHECK-NEXT: store ptr [[A]], ptr [[J]], align 8
19 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
20 ; CHECK-NEXT: [[TMP0:%.*]] = load volatile ptr, ptr [[STACKGUARDSLOT]]
21 ; CHECK-NEXT: [[TMP1:%.*]] = icmp eq ptr [[STACKGUARD1]], [[TMP0]]
22 ; CHECK-NEXT: br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
24 ; CHECK-NEXT: ret void
25 ; CHECK: CallStackCheckFailBlk:
26 ; CHECK-NEXT: call void @__stack_chk_fail()
27 ; CHECK-NEXT: unreachable
30 %retval = alloca i32, align 4
31 %a = alloca i32, align 4
32 %j = alloca ptr, align 8
33 store i32 0, ptr %retval
34 %load = load i32, ptr %a, align 4
35 %add = add nsw i32 %load, 1
36 store i32 %add, ptr %a, align 4
; %a escapes here: its address is written into %j, which is what the
; stack-protector heuristic must detect.
37 store ptr %a, ptr %j, align 8
; Negative test: the CHECK-NEXT chain goes straight from the label to the
; original allocas, proving no StackGuardSlot / @llvm.stackprotector was
; inserted for these non-capturing uses.
41 define ptr @non_captures() #0 {
42 ; load, atomicrmw, and ret do not trigger a stack protector.
43 ; CHECK-LABEL: @non_captures(
45 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
46 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, ptr [[A]], align 4
47 ; CHECK-NEXT: [[ATOM:%.*]] = atomicrmw add ptr [[A]], i32 1 seq_cst
48 ; CHECK-NEXT: ret ptr [[A]]
51 %a = alloca i32, align 4
52 %load = load i32, ptr %a, align 4
53 %atom = atomicrmw add ptr %a, i32 1 seq_cst
; Same capture pattern as @store_captures, but the escaping pointer goes
; through an addrspacecast first; the cast must not hide the capture from
; the pass (bugs 42238/43308 class of misses).
57 define void @store_addrspacecast_captures() #0 {
58 ; CHECK-LABEL: @store_addrspacecast_captures(
60 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca ptr
61 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
62 ; CHECK-NEXT: call void @llvm.stackprotector(ptr [[STACKGUARD]], ptr [[STACKGUARDSLOT]])
63 ; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
64 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
65 ; CHECK-NEXT: [[J:%.*]] = alloca ptr addrspace(1), align 8
66 ; CHECK-NEXT: store i32 0, ptr [[RETVAL]]
67 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, ptr [[A]], align 4
68 ; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
69 ; CHECK-NEXT: store i32 [[ADD]], ptr [[A]], align 4
70 ; CHECK-NEXT: [[A_ADDRSPACECAST:%.*]] = addrspacecast ptr [[A]] to ptr addrspace(1)
71 ; CHECK-NEXT: store ptr addrspace(1) [[A_ADDRSPACECAST]], ptr [[J]], align 8
72 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
73 ; CHECK-NEXT: [[TMP0:%.*]] = load volatile ptr, ptr [[STACKGUARDSLOT]]
74 ; CHECK-NEXT: [[TMP1:%.*]] = icmp eq ptr [[STACKGUARD1]], [[TMP0]]
75 ; CHECK-NEXT: br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
77 ; CHECK-NEXT: ret void
78 ; CHECK: CallStackCheckFailBlk:
79 ; CHECK-NEXT: call void @__stack_chk_fail()
80 ; CHECK-NEXT: unreachable
83 %retval = alloca i32, align 4
84 %a = alloca i32, align 4
85 %j = alloca ptr addrspace(1), align 8
86 store i32 0, ptr %retval
87 %load = load i32, ptr %a, align 4
88 %add = add nsw i32 %load, 1
89 store i32 %add, ptr %a, align 4
; %a's address escapes via the addrspace(1) cast stored below.
90 %a.addrspacecast = addrspacecast ptr %a to ptr addrspace(1)
91 store ptr addrspace(1) %a.addrspacecast, ptr %j, align 8
; Passing %a as the new value of a cmpxchg writes its address to memory on
; success, so it is a capture and must trigger the guard under sspstrong.
95 define void @cmpxchg_captures() #0 {
96 ; CHECK-LABEL: @cmpxchg_captures(
98 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca ptr
99 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
100 ; CHECK-NEXT: call void @llvm.stackprotector(ptr [[STACKGUARD]], ptr [[STACKGUARDSLOT]])
101 ; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
102 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
103 ; CHECK-NEXT: [[J:%.*]] = alloca ptr, align 8
104 ; CHECK-NEXT: store i32 0, ptr [[RETVAL]]
105 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, ptr [[A]], align 4
106 ; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
107 ; CHECK-NEXT: store i32 [[ADD]], ptr [[A]], align 4
108 ; CHECK-NEXT: [[TMP0:%.*]] = cmpxchg ptr [[J]], ptr null, ptr [[A]] seq_cst monotonic
109 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
110 ; CHECK-NEXT: [[TMP1:%.*]] = load volatile ptr, ptr [[STACKGUARDSLOT]]
111 ; CHECK-NEXT: [[TMP2:%.*]] = icmp eq ptr [[STACKGUARD1]], [[TMP1]]
112 ; CHECK-NEXT: br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
114 ; CHECK-NEXT: ret void
115 ; CHECK: CallStackCheckFailBlk:
116 ; CHECK-NEXT: call void @__stack_chk_fail()
117 ; CHECK-NEXT: unreachable
120 %retval = alloca i32, align 4
121 %a = alloca i32, align 4
122 %j = alloca ptr, align 8
123 store i32 0, ptr %retval
124 %load = load i32, ptr %a, align 4
125 %add = add nsw i32 %load, 1
126 store i32 %add, ptr %a, align 4
; %a is the cmpxchg "new" operand: its address may be stored into %j.
128 cmpxchg ptr %j, ptr null, ptr %a seq_cst monotonic
; A memset over alloca %i whose length (%count) is not a compile-time
; constant can overflow the object, so sspstrong must protect the frame.
132 define void @memset_captures(i64 %c) #0 {
133 ; CHECK-LABEL: @memset_captures(
135 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca ptr
136 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
137 ; CHECK-NEXT: call void @llvm.stackprotector(ptr [[STACKGUARD]], ptr [[STACKGUARDSLOT]])
138 ; CHECK-NEXT: [[CADDR:%.*]] = alloca i64, align 8
139 ; CHECK-NEXT: store i64 %c, ptr [[CADDR]], align 8
140 ; CHECK-NEXT: [[I:%.*]] = alloca i32, align 4
141 ; CHECK-NEXT: [[COUNT:%.*]] = load i64, ptr [[CADDR]], align 8
142 ; CHECK-NEXT: call void @llvm.memset.p0.i64(ptr align 4 [[I]], i8 0, i64 [[COUNT]], i1 false)
143 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257))
144 ; CHECK-NEXT: [[TMP1:%.*]] = load volatile ptr, ptr [[STACKGUARDSLOT]]
145 ; CHECK-NEXT: [[TMP2:%.*]] = icmp eq ptr [[STACKGUARD1]], [[TMP1]]
146 ; CHECK-NEXT: br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
148 ; CHECK-NEXT: ret void
149 ; CHECK: CallStackCheckFailBlk:
150 ; CHECK-NEXT: call void @__stack_chk_fail()
151 ; CHECK-NEXT: unreachable
154 %c.addr = alloca i64, align 8
155 store i64 %c, ptr %c.addr, align 8
156 %i = alloca i32, align 4
157 %count = load i64, ptr %c.addr, align 8
; Variable-length write into the fixed-size i32 alloca %i.
158 call void @llvm.memset.p0.i64(ptr align 4 %i, i8 0, i64 %count, i1 false)
162 declare void @llvm.memset.p0.i64(ptr nocapture writeonly, i8, i64, i1 immarg)
164 ; Intentionally does not have any fn attrs.
165 declare dso_local void @foo(ptr)
167 ; @bar_sspstrong and @bar_nossp are the same function, but differ only in
168 ; function attributes. Test that a callee without stack protector function
169 ; attribute does not trigger a stack guard slot in a caller that also does not
170 ; have a stack protector slot.
; With sspstrong (#0) and a dynamically-sized alloca (%4, size %3), the
; first instruction after the label must be the guard-slot alloca.
171 define dso_local void @bar_sspstrong(i64 %0) #0 {
172 ; CHECK-LABEL: @bar_sspstrong
173 ; CHECK-NEXT: %StackGuardSlot = alloca ptr
174 %2 = alloca i64, align 8
175 store i64 %0, ptr %2, align 8
176 %3 = load i64, ptr %2, align 8
177 %4 = alloca i8, i64 %3, align 16
178 call void @foo(ptr %4)
182 ; Intentionally does not have any fn attrs.
; Identical body to @bar_sspstrong but with no ssp* attribute: CHECK-NEXT
; pins the first instruction to the original %2 alloca, i.e. no guard slot.
183 define dso_local void @bar_nossp(i64 %0) {
184 ; CHECK-LABEL: @bar_nossp
185 ; CHECK-NEXT: %2 = alloca i64
186 %2 = alloca i64, align 8
187 store i64 %0, ptr %2, align 8
188 %3 = load i64, ptr %2, align 8
189 %4 = alloca i8, i64 %3, align 16
190 call void @foo(ptr %4)
194 ; Check stack protect for noreturn call
; #1 = noreturn sspreq. The CHECK lines require the guard comparison and
; the branch to CallStackCheckFailBlk to appear in %if.then BEFORE the
; recursive call, since control may never reach a normal return.
195 define dso_local i32 @foo_no_return(i32 %0) #1 {
196 ; CHECK-LABEL: @foo_no_return
198 %cmp = icmp sgt i32 %0, 4
199 br i1 %cmp, label %if.then, label %if.end
201 ; CHECK: if.then: ; preds = %entry
202 ; CHECK-NEXT: %StackGuard1 = load volatile ptr, ptr addrspace(257) inttoptr (i32 40 to ptr addrspace(257)), align 8
203 ; CHECK-NEXT: %1 = load volatile ptr, ptr %StackGuardSlot, align 8
204 ; CHECK-NEXT: %2 = icmp eq ptr %StackGuard1, %1
205 ; CHECK-NEXT: br i1 %2, label %SP_return, label %CallStackCheckFailBlk
206 ; CHECK: SP_return: ; preds = %if.then
207 ; CHECK-NEXT: %call = call i32 @foo_no_return(i32 1)
208 ; CHECK-NEXT: br label %return
209 ; CHECK: if.end: ; preds = %entry
210 ; CHECK-NEXT: br label %return
212 if.then: ; preds = %entry
213 %call = call i32 @foo_no_return(i32 1)
216 if.end: ; preds = %entry
219 return: ; preds = %if.end, %if.then
223 declare void @callee() noreturn nounwind
; The CHECK-NEXT immediately after the label shows the body begins with the
; call itself — no guard slot or @llvm.stackprotector was emitted even
; though the caller is sspstrong.
224 define void @caller() sspstrong {
225 ; Test that a stack protector is NOT inserted when we call nounwind functions.
226 ; CHECK-LABEL: @caller
227 ; CHECK-NEXT: call void @callee
228 call void @callee() noreturn nounwind
232 attributes #0 = { sspstrong }
233 attributes #1 = { noreturn sspreq}