1 ; RUN: llc -mtriple=x86_64-pc-linux-gnu -start-before=stack-protector -stop-after=stack-protector -o - < %s | FileCheck %s
2 ; Bugs 42238/43308: Test some additional situations not caught previously.
; Storing the address of a stack slot (%a) into another stack slot (%j)
; captures the alloca, so sspstrong must insert a stack guard: the CHECK
; lines verify the guard slot alloca, the volatile guard load from the
; TLS slot (%fs:40, addrspace(257) offset 40 on x86-64 Linux), the
; @llvm.stackprotector intrinsic, and the epilogue re-check that branches
; to __stack_chk_fail on mismatch.
4 define void @store_captures() #0 {
5 ; CHECK-LABEL: @store_captures(
7 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
8 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
9 ; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
10 ; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
11 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
12 ; CHECK-NEXT: [[J:%.*]] = alloca i32*, align 8
13 ; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
14 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
15 ; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
16 ; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
17 ; CHECK-NEXT: store i32* [[A]], i32** [[J]], align 8
18 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
19 ; CHECK-NEXT: [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
20 ; CHECK-NEXT: [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
21 ; CHECK-NEXT: br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
23 ; CHECK-NEXT: ret void
24 ; CHECK: CallStackCheckFailBlk:
25 ; CHECK-NEXT: call void @__stack_chk_fail()
26 ; CHECK-NEXT: unreachable
; Input IR: the plain `store i32* %a, i32** %j` below is the capturing store
; that should make the pass instrument this function.
29 %retval = alloca i32, align 4
30 %a = alloca i32, align 4
31 %j = alloca i32*, align 8
32 store i32 0, i32* %retval
33 %load = load i32, i32* %a, align 4
34 %add = add nsw i32 %load, 1
35 store i32 %add, i32* %a, align 4
36 store i32* %a, i32** %j, align 8
; Negative test: a load from an alloca, an atomicrmw on it, and returning its
; address are not treated as captures here, so the CHECK lines verify NO stack
; guard is inserted (no stackguard alloca / @llvm.stackprotector call appears).
40 define i32* @non_captures() #0 {
41 ; load, atomicrmw, and ret do not trigger a stack protector.
42 ; CHECK-LABEL: @non_captures(
44 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
45 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
46 ; CHECK-NEXT: [[ATOM:%.*]] = atomicrmw add i32* [[A]], i32 1 seq_cst
47 ; CHECK-NEXT: ret i32* [[A]]
50 %a = alloca i32, align 4
51 %load = load i32, i32* %a, align 4
52 %atom = atomicrmw add i32* %a, i32 1 seq_cst
; Like @store_captures, but the local's address is first addrspacecast to
; addrspace(1) before being stored. The cast must not hide the capture: the
; CHECK lines verify the full stack-guard instrumentation is still emitted.
56 define void @store_addrspacecast_captures() #0 {
57 ; CHECK-LABEL: @store_addrspacecast_captures(
59 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
60 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
61 ; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
62 ; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
63 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
64 ; CHECK-NEXT: [[J:%.*]] = alloca i32 addrspace(1)*, align 8
65 ; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
66 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
67 ; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
68 ; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
69 ; CHECK-NEXT: [[A_ADDRSPACECAST:%.*]] = addrspacecast i32* [[A]] to i32 addrspace(1)*
70 ; CHECK-NEXT: store i32 addrspace(1)* [[A_ADDRSPACECAST]], i32 addrspace(1)** [[J]], align 8
71 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
72 ; CHECK-NEXT: [[TMP0:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
73 ; CHECK-NEXT: [[TMP1:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP0]]
74 ; CHECK-NEXT: br i1 [[TMP1]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
76 ; CHECK-NEXT: ret void
77 ; CHECK: CallStackCheckFailBlk:
78 ; CHECK-NEXT: call void @__stack_chk_fail()
79 ; CHECK-NEXT: unreachable
; Input IR: capture of %a flows through an addrspacecast before the store.
82 %retval = alloca i32, align 4
83 %a = alloca i32, align 4
84 %j = alloca i32 addrspace(1)*, align 8
85 store i32 0, i32* %retval
86 %load = load i32, i32* %a, align 4
87 %add = add nsw i32 %load, 1
88 store i32 %add, i32* %a, align 4
89 %a.addrspacecast = addrspacecast i32* %a to i32 addrspace(1)*
90 store i32 addrspace(1)* %a.addrspacecast, i32 addrspace(1)** %j, align 8
; The address of local %a is passed as the "new value" operand of a cmpxchg
; on %j, which may write it to memory — a capture. The CHECK lines verify the
; stack-guard instrumentation is emitted for this case too.
94 define void @cmpxchg_captures() #0 {
95 ; CHECK-LABEL: @cmpxchg_captures(
97 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
98 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
99 ; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
100 ; CHECK-NEXT: [[RETVAL:%.*]] = alloca i32, align 4
101 ; CHECK-NEXT: [[A:%.*]] = alloca i32, align 4
102 ; CHECK-NEXT: [[J:%.*]] = alloca i32*, align 8
103 ; CHECK-NEXT: store i32 0, i32* [[RETVAL]]
104 ; CHECK-NEXT: [[LOAD:%.*]] = load i32, i32* [[A]], align 4
105 ; CHECK-NEXT: [[ADD:%.*]] = add nsw i32 [[LOAD]], 1
106 ; CHECK-NEXT: store i32 [[ADD]], i32* [[A]], align 4
107 ; CHECK-NEXT: [[TMP0:%.*]] = cmpxchg i32** [[J]], i32* null, i32* [[A]] seq_cst monotonic
108 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
109 ; CHECK-NEXT: [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
110 ; CHECK-NEXT: [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
111 ; CHECK-NEXT: br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
113 ; CHECK-NEXT: ret void
114 ; CHECK: CallStackCheckFailBlk:
115 ; CHECK-NEXT: call void @__stack_chk_fail()
116 ; CHECK-NEXT: unreachable
; Input IR: %a escapes as the cmpxchg new-value operand below.
119 %retval = alloca i32, align 4
120 %a = alloca i32, align 4
121 %j = alloca i32*, align 8
122 store i32 0, i32* %retval
123 %load = load i32, i32* %a, align 4
124 %add = add nsw i32 %load, 1
125 store i32 %add, i32* %a, align 4
127 cmpxchg i32** %j, i32* null, i32* %a seq_cst monotonic
; A memset over a local with a length loaded at runtime (not a known constant)
; must be treated as potentially out-of-bounds, so a stack guard is required;
; the CHECK lines verify the instrumentation is emitted.
131 define void @memset_captures(i64 %c) #0 {
132 ; CHECK-LABEL: @memset_captures(
134 ; CHECK-NEXT: [[STACKGUARDSLOT:%.*]] = alloca i8*
135 ; CHECK-NEXT: [[STACKGUARD:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
136 ; CHECK-NEXT: call void @llvm.stackprotector(i8* [[STACKGUARD]], i8** [[STACKGUARDSLOT]])
137 ; CHECK-NEXT: [[CADDR:%.*]] = alloca i64, align 8
138 ; CHECK-NEXT: store i64 %c, i64* [[CADDR]], align 8
139 ; CHECK-NEXT: [[I:%.*]] = alloca i32, align 4
140 ; CHECK-NEXT: [[IPTR:%.*]] = bitcast i32* [[I]] to i8*
141 ; CHECK-NEXT: [[COUNT:%.*]] = load i64, i64* [[CADDR]], align 8
142 ; CHECK-NEXT: call void @llvm.memset.p0i8.i64(i8* align 4 [[IPTR]], i8 0, i64 [[COUNT]], i1 false)
143 ; CHECK-NEXT: [[STACKGUARD1:%.*]] = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*)
144 ; CHECK-NEXT: [[TMP1:%.*]] = load volatile i8*, i8** [[STACKGUARDSLOT]]
145 ; CHECK-NEXT: [[TMP2:%.*]] = icmp eq i8* [[STACKGUARD1]], [[TMP1]]
146 ; CHECK-NEXT: br i1 [[TMP2]], label [[SP_RETURN:%.*]], label [[CALLSTACKCHECKFAILBLK:%.*]], !prof !0
148 ; CHECK-NEXT: ret void
149 ; CHECK: CallStackCheckFailBlk:
150 ; CHECK-NEXT: call void @__stack_chk_fail()
151 ; CHECK-NEXT: unreachable
; Input IR: the memset length %count is loaded from memory, so its value is
; unknown at compile time.
154 %c.addr = alloca i64, align 8
155 store i64 %c, i64* %c.addr, align 8
156 %i = alloca i32, align 4
157 %i.ptr = bitcast i32* %i to i8*
158 %count = load i64, i64* %c.addr, align 8
159 call void @llvm.memset.p0i8.i64(i8* align 4 %i.ptr, i8 0, i64 %count, i1 false)
; Intrinsic declaration used by @memset_captures above.
163 declare void @llvm.memset.p0i8.i64(i8* nocapture writeonly, i8, i64, i1 immarg)
; sspstrong on every function: the pass instruments a function whenever it
; sees an address-taken (captured) local or a local array/large aggregate.
165 attributes #0 = { sspstrong }