//===- ThreadSafetyTIL.cpp ------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
9 #include "clang/Analysis/Analyses/ThreadSafetyTIL.h"
10 #include "clang/Basic/LLVM.h"
11 #include "llvm/Support/Casting.h"

using namespace clang;
using namespace threadSafety;
using namespace til;

StringRef til::getUnaryOpcodeString(TIL_UnaryOpcode Op) {
  switch (Op) {
    case UOP_Minus:    return "-";
    case UOP_BitNot:   return "~";
    case UOP_LogicNot: return "!";
  }
  return {};
}

StringRef til::getBinaryOpcodeString(TIL_BinaryOpcode Op) {
  switch (Op) {
    case BOP_Mul:      return "*";
    case BOP_Div:      return "/";
    case BOP_Rem:      return "%";
    case BOP_Add:      return "+";
    case BOP_Sub:      return "-";
    case BOP_Shl:      return "<<";
    case BOP_Shr:      return ">>";
    case BOP_BitAnd:   return "&";
    case BOP_BitXor:   return "^";
    case BOP_BitOr:    return "|";
    case BOP_Eq:       return "==";
    case BOP_Neq:      return "!=";
    case BOP_Lt:       return "<";
    case BOP_Leq:      return "<=";
    case BOP_Cmp:      return "<=>";
    case BOP_LogicAnd: return "&&";
    case BOP_LogicOr:  return "||";
  }
  return {};
}

SExpr *Future::force() {
  Status = FS_evaluating;
  Result = compute();
  Status = FS_done;
  return Result;
}

unsigned BasicBlock::addPredecessor(BasicBlock *Pred) {
  unsigned Idx = Predecessors.size();
  Predecessors.reserveCheck(1, Arena);
  Predecessors.push_back(Pred);
  // Each Phi argument list grows in lockstep with the predecessor list; push a
  // placeholder slot for the new edge, to be filled in later.
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserveCheck(1, Arena);
      Ph->values().push_back(nullptr);
    }
  }
  return Idx;
}

void BasicBlock::reservePredecessors(unsigned NumPreds) {
  Predecessors.reserve(NumPreds, Arena);
  for (auto *E : Args) {
    if (auto *Ph = dyn_cast<Phi>(E)) {
      Ph->values().reserve(NumPreds, Arena);
    }
  }
}

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
const SExpr *til::getCanonicalVal(const SExpr *E) {
  while (true) {
    if (const auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() == Variable::VK_Let) {
        E = V->definition();
        continue;
      }
    }
    if (const auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    break;
  }
  return E;
}
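
// For example, given roughly "let x = y; let z = x;", getCanonicalVal() on a
// reference to z follows the chain of VK_Let definitions back to y, and it
// likewise collapses Phi nodes whose arguments have all reduced to a single
// value.  (The let-bound names here are purely illustrative.)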

// If E is a variable, then trace back through any aliases or redundant
// Phi nodes to find the canonical definition.
// The non-const version will simplify incomplete Phi nodes.
SExpr *til::simplifyToCanonicalVal(SExpr *E) {
  while (true) {
    if (auto *V = dyn_cast<Variable>(E)) {
      if (V->kind() != Variable::VK_Let)
        return V;
      // Eliminate redundant variables, e.g. x = y, or x = 5,
      // but keep anything more complicated.
      if (til::ThreadSafetyTIL::isTrivial(V->definition())) {
        E = V->definition();
        continue;
      }
      return V;
    }
    if (auto *Ph = dyn_cast<Phi>(E)) {
      if (Ph->status() == Phi::PH_Incomplete)
        simplifyIncompleteArg(Ph);
      // Eliminate redundant Phi nodes.
      if (Ph->status() == Phi::PH_SingleVal) {
        E = Ph->values()[0];
        continue;
      }
    }
    return E;
  }
}

// Trace the arguments of an incomplete Phi node to see if they have the same
// canonical definition.  If so, mark the Phi node as redundant.
// simplifyToCanonicalVal() will recursively call simplifyIncompleteArg() on
// the Phi's arguments.
void til::simplifyIncompleteArg(til::Phi *Ph) {
  assert(Ph && Ph->status() == Phi::PH_Incomplete);

  // Eliminate infinite recursion -- assume that this node is not redundant.
  Ph->setStatus(Phi::PH_MultiVal);

  SExpr *E0 = simplifyToCanonicalVal(Ph->values()[0]);
  for (unsigned i = 1, n = Ph->values().size(); i < n; ++i) {
    SExpr *Ei = simplifyToCanonicalVal(Ph->values()[i]);
    if (Ei == Ph)
      continue;  // Recursive reference to itself.  Don't count.
    if (Ei != E0)
      return;    // Status is already set to MultiVal.
  }
  Ph->setStatus(Phi::PH_SingleVal);
}
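
// For example, a loop-header Phi of the hypothetical form "p = phi(x, p)"
// only ever carries the value x: the self-reference is skipped above, every
// remaining argument canonicalizes to x, and the node is marked PH_SingleVal
// so that simplifyToCanonicalVal() can replace it with x.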

// Renumbers the arguments and instructions to have unique, sequential IDs.
unsigned BasicBlock::renumberInstrs(unsigned ID) {
  for (auto *Arg : Args)
    Arg->setID(this, ID++);
  for (auto *Instr : Instrs)
    Instr->setID(this, ID++);
  TermInstr->setID(this, ID++);
  return ID;
}

// Sorts the CFG's blocks using a reverse post-order depth-first traversal.
// Each block will be written into the Blocks array in order, and its BlockID
// will be set to the index in the array.  Sorting should start from the entry
// block, and ID should be the total number of blocks.
unsigned BasicBlock::topologicalSort(SimpleArray<BasicBlock *> &Blocks,
                                     unsigned ID) {
  if (Visited) return ID;
  Visited = true;
  for (auto *Block : successors())
    ID = Block->topologicalSort(Blocks, ID);

  // Set the ID and update the block array in place.
  // We may lose pointers to unreachable blocks.
  assert(ID > 0);
  BlockID = --ID;
  Blocks[BlockID] = this;
  return ID;
}
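
// Because a block's ID is assigned only after all of its successors have been
// visited, filling the Blocks array from the back (BlockID = --ID) yields a
// reverse post-order.  For a diamond CFG A -> {B, C} -> D, the entry block A
// ends up with the smallest BlockID and the join block D with the largest.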

// Performs a reverse topological traversal, starting from the exit block and
// following back-edges.  The dominator is serialized before any predecessors,
// which guarantees that all blocks are serialized after their dominator and
// before their post-dominator (because it's a reverse topological traversal).
// ID should be initially set to 0.
//
// This sort assumes that (1) dominators have been computed, (2) there are no
// critical edges, and (3) the entry block is reachable from the exit block,
// and no blocks are accessible via traversal of back-edges from the exit that
// weren't accessible via forward edges from the entry.
unsigned BasicBlock::topologicalFinalSort(SimpleArray<BasicBlock *> &Blocks,
                                          unsigned ID) {
  // Visited is assumed to have been set by the topologicalSort.  This pass
  // assumes that !Visited means we have already visited this node in this
  // pass.
  if (!Visited) return ID;
  Visited = false;
  if (DominatorNode.Parent)
    ID = DominatorNode.Parent->topologicalFinalSort(Blocks, ID);
  for (auto *Pred : Predecessors)
    ID = Pred->topologicalFinalSort(Blocks, ID);
  assert(static_cast<size_t>(ID) < Blocks.size());
  BlockID = ID++;
  Blocks[BlockID] = this;
  return ID;
}

// Computes the immediate dominator of the current block.  Assumes that all of
// its predecessors have already computed their dominators.  This is achieved
// by visiting the nodes in topological order.
void BasicBlock::computeDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk backwards from each predecessor to find the common dominator node.
  for (auto *Pred : Predecessors) {
    // Skip back-edges.
    if (Pred->BlockID >= BlockID) continue;
    // If we don't have a candidate for dominator yet, take this one.
    if (Candidate == nullptr) {
      Candidate = Pred;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Pred;
    while (Alternate != Candidate) {
      if (Candidate->BlockID > Alternate->BlockID)
        Candidate = Candidate->DominatorNode.Parent;
      else
        Alternate = Alternate->DominatorNode.Parent;
    }
  }
  DominatorNode.Parent = Candidate;
  DominatorNode.SizeOfSubTree = 1;
}
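
// The candidate/alternate walk above is essentially the intersection step of
// the "two-finger" dominance algorithm described by Cooper, Harvey, and
// Kennedy in "A Simple, Fast Dominance Algorithm": the two pointers are moved
// up the dominator tree (guided by BlockID order) until they meet at the
// common ancestor, which becomes the immediate dominator.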

// Computes the immediate post-dominator of the current block.  Assumes that
// all of its successors have already computed their post-dominators.  This is
// achieved by visiting the nodes in reverse topological order.
void BasicBlock::computePostDominator() {
  BasicBlock *Candidate = nullptr;
  // Walk forward from each successor to find the common post-dominator node.
  for (auto *Succ : successors()) {
    // Skip back-edges.
    if (Succ->BlockID <= BlockID) continue;
    // If we don't have a candidate for post-dominator yet, take this one.
    if (Candidate == nullptr) {
      Candidate = Succ;
      continue;
    }
    // Walk the alternate and current candidate back to find a common ancestor.
    auto *Alternate = Succ;
    while (Alternate != Candidate) {
      if (Candidate->BlockID < Alternate->BlockID)
        Candidate = Candidate->PostDominatorNode.Parent;
      else
        Alternate = Alternate->PostDominatorNode.Parent;
    }
  }
  PostDominatorNode.Parent = Candidate;
  PostDominatorNode.SizeOfSubTree = 1;
}
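
// computePostDominator() mirrors computeDominator() on the reversed CFG: it
// iterates over successors instead of predecessors, treats a successor with a
// smaller-or-equal BlockID as a back-edge, and walks PostDominatorNode parents
// rather than DominatorNode parents.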

// Renumber instructions in all blocks.
void SCFG::renumberInstrs() {
  unsigned InstrID = 0;
  for (auto *Block : Blocks)
    InstrID = Block->renumberInstrs(InstrID);
}

static inline void computeNodeSize(BasicBlock *B,
                                   BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    // Initially set the NodeID relative to the (as yet uncomputed) parent ID.
    N->NodeID = P->SizeOfSubTree;
    P->SizeOfSubTree += N->SizeOfSubTree;
  }
}

static inline void computeNodeID(BasicBlock *B,
                                 BasicBlock::TopologyNode BasicBlock::*TN) {
  BasicBlock::TopologyNode *N = &(B->*TN);
  if (N->Parent) {
    BasicBlock::TopologyNode *P = &(N->Parent->*TN);
    N->NodeID += P->NodeID;  // Fix NodeIDs relative to the starting node.
  }
}
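
// computeNodeSize() and computeNodeID() together implement a two-pass
// numbering of a (post-)dominator tree: the first pass records each node's
// offset within its parent and accumulates SizeOfSubTree, and the second pass
// converts those offsets into absolute NodeIDs.  Each node then owns the
// contiguous ID range [NodeID, NodeID + SizeOfSubTree), so an ancestor query
// in the tree reduces to a simple range check.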

// Normalizes a CFG.  Normalization has a few major components:
// 1) Removing unreachable blocks.
// 2) Computing dominators and post-dominators.
// 3) Topologically sorting the blocks into the "Blocks" array.
void SCFG::computeNormalForm() {
  // Topologically sort the blocks starting from the entry block.
  unsigned NumUnreachableBlocks = Entry->topologicalSort(Blocks, Blocks.size());
  if (NumUnreachableBlocks > 0) {
    // If there were unreachable blocks, shift everything down and delete them.
    for (unsigned I = NumUnreachableBlocks, E = Blocks.size(); I < E; ++I) {
      unsigned NI = I - NumUnreachableBlocks;
      Blocks[NI] = Blocks[I];
      Blocks[NI]->BlockID = NI;
      // FIXME: clean up predecessor pointers to unreachable blocks?
    }
    Blocks.drop(NumUnreachableBlocks);
  }

  // Compute dominators.
  for (auto *Block : Blocks)
    Block->computeDominator();

  // Once dominators have been computed, the final sort may be performed.
  unsigned NumBlocks = Exit->topologicalFinalSort(Blocks, 0);
  assert(static_cast<size_t>(NumBlocks) == Blocks.size());
  (void) NumBlocks;

  // Renumber the instructions now that we have a final sort.
  renumberInstrs();

  // Compute post-dominators and compute the sizes of each node in the
  // dominator tree.
  for (auto *Block : Blocks.reverse()) {
    Block->computePostDominator();
    computeNodeSize(Block, &BasicBlock::DominatorNode);
  }
  // Compute the sizes of each node in the post-dominator tree and assign IDs
  // in the dominator tree.
  for (auto *Block : Blocks) {
    computeNodeID(Block, &BasicBlock::DominatorNode);
    computeNodeSize(Block, &BasicBlock::PostDominatorNode);
  }
  // Assign IDs in the post-dominator tree.
  for (auto *Block : Blocks.reverse()) {
    computeNodeID(Block, &BasicBlock::PostDominatorNode);
  }
}
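
// After computeNormalForm() completes, the CFG is in normal form: unreachable
// blocks have been dropped, Blocks holds the remaining blocks in topological
// order with BlockIDs matching their array indices, instructions carry
// sequential IDs, and both the dominator and post-dominator trees have their
// Parent, NodeID, and SizeOfSubTree fields populated for later queries.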