clang -cc1 -cc1 -triple amd64-unknown-openbsd7.0 -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name MemorySSAUpdater.cpp -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -mrelocation-model pic -pic-level 1 -fhalf-no-semantic-interposition -mframe-pointer=all -relaxed-aliasing -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -tune-cpu generic -debugger-tuning=gdb -fcoverage-compilation-dir=/usr/src/gnu/usr.bin/clang/libLLVM/obj -resource-dir /usr/local/lib/clang/13.0.0 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/AMDGPU -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Analysis -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ASMParser -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/BinaryFormat -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Bitcode -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Bitcode -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Bitstream -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /include/llvm/CodeGen -I /include/llvm/CodeGen/PBQP -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/IR -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/IR -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms/Coroutines -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ProfileData/Coverage -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/DebugInfo/CodeView -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/DebugInfo/DWARF -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/DebugInfo -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/DebugInfo/MSF -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/DebugInfo/PDB -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Demangle -I 
/usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ExecutionEngine -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ExecutionEngine/JITLink -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ExecutionEngine/Orc -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Frontend -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Frontend/OpenACC -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Frontend -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Frontend/OpenMP -I /include/llvm/CodeGen/GlobalISel -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/IRReader -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms/InstCombine -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/Transforms/InstCombine -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/LTO -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Linker -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/MC -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/MC/MCParser -I /include/llvm/CodeGen/MIRParser -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Object -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Option -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Passes -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ProfileData -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms/Scalar -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/ADT -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Support -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/DebugInfo/Symbolize -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Target -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms/Utils -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms/Vectorize -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include/llvm/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Target/X86 -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms -I /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include/llvm/Transforms/IPO -I 
/usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/include -I /usr/src/gnu/usr.bin/clang/libLLVM/../include -I /usr/src/gnu/usr.bin/clang/libLLVM/obj -I /usr/src/gnu/usr.bin/clang/libLLVM/obj/../include -D NDEBUG -D __STDC_LIMIT_MACROS -D __STDC_CONSTANT_MACROS -D __STDC_FORMAT_MACROS -D LLVM_PREFIX="/usr" -D PIC -internal-isystem /usr/include/c++/v1 -internal-isystem /usr/local/lib/clang/13.0.0/include -internal-externc-isystem /usr/include -O2 -Wno-unused-parameter -Wwrite-strings -Wno-missing-field-initializers -Wno-long-long -Wno-comment -std=c++14 -fdeprecated-macro -fdebug-compilation-dir=/usr/src/gnu/usr.bin/clang/libLLVM/obj -ferror-limit 19 -fvisibility-inlines-hidden -fwrapv -D_RET_PROTECTOR -ret-protector -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -fno-builtin-malloc -fno-builtin-calloc -fno-builtin-realloc -fno-builtin-valloc -fno-builtin-free -fno-builtin-strdup -fno-builtin-strndup -analyzer-output=html -faddrsig -D__GCC_HAVE_DWARF2_CFI_ASM=1 -o /home/ben/Projects/vmm/scan-build/2022-01-12-194120-40624-1 -x c++ /usr/src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Analysis/MemorySSAUpdater.cpp
1 | |
2 | |
3 | |
4 | |
5 | |
6 | |
7 | |
8 | |
9 | |
10 | |
11 | |
12 | #include "llvm/Analysis/MemorySSAUpdater.h" |
13 | #include "llvm/Analysis/LoopIterator.h" |
14 | #include "llvm/ADT/STLExtras.h" |
15 | #include "llvm/ADT/SetVector.h" |
16 | #include "llvm/ADT/SmallPtrSet.h" |
17 | #include "llvm/Analysis/IteratedDominanceFrontier.h" |
18 | #include "llvm/Analysis/MemorySSA.h" |
19 | #include "llvm/IR/BasicBlock.h" |
20 | #include "llvm/IR/DataLayout.h" |
21 | #include "llvm/IR/Dominators.h" |
22 | #include "llvm/IR/GlobalVariable.h" |
23 | #include "llvm/IR/IRBuilder.h" |
24 | #include "llvm/IR/LLVMContext.h" |
25 | #include "llvm/IR/Metadata.h" |
26 | #include "llvm/IR/Module.h" |
27 | #include "llvm/Support/Debug.h" |
28 | #include "llvm/Support/FormattedStream.h" |
29 | #include <algorithm> |
30 | |
31 | #define DEBUG_TYPE "memoryssa" |
32 | using namespace llvm; |
33 | |
34 | |
35 | |
36 | |
37 | |
38 | |
39 | |
40 | |
41 | |
42 | |
43 | MemoryAccess *MemorySSAUpdater::getPreviousDefRecursive( |
44 | BasicBlock *BB, |
45 | DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) { |
46 | |
47 | |
48 | auto Cached = CachedPreviousDef.find(BB); |
49 | if (Cached != CachedPreviousDef.end()) |
50 | return Cached->second; |
51 | |
52 | |
53 | if (!MSSA->DT->isReachableFromEntry(BB)) |
54 | return MSSA->getLiveOnEntryDef(); |
55 | |
56 | if (BasicBlock *Pred = BB->getUniquePredecessor()) { |
57 | VisitedBlocks.insert(BB); |
58 | |
59 | MemoryAccess *Result = getPreviousDefFromEnd(Pred, CachedPreviousDef); |
60 | CachedPreviousDef.insert({BB, Result}); |
61 | return Result; |
62 | } |
63 | |
64 | if (VisitedBlocks.count(BB)) { |
65 | |
66 | |
67 | |
68 | MemoryAccess *Result = MSSA->createMemoryPhi(BB); |
69 | CachedPreviousDef.insert({BB, Result}); |
70 | return Result; |
71 | } |
72 | |
73 | if (VisitedBlocks.insert(BB).second) { |
74 | |
75 | SmallVector<TrackingVH<MemoryAccess>, 8> PhiOps; |
76 | |
77 | |
78 | |
79 | |
80 | bool UniqueIncomingAccess = true; |
81 | MemoryAccess *SingleAccess = nullptr; |
82 | for (auto *Pred : predecessors(BB)) { |
83 | if (MSSA->DT->isReachableFromEntry(Pred)) { |
84 | auto *IncomingAccess = getPreviousDefFromEnd(Pred, CachedPreviousDef); |
85 | if (!SingleAccess) |
86 | SingleAccess = IncomingAccess; |
87 | else if (IncomingAccess != SingleAccess) |
88 | UniqueIncomingAccess = false; |
89 | PhiOps.push_back(IncomingAccess); |
90 | } else |
91 | PhiOps.push_back(MSSA->getLiveOnEntryDef()); |
92 | } |
93 | |
94 | |
95 | |
96 | MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MSSA->getMemoryAccess(BB)); |
97 | |
98 | |
99 | auto *Result = tryRemoveTrivialPhi(Phi, PhiOps); |
100 | |
101 | if (Result == Phi && UniqueIncomingAccess && SingleAccess) { |
102 | |
103 | if (Phi) { |
104 | assert(Phi->operands().empty() && "Expected empty Phi"); |
105 | Phi->replaceAllUsesWith(SingleAccess); |
106 | removeMemoryAccess(Phi); |
107 | } |
108 | Result = SingleAccess; |
109 | } else if (Result == Phi && !(UniqueIncomingAccess && SingleAccess)) { |
110 | if (!Phi) |
111 | Phi = MSSA->createMemoryPhi(BB); |
112 | |
113 | |
114 | |
115 | |
116 | if (Phi->getNumOperands() != 0) { |
117 | |
118 | if (!std::equal(Phi->op_begin(), Phi->op_end(), PhiOps.begin())) { |
119 | |
120 | llvm::copy(PhiOps, Phi->op_begin()); |
121 | std::copy(pred_begin(BB), pred_end(BB), Phi->block_begin()); |
122 | } |
123 | } else { |
124 | unsigned i = 0; |
125 | for (auto *Pred : predecessors(BB)) |
126 | Phi->addIncoming(&*PhiOps[i++], Pred); |
127 | InsertedPHIs.push_back(Phi); |
128 | } |
129 | Result = Phi; |
130 | } |
131 | |
132 | |
133 | VisitedBlocks.erase(BB); |
134 | CachedPreviousDef.insert({BB, Result}); |
135 | return Result; |
136 | } |
137 | llvm_unreachable("Should have hit one of the three cases above"); |
138 | } |
139 | |
140 | |
141 | |
142 | |
143 | |
144 | MemoryAccess *MemorySSAUpdater::getPreviousDef(MemoryAccess *MA) { |
145 | if (auto *LocalResult = getPreviousDefInBlock(MA)) |
146 | return LocalResult; |
147 | DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef; |
148 | return getPreviousDefRecursive(MA->getBlock(), CachedPreviousDef); |
149 | } |
150 | |
151 | |
152 | |
153 | |
154 | MemoryAccess *MemorySSAUpdater::getPreviousDefInBlock(MemoryAccess *MA) { |
155 | auto *Defs = MSSA->getWritableBlockDefs(MA->getBlock()); |
156 | |
157 | |
158 | if (Defs) { |
159 | |
160 | if (!isa<MemoryUse>(MA)) { |
161 | auto Iter = MA->getReverseDefsIterator(); |
162 | ++Iter; |
163 | if (Iter != Defs->rend()) |
164 | return &*Iter; |
165 | } else { |
166 | |
167 | auto End = MSSA->getWritableBlockAccesses(MA->getBlock())->rend(); |
168 | for (auto &U : make_range(++MA->getReverseIterator(), End)) |
169 | if (!isa<MemoryUse>(U)) |
170 | return cast<MemoryAccess>(&U); |
171 | |
172 | return nullptr; |
173 | } |
174 | } |
175 | return nullptr; |
176 | } |
177 | |
178 | |
179 | MemoryAccess *MemorySSAUpdater::getPreviousDefFromEnd( |
180 | BasicBlock *BB, |
181 | DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> &CachedPreviousDef) { |
182 | auto *Defs = MSSA->getWritableBlockDefs(BB); |
183 | |
184 | if (Defs) { |
185 | CachedPreviousDef.insert({BB, &*Defs->rbegin()}); |
186 | return &*Defs->rbegin(); |
187 | } |
188 | |
189 | return getPreviousDefRecursive(BB, CachedPreviousDef); |
190 | } |
191 | |
192 | MemoryAccess *MemorySSAUpdater::recursePhi(MemoryAccess *Phi) { |
193 | if (!Phi) |
194 | return nullptr; |
195 | TrackingVH<MemoryAccess> Res(Phi); |
196 | SmallVector<TrackingVH<Value>, 8> Uses; |
197 | std::copy(Phi->user_begin(), Phi->user_end(), std::back_inserter(Uses)); |
198 | for (auto &U : Uses) |
199 | if (MemoryPhi *UsePhi = dyn_cast<MemoryPhi>(&*U)) |
200 | tryRemoveTrivialPhi(UsePhi); |
201 | return Res; |
202 | } |
203 | |
204 | |
205 | |
206 | |
207 | |
208 | |
209 | MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi) { |
210 | assert(Phi && "Can only remove concrete Phi."); |
211 | auto OperRange = Phi->operands(); |
212 | return tryRemoveTrivialPhi(Phi, OperRange); |
213 | } |
214 | template <class RangeType> |
215 | MemoryAccess *MemorySSAUpdater::tryRemoveTrivialPhi(MemoryPhi *Phi, |
216 | RangeType &Operands) { |
217 | |
218 | if (NonOptPhis.count(Phi)) |
219 | return Phi; |
220 | |
221 | |
222 | MemoryAccess *Same = nullptr; |
223 | for (auto &Op : Operands) { |
224 | |
225 | if (Op == Phi || Op == Same) |
226 | continue; |
227 | |
228 | if (Same) |
229 | return Phi; |
230 | Same = cast<MemoryAccess>(&*Op); |
231 | } |
232 | |
233 | if (Same == nullptr) |
234 | return MSSA->getLiveOnEntryDef(); |
235 | if (Phi) { |
236 | Phi->replaceAllUsesWith(Same); |
237 | removeMemoryAccess(Phi); |
238 | } |
239 | |
240 | |
241 | |
242 | return recursePhi(Same); |
243 | } |
244 | |
245 | void MemorySSAUpdater::insertUse(MemoryUse *MU, bool RenameUses) { |
246 | InsertedPHIs.clear(); |
247 | MU->setDefiningAccess(getPreviousDef(MU)); |
248 | |
249 | |
250 | |
251 | |
252 | |
253 | |
254 | |
255 | |
256 | |
257 | |
258 | |
259 | |
260 | |
261 | |
262 | if (!RenameUses && !InsertedPHIs.empty()) { |
263 | auto *Defs = MSSA->getBlockDefs(MU->getBlock()); |
264 | (void)Defs; |
265 | assert((!Defs || (++Defs->begin() == Defs->end())) && |
266 | "Block may have only a Phi or no defs"); |
267 | } |
268 | |
269 | if (RenameUses && InsertedPHIs.size()) { |
270 | SmallPtrSet<BasicBlock *, 16> Visited; |
271 | BasicBlock *StartBlock = MU->getBlock(); |
272 | |
273 | if (auto *Defs = MSSA->getWritableBlockDefs(StartBlock)) { |
274 | MemoryAccess *FirstDef = &*Defs->begin(); |
275 | |
276 | |
277 | if (auto *MD = dyn_cast<MemoryDef>(FirstDef)) |
278 | FirstDef = MD->getDefiningAccess(); |
279 | |
280 | MSSA->renamePass(MU->getBlock(), FirstDef, Visited); |
281 | } |
282 | |
283 | |
284 | for (auto &MP : InsertedPHIs) |
285 | if (MemoryPhi *Phi = cast_or_null<MemoryPhi>(MP)) |
286 | MSSA->renamePass(Phi->getBlock(), nullptr, Visited); |
287 | } |
288 | } |
289 | |
290 | |
291 | static void setMemoryPhiValueForBlock(MemoryPhi *MP, const BasicBlock *BB, |
292 | MemoryAccess *NewDef) { |
293 | |
294 | |
295 | int i = MP->getBasicBlockIndex(BB); |
296 | assert(i != -1 && "Should have found the basic block in the phi"); |
297 | |
298 | |
299 | for (auto BBIter = MP->block_begin() + i; BBIter != MP->block_end(); |
300 | ++BBIter) { |
301 | if (*BBIter != BB) |
302 | break; |
303 | MP->setIncomingValue(i, NewDef); |
304 | ++i; |
305 | } |
306 | } |
307 | |
308 | |
309 | |
310 | |
311 | |
312 | |
313 | |
314 | void MemorySSAUpdater::insertDef(MemoryDef *MD, bool RenameUses) { |
315 | InsertedPHIs.clear(); |
316 | |
317 | |
318 | MemoryAccess *DefBefore = getPreviousDef(MD); |
319 | bool DefBeforeSameBlock = false; |
320 | if (DefBefore->getBlock() == MD->getBlock() && |
321 | !(isa<MemoryPhi>(DefBefore) && |
322 | llvm::is_contained(InsertedPHIs, DefBefore))) |
323 | DefBeforeSameBlock = true; |
324 | |
325 | |
326 | |
327 | |
328 | |
329 | if (DefBeforeSameBlock) { |
330 | DefBefore->replaceUsesWithIf(MD, [MD](Use &U) { |
331 | |
332 | |
333 | User *Usr = U.getUser(); |
334 | return !isa<MemoryUse>(Usr) && Usr != MD; |
335 | |
336 | |
337 | }); |
338 | } |
339 | |
340 | |
341 | MD->setDefiningAccess(DefBefore); |
342 | |
343 | SmallVector<WeakVH, 8> FixupList(InsertedPHIs.begin(), InsertedPHIs.end()); |
344 | |
345 | SmallSet<WeakVH, 8> ExistingPhis; |
346 | |
347 | |
348 | unsigned NewPhiIndex = InsertedPHIs.size(); |
349 | if (!DefBeforeSameBlock) { |
350 | |
351 | |
352 | |
353 | |
354 | |
355 | |
356 | |
357 | |
358 | |
359 | |
360 | |
361 | |
362 | |
363 | |
364 | SmallPtrSet<BasicBlock *, 2> DefiningBlocks; |
365 | |
366 | |
367 | |
368 | |
369 | |
370 | DefiningBlocks.insert(MD->getBlock()); |
371 | for (const auto &VH : InsertedPHIs) |
372 | if (const auto *RealPHI = cast_or_null<MemoryPhi>(VH)) |
373 | DefiningBlocks.insert(RealPHI->getBlock()); |
374 | ForwardIDFCalculator IDFs(*MSSA->DT); |
375 | SmallVector<BasicBlock *, 32> IDFBlocks; |
376 | IDFs.setDefiningBlocks(DefiningBlocks); |
377 | IDFs.calculate(IDFBlocks); |
378 | SmallVector<AssertingVH<MemoryPhi>, 4> NewInsertedPHIs; |
379 | for (auto *BBIDF : IDFBlocks) { |
380 | auto *MPhi = MSSA->getMemoryAccess(BBIDF); |
381 | if (!MPhi) { |
382 | MPhi = MSSA->createMemoryPhi(BBIDF); |
383 | NewInsertedPHIs.push_back(MPhi); |
384 | } else { |
385 | ExistingPhis.insert(MPhi); |
386 | } |
387 | |
388 | |
389 | |
390 | |
391 | |
392 | |
393 | NonOptPhis.insert(MPhi); |
394 | } |
395 | for (auto &MPhi : NewInsertedPHIs) { |
396 | auto *BBIDF = MPhi->getBlock(); |
397 | for (auto *Pred : predecessors(BBIDF)) { |
398 | DenseMap<BasicBlock *, TrackingVH<MemoryAccess>> CachedPreviousDef; |
399 | MPhi->addIncoming(getPreviousDefFromEnd(Pred, CachedPreviousDef), Pred); |
400 | } |
401 | } |
402 | |
403 | |
404 | |
405 | NewPhiIndex = InsertedPHIs.size(); |
406 | for (auto &MPhi : NewInsertedPHIs) { |
407 | InsertedPHIs.push_back(&*MPhi); |
408 | FixupList.push_back(&*MPhi); |
409 | } |
410 | |
411 | FixupList.push_back(MD); |
412 | } |
413 | |
414 | |
415 | |
416 | unsigned NewPhiIndexEnd = InsertedPHIs.size(); |
417 | |
418 | while (!FixupList.empty()) { |
419 | unsigned StartingPHISize = InsertedPHIs.size(); |
420 | fixupDefs(FixupList); |
421 | FixupList.clear(); |
422 | |
423 | FixupList.append(InsertedPHIs.begin() + StartingPHISize, InsertedPHIs.end()); |
424 | } |
425 | |
426 | |
427 | unsigned NewPhiSize = NewPhiIndexEnd - NewPhiIndex; |
428 | if (NewPhiSize) |
429 | tryRemoveTrivialPhis(ArrayRef<WeakVH>(&InsertedPHIs[NewPhiIndex], NewPhiSize)); |
430 | |
431 | |
432 | |
433 | BasicBlock *StartBlock = MD->getBlock(); |
434 | if (RenameUses && MSSA->getDomTree().getNode(StartBlock)) { |
435 | SmallPtrSet<BasicBlock *, 16> Visited; |
436 | |
437 | |
438 | MemoryAccess *FirstDef = &*MSSA->getWritableBlockDefs(StartBlock)->begin(); |
439 | |
440 | |
441 | if (auto *MD = dyn_cast<MemoryDef>(FirstDef)) |
442 | FirstDef = MD->getDefiningAccess(); |
443 | |
444 | MSSA->renamePass(MD->getBlock(), FirstDef, Visited); |
445 | |
446 | |
447 | for (auto &MP : InsertedPHIs) { |
448 | MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP); |
449 | if (Phi) |
450 | MSSA->renamePass(Phi->getBlock(), nullptr, Visited); |
451 | } |
452 | |
453 | |
454 | for (auto &MP : ExistingPhis) { |
455 | MemoryPhi *Phi = dyn_cast_or_null<MemoryPhi>(MP); |
456 | if (Phi) |
457 | MSSA->renamePass(Phi->getBlock(), nullptr, Visited); |
458 | } |
459 | } |
460 | } |
461 | |
462 | void MemorySSAUpdater::fixupDefs(const SmallVectorImpl<WeakVH> &Vars) { |
463 | SmallPtrSet<const BasicBlock *, 8> Seen; |
464 | SmallVector<const BasicBlock *, 16> Worklist; |
465 | for (auto &Var : Vars) { |
466 | MemoryAccess *NewDef = dyn_cast_or_null<MemoryAccess>(Var); |
467 | if (!NewDef) |
468 | continue; |
469 | |
470 | auto *Defs = MSSA->getWritableBlockDefs(NewDef->getBlock()); |
471 | auto DefIter = NewDef->getDefsIterator(); |
472 | |
473 | |
474 | if (MemoryPhi *Phi = dyn_cast<MemoryPhi>(NewDef)) |
475 | NonOptPhis.erase(Phi); |
476 | |
477 | |
478 | if (++DefIter != Defs->end()) { |
479 | cast<MemoryDef>(DefIter)->setDefiningAccess(NewDef); |
480 | continue; |
481 | } |
482 | |
483 | |
484 | |
485 | |
486 | for (const auto *S : successors(NewDef->getBlock())) { |
487 | if (auto *MP = MSSA->getMemoryAccess(S)) |
488 | setMemoryPhiValueForBlock(MP, NewDef->getBlock(), NewDef); |
489 | else |
490 | Worklist.push_back(S); |
491 | } |
492 | |
493 | while (!Worklist.empty()) { |
494 | const BasicBlock *FixupBlock = Worklist.back(); |
495 | Worklist.pop_back(); |
496 | |
497 | |
498 | if (auto *Defs = MSSA->getWritableBlockDefs(FixupBlock)) { |
499 | auto *FirstDef = &*Defs->begin(); |
500 | |
501 | assert(!isa<MemoryPhi>(FirstDef) && |
502 | "Should have already handled phi nodes!"); |
503 | |
504 | |
505 | assert(MSSA->dominates(NewDef, FirstDef) && |
506 | "Should have dominated the new access"); |
507 | |
508 | |
509 | |
510 | |
511 | cast<MemoryDef>(FirstDef)->setDefiningAccess(getPreviousDef(FirstDef)); |
512 | return; |
513 | } |
514 | |
515 | for (const auto *S : successors(FixupBlock)) { |
516 | |
517 | |
518 | if (auto *MP = MSSA->getMemoryAccess(S)) |
519 | setMemoryPhiValueForBlock(MP, FixupBlock, NewDef); |
520 | else { |
521 | |
522 | |
523 | if (!Seen.insert(S).second) |
524 | continue; |
525 | Worklist.push_back(S); |
526 | } |
527 | } |
528 | } |
529 | } |
530 | } |
531 | |
532 | void MemorySSAUpdater::removeEdge(BasicBlock *From, BasicBlock *To) { |
533 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) { |
534 | MPhi->unorderedDeleteIncomingBlock(From); |
535 | tryRemoveTrivialPhi(MPhi); |
536 | } |
537 | } |
538 | |
539 | void MemorySSAUpdater::removeDuplicatePhiEdgesBetween(const BasicBlock *From, |
540 | const BasicBlock *To) { |
541 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(To)) { |
542 | bool Found = false; |
543 | MPhi->unorderedDeleteIncomingIf([&](const MemoryAccess *, BasicBlock *B) { |
544 | if (From != B) |
545 | return false; |
546 | if (Found) |
547 | return true; |
548 | Found = true; |
549 | return false; |
550 | }); |
551 | tryRemoveTrivialPhi(MPhi); |
552 | } |
553 | } |
554 | |
555 | |
556 | |
557 | static MemoryAccess *onlySingleValue(MemoryPhi *MP) { |
558 | MemoryAccess *MA = nullptr; |
559 | |
560 | for (auto &Arg : MP->operands()) { |
561 | if (!MA) |
562 | MA = cast<MemoryAccess>(Arg); |
563 | else if (MA != Arg) |
564 | return nullptr; |
565 | } |
566 | return MA; |
567 | } |
568 | |
569 | static MemoryAccess *getNewDefiningAccessForClone(MemoryAccess *MA, |
570 | const ValueToValueMapTy &VMap, |
571 | PhiToDefMap &MPhiMap, |
572 | bool CloneWasSimplified, |
573 | MemorySSA *MSSA) { |
574 | MemoryAccess *InsnDefining = MA; |
575 | if (MemoryDef *DefMUD = dyn_cast<MemoryDef>(InsnDefining)) { |
576 | if (!MSSA->isLiveOnEntryDef(DefMUD)) { |
577 | Instruction *DefMUDI = DefMUD->getMemoryInst(); |
578 | assert(DefMUDI && "Found MemoryUseOrDef with no Instruction."); |
579 | if (Instruction *NewDefMUDI = |
580 | cast_or_null<Instruction>(VMap.lookup(DefMUDI))) { |
581 | InsnDefining = MSSA->getMemoryAccess(NewDefMUDI); |
582 | if (!CloneWasSimplified) |
583 | assert(InsnDefining && "Defining instruction cannot be nullptr."); |
584 | else if (!InsnDefining || isa<MemoryUse>(InsnDefining)) { |
585 | |
586 | auto DefIt = DefMUD->getDefsIterator(); |
587 | |
588 | |
589 | |
590 | assert(DefIt != MSSA->getBlockDefs(DefMUD->getBlock())->begin() && |
591 | "Previous def must exist"); |
592 | InsnDefining = getNewDefiningAccessForClone( |
593 | &*(--DefIt), VMap, MPhiMap, CloneWasSimplified, MSSA); |
594 | } |
595 | } |
596 | } |
597 | } else { |
598 | MemoryPhi *DefPhi = cast<MemoryPhi>(InsnDefining); |
599 | if (MemoryAccess *NewDefPhi = MPhiMap.lookup(DefPhi)) |
600 | InsnDefining = NewDefPhi; |
601 | } |
602 | assert(InsnDefining && "Defining instruction cannot be nullptr."); |
603 | return InsnDefining; |
604 | } |
605 | |
606 | void MemorySSAUpdater::cloneUsesAndDefs(BasicBlock *BB, BasicBlock *NewBB, |
607 | const ValueToValueMapTy &VMap, |
608 | PhiToDefMap &MPhiMap, |
609 | bool CloneWasSimplified) { |
610 | const MemorySSA::AccessList *Acc = MSSA->getBlockAccesses(BB); |
611 | if (!Acc) |
612 | return; |
613 | for (const MemoryAccess &MA : *Acc) { |
614 | if (const MemoryUseOrDef *MUD = dyn_cast<MemoryUseOrDef>(&MA)) { |
615 | Instruction *Insn = MUD->getMemoryInst(); |
616 | |
617 | |
618 | |
619 | |
620 | |
621 | |
622 | |
623 | if (Instruction *NewInsn = |
624 | dyn_cast_or_null<Instruction>(VMap.lookup(Insn))) { |
625 | MemoryAccess *NewUseOrDef = MSSA->createDefinedAccess( |
626 | NewInsn, |
627 | getNewDefiningAccessForClone(MUD->getDefiningAccess(), VMap, |
628 | MPhiMap, CloneWasSimplified, MSSA), |
629 | CloneWasSimplified ? nullptr : MUD, |
630 | CloneWasSimplified ? false : true); |
631 | if (NewUseOrDef) |
632 | MSSA->insertIntoListsForBlock(NewUseOrDef, NewBB, MemorySSA::End); |
633 | } |
634 | } |
635 | } |
636 | } |
637 | |
638 | void MemorySSAUpdater::updatePhisWhenInsertingUniqueBackedgeBlock( |
639 | BasicBlock *Header, BasicBlock *Preheader, BasicBlock *BEBlock) { |
640 | auto *MPhi = MSSA->getMemoryAccess(Header); |
641 | if (!MPhi) |
642 | return; |
643 | |
644 | |
645 | |
646 | auto *NewMPhi = MSSA->createMemoryPhi(BEBlock); |
647 | bool HasUniqueIncomingValue = true; |
648 | MemoryAccess *UniqueValue = nullptr; |
649 | for (unsigned I = 0, E = MPhi->getNumIncomingValues(); I != E; ++I) { |
650 | BasicBlock *IBB = MPhi->getIncomingBlock(I); |
651 | MemoryAccess *IV = MPhi->getIncomingValue(I); |
652 | if (IBB != Preheader) { |
653 | NewMPhi->addIncoming(IV, IBB); |
654 | if (HasUniqueIncomingValue) { |
655 | if (!UniqueValue) |
656 | UniqueValue = IV; |
657 | else if (UniqueValue != IV) |
658 | HasUniqueIncomingValue = false; |
659 | } |
660 | } |
661 | } |
662 | |
663 | |
664 | |
665 | auto *AccFromPreheader = MPhi->getIncomingValueForBlock(Preheader); |
666 | MPhi->setIncomingValue(0, AccFromPreheader); |
667 | MPhi->setIncomingBlock(0, Preheader); |
668 | for (unsigned I = MPhi->getNumIncomingValues() - 1; I >= 1; --I) |
669 | MPhi->unorderedDeleteIncoming(I); |
670 | MPhi->addIncoming(NewMPhi, BEBlock); |
671 | |
672 | |
673 | |
674 | tryRemoveTrivialPhi(NewMPhi); |
675 | } |
676 | |
677 | void MemorySSAUpdater::updateForClonedLoop(const LoopBlocksRPO &LoopBlocks, |
678 | ArrayRef<BasicBlock *> ExitBlocks, |
679 | const ValueToValueMapTy &VMap, |
680 | bool IgnoreIncomingWithNoClones) { |
681 | PhiToDefMap MPhiMap; |
682 | |
683 | auto FixPhiIncomingValues = [&](MemoryPhi *Phi, MemoryPhi *NewPhi) { |
684 | assert(Phi && NewPhi && "Invalid Phi nodes."); |
685 | BasicBlock *NewPhiBB = NewPhi->getBlock(); |
686 | SmallPtrSet<BasicBlock *, 4> NewPhiBBPreds(pred_begin(NewPhiBB), |
687 | pred_end(NewPhiBB)); |
688 | for (unsigned It = 0, E = Phi->getNumIncomingValues(); It < E; ++It) { |
689 | MemoryAccess *IncomingAccess = Phi->getIncomingValue(It); |
690 | BasicBlock *IncBB = Phi->getIncomingBlock(It); |
691 | |
692 | if (BasicBlock *NewIncBB = cast_or_null<BasicBlock>(VMap.lookup(IncBB))) |
693 | IncBB = NewIncBB; |
694 | else if (IgnoreIncomingWithNoClones) |
695 | continue; |
696 | |
697 | |
698 | |
699 | |
700 | |
701 | if (!NewPhiBBPreds.count(IncBB)) |
702 | continue; |
703 | |
704 | |
705 | if (MemoryUseOrDef *IncMUD = dyn_cast<MemoryUseOrDef>(IncomingAccess)) { |
706 | if (!MSSA->isLiveOnEntryDef(IncMUD)) { |
707 | Instruction *IncI = IncMUD->getMemoryInst(); |
708 | assert(IncI && "Found MemoryUseOrDef with no Instruction."); |
709 | if (Instruction *NewIncI = |
710 | cast_or_null<Instruction>(VMap.lookup(IncI))) { |
711 | IncMUD = MSSA->getMemoryAccess(NewIncI); |
712 | assert(IncMUD && |
713 | "MemoryUseOrDef cannot be null, all preds processed."); |
714 | } |
715 | } |
716 | NewPhi->addIncoming(IncMUD, IncBB); |
717 | } else { |
718 | MemoryPhi *IncPhi = cast<MemoryPhi>(IncomingAccess); |
719 | if (MemoryAccess *NewDefPhi = MPhiMap.lookup(IncPhi)) |
720 | NewPhi->addIncoming(NewDefPhi, IncBB); |
721 | else |
722 | NewPhi->addIncoming(IncPhi, IncBB); |
723 | } |
724 | } |
725 | if (auto *SingleAccess = onlySingleValue(NewPhi)) { |
726 | MPhiMap[Phi] = SingleAccess; |
727 | removeMemoryAccess(NewPhi); |
728 | } |
729 | }; |
730 | |
731 | auto ProcessBlock = [&](BasicBlock *BB) { |
732 | BasicBlock *NewBlock = cast_or_null<BasicBlock>(VMap.lookup(BB)); |
733 | if (!NewBlock) |
734 | return; |
735 | |
736 | assert(!MSSA->getWritableBlockAccesses(NewBlock) && |
737 | "Cloned block should have no accesses"); |
738 | |
739 | |
740 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) { |
741 | MemoryPhi *NewPhi = MSSA->createMemoryPhi(NewBlock); |
742 | MPhiMap[MPhi] = NewPhi; |
743 | } |
744 | |
745 | cloneUsesAndDefs(BB, NewBlock, VMap, MPhiMap); |
746 | }; |
747 | |
748 | for (auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks)) |
749 | ProcessBlock(BB); |
750 | |
751 | for (auto BB : llvm::concat<BasicBlock *const>(LoopBlocks, ExitBlocks)) |
752 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) |
753 | if (MemoryAccess *NewPhi = MPhiMap.lookup(MPhi)) |
754 | FixPhiIncomingValues(MPhi, cast<MemoryPhi>(NewPhi)); |
755 | } |
756 | |
757 | void MemorySSAUpdater::updateForClonedBlockIntoPred( |
758 | BasicBlock *BB, BasicBlock *P1, const ValueToValueMapTy &VM) { |
759 | |
760 | |
761 | |
762 | |
763 | |
764 | |
765 | |
766 | |
767 | PhiToDefMap MPhiMap; |
768 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(BB)) |
769 | MPhiMap[MPhi] = MPhi->getIncomingValueForBlock(P1); |
770 | cloneUsesAndDefs(BB, P1, VM, MPhiMap, true); |
771 | } |
772 | |
773 | template <typename Iter> |
774 | void MemorySSAUpdater::privateUpdateExitBlocksForClonedLoop( |
775 | ArrayRef<BasicBlock *> ExitBlocks, Iter ValuesBegin, Iter ValuesEnd, |
776 | DominatorTree &DT) { |
777 | SmallVector<CFGUpdate, 4> Updates; |
778 | |
779 | for (auto *Exit : ExitBlocks) |
780 | for (const ValueToValueMapTy *VMap : make_range(ValuesBegin, ValuesEnd)) |
781 | if (BasicBlock *NewExit = cast_or_null<BasicBlock>(VMap->lookup(Exit))) { |
782 | BasicBlock *ExitSucc = NewExit->getTerminator()->getSuccessor(0); |
783 | Updates.push_back({DT.Insert, NewExit, ExitSucc}); |
784 | } |
785 | applyInsertUpdates(Updates, DT); |
786 | } |
787 | |
788 | void MemorySSAUpdater::updateExitBlocksForClonedLoop( |
789 | ArrayRef<BasicBlock *> ExitBlocks, const ValueToValueMapTy &VMap, |
790 | DominatorTree &DT) { |
791 | const ValueToValueMapTy *const Arr[] = {&VMap}; |
792 | privateUpdateExitBlocksForClonedLoop(ExitBlocks, std::begin(Arr), |
793 | std::end(Arr), DT); |
794 | } |
795 | |
796 | void MemorySSAUpdater::updateExitBlocksForClonedLoop( |
797 | ArrayRef<BasicBlock *> ExitBlocks, |
798 | ArrayRef<std::unique_ptr<ValueToValueMapTy>> VMaps, DominatorTree &DT) { |
799 | auto GetPtr = [&](const std::unique_ptr<ValueToValueMapTy> &I) { |
800 | return I.get(); |
801 | }; |
802 | using MappedIteratorType = |
803 | mapped_iterator<const std::unique_ptr<ValueToValueMapTy> *, |
804 | decltype(GetPtr)>; |
805 | auto MapBegin = MappedIteratorType(VMaps.begin(), GetPtr); |
806 | auto MapEnd = MappedIteratorType(VMaps.end(), GetPtr); |
807 | privateUpdateExitBlocksForClonedLoop(ExitBlocks, MapBegin, MapEnd, DT); |
808 | } |
809 | |
810 | void MemorySSAUpdater::applyUpdates(ArrayRef<CFGUpdate> Updates, |
811 | DominatorTree &DT, bool UpdateDT) { |
812 | SmallVector<CFGUpdate, 4> DeleteUpdates; |
813 | SmallVector<CFGUpdate, 4> RevDeleteUpdates; |
814 | SmallVector<CFGUpdate, 4> InsertUpdates; |
815 | for (auto &Update : Updates) { |
816 | if (Update.getKind() == DT.Insert) |
817 | InsertUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()}); |
818 | else { |
819 | DeleteUpdates.push_back({DT.Delete, Update.getFrom(), Update.getTo()}); |
820 | RevDeleteUpdates.push_back({DT.Insert, Update.getFrom(), Update.getTo()}); |
821 | } |
822 | } |
823 | |
824 | if (!DeleteUpdates.empty()) { |
825 | if (!UpdateDT) { |
826 | SmallVector<CFGUpdate, 0> Empty; |
827 | |
828 | |
829 | DT.applyUpdates(Empty, RevDeleteUpdates); |
830 | } else { |
831 | |
832 | DT.applyUpdates(Updates, RevDeleteUpdates); |
833 | } |
834 | |
835 | |
836 | |
837 | |
838 | |
839 | GraphDiff<BasicBlock *> GD(RevDeleteUpdates); |
840 | applyInsertUpdates(InsertUpdates, DT, &GD); |
841 | |
842 | |
843 | DT.applyUpdates(DeleteUpdates); |
844 | } else { |
845 | if (UpdateDT) |
846 | DT.applyUpdates(Updates); |
847 | GraphDiff<BasicBlock *> GD; |
848 | applyInsertUpdates(InsertUpdates, DT, &GD); |
849 | } |
850 | |
851 | |
852 | for (auto &Update : DeleteUpdates) |
853 | removeEdge(Update.getFrom(), Update.getTo()); |
854 | } |
855 | |
856 | void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates, |
857 | DominatorTree &DT) { |
858 | GraphDiff<BasicBlock *> GD; |
859 | applyInsertUpdates(Updates, DT, &GD); |
860 | } |
861 | |
862 | void MemorySSAUpdater::applyInsertUpdates(ArrayRef<CFGUpdate> Updates, |
863 | DominatorTree &DT, |
864 | const GraphDiff<BasicBlock *> *GD) { |
865 | |
866 | auto GetLastDef = [&](BasicBlock *BB) -> MemoryAccess * { |
867 | while (true) { |
868 | MemorySSA::DefsList *Defs = MSSA->getWritableBlockDefs(BB); |
869 | |
870 | if (Defs) |
871 | return &*(--Defs->end()); |
872 | |
873 | |
874 | unsigned Count = 0; |
875 | BasicBlock *Pred = nullptr; |
876 | for (auto *Pi : GD->template getChildren<true>(BB)) { |
877 | Pred = Pi; |
878 | Count++; |
879 | if (Count == 2) |
880 | break; |
881 | } |
882 | |
883 | |
884 | if (Count != 1) { |
885 | |
886 | |
887 | |
888 | if (!DT.getNode(BB)) |
889 | return MSSA->getLiveOnEntryDef(); |
890 | if (auto *IDom = DT.getNode(BB)->getIDom()) |
891 | if (IDom->getBlock() != BB) { |
892 | BB = IDom->getBlock(); |
893 | continue; |
894 | } |
895 | return MSSA->getLiveOnEntryDef(); |
896 | } else { |
897 | |
898 | assert(Count == 1 && Pred && "Single predecessor expected."); |
899 | |
900 | if (!DT.getNode(BB)) |
901 | return MSSA->getLiveOnEntryDef(); |
902 | BB = Pred; |
903 | } |
904 | }; |
905 | llvm_unreachable("Unable to get last definition."); |
906 | }; |
907 | |
908 | |
909 | |
910 | |
911 | auto FindNearestCommonDominator = |
912 | [&](const SmallSetVector<BasicBlock *, 2> &BBSet) -> BasicBlock * { |
913 | BasicBlock *PrevIDom = *BBSet.begin(); |
914 | for (auto *BB : BBSet) |
| 13 | Assuming '__begin1' is not equal to '__end1'
915 | PrevIDom = DT.findNearestCommonDominator(PrevIDom, BB); |
| 14 | Calling 'DominatorTreeBase::findNearestCommonDominator'
916 | return PrevIDom; |
917 | }; |
918 | |
919 | |
920 | |
921 | auto GetNoLongerDomBlocks = |
922 | [&](BasicBlock *PrevIDom, BasicBlock *CurrIDom, |
923 | SmallVectorImpl<BasicBlock *> &BlocksPrevDom) { |
924 | if (PrevIDom == CurrIDom) |
925 | return; |
926 | BlocksPrevDom.push_back(PrevIDom); |
927 | BasicBlock *NextIDom = PrevIDom; |
928 | while (BasicBlock *UpIDom = |
929 | DT.getNode(NextIDom)->getIDom()->getBlock()) { |
930 | if (UpIDom == CurrIDom) |
931 | break; |
932 | BlocksPrevDom.push_back(UpIDom); |
933 | NextIDom = UpIDom; |
934 | } |
935 | }; |
936 | |
937 | |
938 | |
939 | |
940 | |
941 | |
942 | |
943 | |
944 | |
945 | |
946 | |
947 | |
948 | |
949 | |
950 | |
951 | struct PredInfo { |
952 | SmallSetVector<BasicBlock *, 2> Added; |
953 | SmallSetVector<BasicBlock *, 2> Prev; |
954 | }; |
955 | SmallDenseMap<BasicBlock *, PredInfo> PredMap; |
956 | |
957 | for (auto &Edge : Updates) { |
| 1 | Assuming '__begin1' is equal to '__end1'
958 | BasicBlock *BB = Edge.getTo(); |
959 | auto &AddedBlockSet = PredMap[BB].Added; |
960 | AddedBlockSet.insert(Edge.getFrom()); |
961 | } |
962 | |
963 | |
964 | SmallDenseMap<std::pair<BasicBlock *, BasicBlock *>, int> EdgeCountMap; |
965 | SmallPtrSet<BasicBlock *, 2> NewBlocks; |
966 | for (auto &BBPredPair : PredMap) { |
967 | auto *BB = BBPredPair.first; |
968 | const auto &AddedBlockSet = BBPredPair.second.Added; |
969 | auto &PrevBlockSet = BBPredPair.second.Prev; |
970 | for (auto *Pi : GD->template getChildren<true>(BB)) { |
971 | if (!AddedBlockSet.count(Pi)) |
972 | PrevBlockSet.insert(Pi); |
973 | EdgeCountMap[{Pi, BB}]++; |
974 | } |
975 | |
976 | if (PrevBlockSet.empty()) { |
977 | assert(pred_size(BB) == AddedBlockSet.size() && "Duplicate edges added."); |
978 | LLVM_DEBUG( |
979 | dbgs() |
980 | << "Adding a predecessor to a block with no predecessors. " |
981 | "This must be an edge added to a new, likely cloned, block. " |
982 | "Its memory accesses must be already correct, assuming completed " |
983 | "via the updateExitBlocksForClonedLoop API. " |
984 | "Assert a single such edge is added so no phi addition or " |
985 | "additional processing is required.\n"); |
986 | assert(AddedBlockSet.size() == 1 && |
987 | "Can only handle adding one predecessor to a new block."); |
988 | |
989 | |
990 | NewBlocks.insert(BB); |
991 | } |
992 | } |
993 | |
994 | for (auto *BB : NewBlocks) |
995 | PredMap.erase(BB); |
996 | |
997 | SmallVector<BasicBlock *, 16> BlocksWithDefsToReplace; |
998 | SmallVector<WeakVH, 8> InsertedPhis; |
999 | |
1000 | |
1001 | |
1002 | for (auto &Edge : Updates) { |
| 2 | Assuming '__begin1' is equal to '__end1'
1003 | BasicBlock *BB = Edge.getTo(); |
1004 | if (PredMap.count(BB) && !MSSA->getMemoryAccess(BB)) |
1005 | InsertedPhis.push_back(MSSA->createMemoryPhi(BB)); |
1006 | } |
1007 | |
1008 | |
1009 | for (auto &BBPredPair : PredMap) { |
1010 | auto *BB = BBPredPair.first; |
1011 | const auto &PrevBlockSet = BBPredPair.second.Prev; |
1012 | const auto &AddedBlockSet = BBPredPair.second.Added; |
1013 | assert(!PrevBlockSet.empty() && |
1014 | "At least one previous predecessor must exist."); |
1015 | |
1016 | |
1017 | |
1018 | |
1019 | |
1020 | |
1021 | |
1022 | SmallDenseMap<BasicBlock *, MemoryAccess *> LastDefAddedPred; |
1023 | for (auto *AddedPred : AddedBlockSet) { |
| 3 | Assuming '__begin2' is equal to '__end2'
1024 | auto *DefPn = GetLastDef(AddedPred); |
1025 | assert(DefPn != nullptr && "Unable to find last definition."); |
1026 | LastDefAddedPred[AddedPred] = DefPn; |
1027 | } |
1028 | |
1029 | MemoryPhi *NewPhi = MSSA->getMemoryAccess(BB); |
1030 | |
1031 | |
1032 | if (NewPhi->getNumOperands()) { |
| 4 | Assuming the condition is false
1033 | for (auto *Pred : AddedBlockSet) { |
1034 | auto *LastDefForPred = LastDefAddedPred[Pred]; |
1035 | for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I) |
1036 | NewPhi->addIncoming(LastDefForPred, Pred); |
1037 | } |
1038 | } else { |
1039 | |
1040 | |
1041 | auto *P1 = *PrevBlockSet.begin(); |
1042 | MemoryAccess *DefP1 = GetLastDef(P1); |
1043 | |
1044 | |
1045 | |
1046 | bool InsertPhi = false; |
1047 | for (auto LastDefPredPair : LastDefAddedPred) |
1048 | if (DefP1 != LastDefPredPair.second) { |
| 6 | Assuming 'DefP1' is not equal to field 'second'
1049 | InsertPhi = true; |
1050 | break; |
| 8 | Execution continues on line 1052
1051 | } |
1052 | if (!InsertPhi) { |
1053 | |
1054 | |
1055 | |
1056 | NewPhi->replaceAllUsesWith(DefP1); |
1057 | removeMemoryAccess(NewPhi); |
1058 | continue; |
1059 | } |
1060 | |
1061 | |
1062 | |
1063 | |
1064 | for (auto *Pred : AddedBlockSet) { |
| 10 | Assuming '__begin3' is equal to '__end3'
1065 | auto *LastDefForPred = LastDefAddedPred[Pred]; |
1066 | for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I) |
1067 | NewPhi->addIncoming(LastDefForPred, Pred); |
1068 | } |
1069 | for (auto *Pred : PrevBlockSet) |
| 11 | Assuming '__begin3' is equal to '__end3'
1070 | for (int I = 0, E = EdgeCountMap[{Pred, BB}]; I < E; ++I) |
1071 | NewPhi->addIncoming(DefP1, Pred); |
1072 | } |
1073 | |
1074 | |
1075 | |
1076 | assert(DT.getNode(BB)->getIDom() && "BB does not have valid idom"); |
1077 | BasicBlock *PrevIDom = FindNearestCommonDominator(PrevBlockSet); |
1078 | assert(PrevIDom && "Previous IDom should exists"); |
1079 | BasicBlock *NewIDom = DT.getNode(BB)->getIDom()->getBlock(); |
1080 | assert(NewIDom && "BB should have a new valid idom"); |
1081 | assert(DT.dominates(NewIDom, PrevIDom) && |
1082 | "New idom should dominate old idom"); |
1083 | GetNoLongerDomBlocks(PrevIDom, NewIDom, BlocksWithDefsToReplace); |
1084 | } |
1085 | |
1086 | tryRemoveTrivialPhis(InsertedPhis); |
1087 | |
1088 | |
1089 | SmallVector<BasicBlock *, 8> BlocksToProcess; |
1090 | for (auto &VH : InsertedPhis) |
1091 | if (auto *MPhi = cast_or_null<MemoryPhi>(VH)) |
1092 | BlocksToProcess.push_back(MPhi->getBlock()); |
1093 | |
1094 | |
1095 | SmallVector<BasicBlock *, 32> IDFBlocks; |
1096 | if (!BlocksToProcess.empty()) { |
1097 | ForwardIDFCalculator IDFs(DT, GD); |
1098 | SmallPtrSet<BasicBlock *, 16> DefiningBlocks(BlocksToProcess.begin(), |
1099 | BlocksToProcess.end()); |
1100 | IDFs.setDefiningBlocks(DefiningBlocks); |
1101 | IDFs.calculate(IDFBlocks); |
1102 | |
1103 | SmallSetVector<MemoryPhi *, 4> PhisToFill; |
1104 | |
1105 | for (auto *BBIDF : IDFBlocks) |
1106 | if (!MSSA->getMemoryAccess(BBIDF)) { |
1107 | auto *IDFPhi = MSSA->createMemoryPhi(BBIDF); |
1108 | InsertedPhis.push_back(IDFPhi); |
1109 | PhisToFill.insert(IDFPhi); |
1110 | } |
1111 | |
1112 | for (auto *BBIDF : IDFBlocks) { |
1113 | auto *IDFPhi = MSSA->getMemoryAccess(BBIDF); |
1114 | assert(IDFPhi && "Phi must exist"); |
1115 | if (!PhisToFill.count(IDFPhi)) { |
1116 | |
1117 | |
1118 | for (unsigned I = 0, E = IDFPhi->getNumIncomingValues(); I < E; ++I) |
1119 | IDFPhi->setIncomingValue(I, GetLastDef(IDFPhi->getIncomingBlock(I))); |
1120 | } else { |
1121 | for (auto *Pi : GD->template getChildren<true>(BBIDF)) |
1122 | IDFPhi->addIncoming(GetLastDef(Pi), Pi); |
1123 | } |
1124 | } |
1125 | } |
1126 | |
1127 | |
1128 | |
1129 | |
1130 | for (auto *BlockWithDefsToReplace : BlocksWithDefsToReplace) { |
1131 | if (auto DefsList = MSSA->getWritableBlockDefs(BlockWithDefsToReplace)) { |
1132 | for (auto &DefToReplaceUses : *DefsList) { |
1133 | BasicBlock *DominatingBlock = DefToReplaceUses.getBlock(); |
1134 | Value::use_iterator UI = DefToReplaceUses.use_begin(), |
1135 | E = DefToReplaceUses.use_end(); |
1136 | for (; UI != E;) { |
1137 | Use &U = *UI; |
1138 | ++UI; |
1139 | MemoryAccess *Usr = cast<MemoryAccess>(U.getUser()); |
1140 | if (MemoryPhi *UsrPhi = dyn_cast<MemoryPhi>(Usr)) { |
1141 | BasicBlock *DominatedBlock = UsrPhi->getIncomingBlock(U); |
1142 | if (!DT.dominates(DominatingBlock, DominatedBlock)) |
1143 | U.set(GetLastDef(DominatedBlock)); |
1144 | } else { |
1145 | BasicBlock *DominatedBlock = Usr->getBlock(); |
1146 | if (!DT.dominates(DominatingBlock, DominatedBlock)) { |
1147 | if (auto *DomBlPhi = MSSA->getMemoryAccess(DominatedBlock)) |
1148 | U.set(DomBlPhi); |
1149 | else { |
1150 | auto *IDom = DT.getNode(DominatedBlock)->getIDom(); |
1151 | assert(IDom && "Block must have a valid IDom."); |
1152 | U.set(GetLastDef(IDom->getBlock())); |
1153 | } |
1154 | cast<MemoryUseOrDef>(Usr)->resetOptimized(); |
1155 | } |
1156 | } |
1157 | } |
1158 | } |
1159 | } |
1160 | } |
1161 | tryRemoveTrivialPhis(InsertedPhis); |
1162 | } |
1163 | |
1164 | |
1165 | template <class WhereType> |
1166 | void MemorySSAUpdater::moveTo(MemoryUseOrDef *What, BasicBlock *BB, |
1167 | WhereType Where) { |
1168 | |
1169 | for (auto *U : What->users()) |
1170 | if (MemoryPhi *PhiUser = dyn_cast<MemoryPhi>(U)) |
1171 | NonOptPhis.insert(PhiUser); |
1172 | |
1173 | |
1174 | What->replaceAllUsesWith(What->getDefiningAccess()); |
1175 | |
1176 | |
1177 | MSSA->moveTo(What, BB, Where); |
1178 | |
1179 | |
1180 | if (auto *MD = dyn_cast<MemoryDef>(What)) |
1181 | insertDef(MD, true); |
1182 | else |
1183 | insertUse(cast<MemoryUse>(What), true); |
1184 | |
1185 | |
1186 | |
1187 | NonOptPhis.clear(); |
1188 | } |
1189 | |
1190 | |
1191 | void MemorySSAUpdater::moveBefore(MemoryUseOrDef *What, MemoryUseOrDef *Where) { |
1192 | moveTo(What, Where->getBlock(), Where->getIterator()); |
1193 | } |
1194 | |
1195 | |
1196 | void MemorySSAUpdater::moveAfter(MemoryUseOrDef *What, MemoryUseOrDef *Where) { |
1197 | moveTo(What, Where->getBlock(), ++Where->getIterator()); |
1198 | } |
1199 | |
1200 | void MemorySSAUpdater::moveToPlace(MemoryUseOrDef *What, BasicBlock *BB, |
1201 | MemorySSA::InsertionPlace Where) { |
1202 | if (Where != MemorySSA::InsertionPlace::BeforeTerminator) |
1203 | return moveTo(What, BB, Where); |
1204 | |
1205 | if (auto *Where = MSSA->getMemoryAccess(BB->getTerminator())) |
1206 | return moveBefore(What, Where); |
1207 | else |
1208 | return moveTo(What, BB, MemorySSA::InsertionPlace::End); |
1209 | } |
1210 | |
1211 | |
1212 | void MemorySSAUpdater::moveAllAccesses(BasicBlock *From, BasicBlock *To, |
1213 | Instruction *Start) { |
1214 | |
1215 | MemorySSA::AccessList *Accs = MSSA->getWritableBlockAccesses(From); |
1216 | if (!Accs) |
1217 | return; |
1218 | |
1219 | assert(Start->getParent() == To && "Incorrect Start instruction"); |
1220 | MemoryAccess *FirstInNew = nullptr; |
1221 | for (Instruction &I : make_range(Start->getIterator(), To->end())) |
1222 | if ((FirstInNew = MSSA->getMemoryAccess(&I))) |
1223 | break; |
1224 | if (FirstInNew) { |
1225 | auto *MUD = cast<MemoryUseOrDef>(FirstInNew); |
1226 | do { |
1227 | auto NextIt = ++MUD->getIterator(); |
1228 | MemoryUseOrDef *NextMUD = (!Accs || NextIt == Accs->end()) |
1229 | ? nullptr |
1230 | : cast<MemoryUseOrDef>(&*NextIt); |
1231 | MSSA->moveTo(MUD, To, MemorySSA::End); |
1232 | |
1233 | |
1234 | Accs = MSSA->getWritableBlockAccesses(From); |
1235 | MUD = NextMUD; |
1236 | } while (MUD); |
1237 | } |
1238 | |
1239 | |
1240 | |
1241 | auto *Defs = MSSA->getWritableBlockDefs(From); |
1242 | if (Defs && !Defs->empty()) |
1243 | if (auto *Phi = dyn_cast<MemoryPhi>(&*Defs->begin())) |
1244 | tryRemoveTrivialPhi(Phi); |
1245 | } |
1246 | |
1247 | void MemorySSAUpdater::moveAllAfterSpliceBlocks(BasicBlock *From, |
1248 | BasicBlock *To, |
1249 | Instruction *Start) { |
1250 | assert(MSSA->getBlockAccesses(To) == nullptr && |
1251 | "To block is expected to be free of MemoryAccesses."); |
1252 | moveAllAccesses(From, To, Start); |
1253 | for (BasicBlock *Succ : successors(To)) |
1254 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ)) |
1255 | MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To); |
1256 | } |
1257 | |
1258 | void MemorySSAUpdater::moveAllAfterMergeBlocks(BasicBlock *From, BasicBlock *To, |
1259 | Instruction *Start) { |
1260 | assert(From->getUniquePredecessor() == To && |
1261 | "From block is expected to have a single predecessor (To)."); |
1262 | moveAllAccesses(From, To, Start); |
1263 | for (BasicBlock *Succ : successors(From)) |
1264 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Succ)) |
1265 | MPhi->setIncomingBlock(MPhi->getBasicBlockIndex(From), To); |
1266 | } |
1267 | |
1268 | void MemorySSAUpdater::wireOldPredecessorsToNewImmediatePredecessor( |
1269 | BasicBlock *Old, BasicBlock *New, ArrayRef<BasicBlock *> Preds, |
1270 | bool IdenticalEdgesWereMerged) { |
1271 | assert(!MSSA->getWritableBlockAccesses(New) && |
1272 | "Access list should be null for a new block."); |
1273 | MemoryPhi *Phi = MSSA->getMemoryAccess(Old); |
1274 | if (!Phi) |
1275 | return; |
1276 | if (Old->hasNPredecessors(1)) { |
1277 | assert(pred_size(New) == Preds.size() && |
1278 | "Should have moved all predecessors."); |
1279 | MSSA->moveTo(Phi, New, MemorySSA::Beginning); |
1280 | } else { |
1281 | assert(!Preds.empty() && "Must be moving at least one predecessor to the " |
1282 | "new immediate predecessor."); |
1283 | MemoryPhi *NewPhi = MSSA->createMemoryPhi(New); |
1284 | SmallPtrSet<BasicBlock *, 16> PredsSet(Preds.begin(), Preds.end()); |
1285 | |
1286 | |
1287 | if (!IdenticalEdgesWereMerged) |
1288 | assert(PredsSet.size() == Preds.size() && |
1289 | "If identical edges were not merged, we cannot have duplicate " |
1290 | "blocks in the predecessors"); |
1291 | Phi->unorderedDeleteIncomingIf([&](MemoryAccess *MA, BasicBlock *B) { |
1292 | if (PredsSet.count(B)) { |
1293 | NewPhi->addIncoming(MA, B); |
1294 | if (!IdenticalEdgesWereMerged) |
1295 | PredsSet.erase(B); |
1296 | return true; |
1297 | } |
1298 | return false; |
1299 | }); |
1300 | Phi->addIncoming(NewPhi, New); |
1301 | tryRemoveTrivialPhi(NewPhi); |
1302 | } |
1303 | } |
1304 | |
1305 | void MemorySSAUpdater::removeMemoryAccess(MemoryAccess *MA, bool OptimizePhis) { |
1306 | assert(!MSSA->isLiveOnEntryDef(MA) && |
1307 | "Trying to remove the live on entry def"); |
1308 | |
1309 | |
1310 | MemoryAccess *NewDefTarget = nullptr; |
1311 | if (MemoryPhi *MP = dyn_cast<MemoryPhi>(MA)) { |
1312 | |
1313 | |
1314 | |
1315 | |
1316 | |
1317 | NewDefTarget = onlySingleValue(MP); |
1318 | assert((NewDefTarget || MP->use_empty()) && |
1319 | "We can't delete this memory phi"); |
1320 | } else { |
1321 | NewDefTarget = cast<MemoryUseOrDef>(MA)->getDefiningAccess(); |
1322 | } |
1323 | |
1324 | SmallSetVector<MemoryPhi *, 4> PhisToCheck; |
1325 | |
1326 | |
1327 | if (!isa<MemoryUse>(MA) && !MA->use_empty()) { |
1328 | |
1329 | |
1330 | |
1331 | |
1332 | |
1333 | |
1334 | |
1335 | |
1336 | if (MA->hasValueHandle()) |
1337 | ValueHandleBase::ValueIsRAUWd(MA, NewDefTarget); |
1338 | |
1339 | |
1340 | |
1341 | assert(NewDefTarget != MA && "Going into an infinite loop"); |
1342 | while (!MA->use_empty()) { |
1343 | Use &U = *MA->use_begin(); |
1344 | if (auto *MUD = dyn_cast<MemoryUseOrDef>(U.getUser())) |
1345 | MUD->resetOptimized(); |
1346 | if (OptimizePhis) |
1347 | if (MemoryPhi *MP = dyn_cast<MemoryPhi>(U.getUser())) |
1348 | PhisToCheck.insert(MP); |
1349 | U.set(NewDefTarget); |
1350 | } |
1351 | } |
1352 | |
1353 | |
1354 | |
1355 | MSSA->removeFromLookups(MA); |
1356 | MSSA->removeFromLists(MA); |
1357 | |
1358 | |
1359 | if (!PhisToCheck.empty()) { |
1360 | SmallVector<WeakVH, 16> PhisToOptimize{PhisToCheck.begin(), |
1361 | PhisToCheck.end()}; |
1362 | PhisToCheck.clear(); |
1363 | |
1364 | unsigned PhisSize = PhisToOptimize.size(); |
1365 | while (PhisSize-- > 0) |
1366 | if (MemoryPhi *MP = |
1367 | cast_or_null<MemoryPhi>(PhisToOptimize.pop_back_val())) |
1368 | tryRemoveTrivialPhi(MP); |
1369 | } |
1370 | } |
1371 | |
1372 | void MemorySSAUpdater::removeBlocks( |
1373 | const SmallSetVector<BasicBlock *, 8> &DeadBlocks) { |
1374 | |
1375 | for (BasicBlock *BB : DeadBlocks) { |
1376 | Instruction *TI = BB->getTerminator(); |
1377 | assert(TI && "Basic block expected to have a terminator instruction"); |
1378 | for (BasicBlock *Succ : successors(TI)) |
1379 | if (!DeadBlocks.count(Succ)) |
1380 | if (MemoryPhi *MP = MSSA->getMemoryAccess(Succ)) { |
1381 | MP->unorderedDeleteIncomingBlock(BB); |
1382 | tryRemoveTrivialPhi(MP); |
1383 | } |
1384 | |
1385 | if (MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB)) |
1386 | for (MemoryAccess &MA : *Acc) |
1387 | MA.dropAllReferences(); |
1388 | } |
1389 | |
1390 | |
1391 | for (BasicBlock *BB : DeadBlocks) { |
1392 | MemorySSA::AccessList *Acc = MSSA->getWritableBlockAccesses(BB); |
1393 | if (!Acc) |
1394 | continue; |
1395 | for (MemoryAccess &MA : llvm::make_early_inc_range(*Acc)) { |
1396 | MSSA->removeFromLookups(&MA); |
1397 | MSSA->removeFromLists(&MA); |
1398 | } |
1399 | } |
1400 | } |
1401 | |
1402 | void MemorySSAUpdater::tryRemoveTrivialPhis(ArrayRef<WeakVH> UpdatedPHIs) { |
1403 | for (auto &VH : UpdatedPHIs) |
1404 | if (auto *MPhi = cast_or_null<MemoryPhi>(VH)) |
1405 | tryRemoveTrivialPhi(MPhi); |
1406 | } |
1407 | |
1408 | void MemorySSAUpdater::changeToUnreachable(const Instruction *I) { |
1409 | const BasicBlock *BB = I->getParent(); |
1410 | |
1411 | auto BBI = I->getIterator(), BBE = BB->end(); |
1412 | |
1413 | |
1414 | while (BBI != BBE) |
1415 | removeMemoryAccess(&*(BBI++)); |
1416 | |
1417 | SmallVector<WeakVH, 16> UpdatedPHIs; |
1418 | for (const BasicBlock *Successor : successors(BB)) { |
1419 | removeDuplicatePhiEdgesBetween(BB, Successor); |
1420 | if (MemoryPhi *MPhi = MSSA->getMemoryAccess(Successor)) { |
1421 | MPhi->unorderedDeleteIncomingBlock(BB); |
1422 | UpdatedPHIs.push_back(MPhi); |
1423 | } |
1424 | } |
1425 | |
1426 | tryRemoveTrivialPhis(UpdatedPHIs); |
1427 | } |
1428 | |
1429 | MemoryAccess *MemorySSAUpdater::createMemoryAccessInBB( |
1430 | Instruction *I, MemoryAccess *Definition, const BasicBlock *BB, |
1431 | MemorySSA::InsertionPlace Point) { |
1432 | MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition); |
1433 | MSSA->insertIntoListsForBlock(NewAccess, BB, Point); |
1434 | return NewAccess; |
1435 | } |
1436 | |
1437 | MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessBefore( |
1438 | Instruction *I, MemoryAccess *Definition, MemoryUseOrDef *InsertPt) { |
1439 | assert(I->getParent() == InsertPt->getBlock() && |
1440 | "New and old access must be in the same block"); |
1441 | MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition); |
1442 | MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(), |
1443 | InsertPt->getIterator()); |
1444 | return NewAccess; |
1445 | } |
1446 | |
1447 | MemoryUseOrDef *MemorySSAUpdater::createMemoryAccessAfter( |
1448 | Instruction *I, MemoryAccess *Definition, MemoryAccess *InsertPt) { |
1449 | assert(I->getParent() == InsertPt->getBlock() && |
1450 | "New and old access must be in the same block"); |
1451 | MemoryUseOrDef *NewAccess = MSSA->createDefinedAccess(I, Definition); |
1452 | MSSA->insertIntoListsBefore(NewAccess, InsertPt->getBlock(), |
1453 | ++InsertPt->getIterator()); |
1454 | return NewAccess; |
1455 | } |
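The createMemoryAccess* helpers above are the entry points a transformation pass drives after it creates or clones an instruction. As a rough illustration only — the pass context and the OldStore/NewStore names are hypothetical and not part of this report, only the MemorySSAUpdater/MemorySSA calls mirror the listing above — a caller typically pairs createMemoryAccessBefore/After with insertDef (or insertUse) and retires the replaced access through removeMemoryAccess:

    // Hedged sketch of typical MemorySSAUpdater usage; the instructions and
    // the enclosing pass are placeholders.
    MemorySSAUpdater MSSAU(&MSSA);
    auto *OldAcc = cast<MemoryDef>(MSSA.getMemoryAccess(&OldStore));
    // Give the replacement store a MemoryDef just before the old one ...
    MemoryUseOrDef *NewAcc = MSSAU.createMemoryAccessBefore(
        &NewStore, OldAcc->getDefiningAccess(), OldAcc);
    // ... let the updater rewire downstream defs/uses and add any needed phis ...
    MSSAU.insertDef(cast<MemoryDef>(NewAcc), /*RenameUses=*/true);
    // ... then drop the access of the instruction being replaced.
    MSSAU.removeMemoryAccess(OldAcc);

The second listing that follows restarts at line 1; its include guard (LLVM_SUPPORT_GENERICDOMTREE_H) identifies it as llvm/Support/GenericDomTree.h, the header the analyzer steps into at path note 14, "Calling 'DominatorTreeBase::findNearestCommonDominator'".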
1 | |
2 | |
3 | |
4 | |
5 | |
6 | |
7 | |
8 | |
9 | |
10 | |
11 | |
12 | |
13 | |
14 | |
15 | |
16 | |
17 | |
18 | |
19 | |
20 | |
21 | |
22 | |
23 | #ifndef LLVM_SUPPORT_GENERICDOMTREE_H |
24 | #define LLVM_SUPPORT_GENERICDOMTREE_H |
25 | |
26 | #include "llvm/ADT/DenseMap.h" |
27 | #include "llvm/ADT/GraphTraits.h" |
28 | #include "llvm/ADT/STLExtras.h" |
29 | #include "llvm/ADT/SmallPtrSet.h" |
30 | #include "llvm/ADT/SmallVector.h" |
31 | #include "llvm/Support/CFGDiff.h" |
32 | #include "llvm/Support/CFGUpdate.h" |
33 | #include "llvm/Support/raw_ostream.h" |
34 | #include <algorithm> |
35 | #include <cassert> |
36 | #include <cstddef> |
37 | #include <iterator> |
38 | #include <memory> |
39 | #include <type_traits> |
40 | #include <utility> |
41 | |
42 | namespace llvm { |
43 | |
44 | template <typename NodeT, bool IsPostDom> |
45 | class DominatorTreeBase; |
46 | |
47 | namespace DomTreeBuilder { |
48 | template <typename DomTreeT> |
49 | struct SemiNCAInfo; |
50 | } |
51 | |
52 | |
53 | template <class NodeT> class DomTreeNodeBase { |
54 | friend class PostDominatorTree; |
55 | friend class DominatorTreeBase<NodeT, false>; |
56 | friend class DominatorTreeBase<NodeT, true>; |
57 | friend struct DomTreeBuilder::SemiNCAInfo<DominatorTreeBase<NodeT, false>>; |
58 | friend struct DomTreeBuilder::SemiNCAInfo<DominatorTreeBase<NodeT, true>>; |
59 | |
60 | NodeT *TheBB; |
61 | DomTreeNodeBase *IDom; |
62 | unsigned Level; |
63 | SmallVector<DomTreeNodeBase *, 4> Children; |
64 | mutable unsigned DFSNumIn = ~0; |
65 | mutable unsigned DFSNumOut = ~0; |
66 | |
67 | public: |
68 | DomTreeNodeBase(NodeT *BB, DomTreeNodeBase *iDom) |
69 | : TheBB(BB), IDom(iDom), Level(IDom ? IDom->Level + 1 : 0) {} |
70 | |
71 | using iterator = typename SmallVector<DomTreeNodeBase *, 4>::iterator; |
72 | using const_iterator = |
73 | typename SmallVector<DomTreeNodeBase *, 4>::const_iterator; |
74 | |
75 | iterator begin() { return Children.begin(); } |
76 | iterator end() { return Children.end(); } |
77 | const_iterator begin() const { return Children.begin(); } |
78 | const_iterator end() const { return Children.end(); } |
79 | |
80 | DomTreeNodeBase *const &back() const { return Children.back(); } |
81 | DomTreeNodeBase *&back() { return Children.back(); } |
82 | |
83 | iterator_range<iterator> children() { return make_range(begin(), end()); } |
84 | iterator_range<const_iterator> children() const { |
85 | return make_range(begin(), end()); |
86 | } |
87 | |
88 | NodeT *getBlock() const { return TheBB; } |
89 | DomTreeNodeBase *getIDom() const { return IDom; } |
90 | unsigned getLevel() const { return Level; } |
91 | |
92 | std::unique_ptr<DomTreeNodeBase> addChild( |
93 | std::unique_ptr<DomTreeNodeBase> C) { |
94 | Children.push_back(C.get()); |
95 | return C; |
96 | } |
97 | |
98 | bool isLeaf() const { return Children.empty(); } |
99 | size_t getNumChildren() const { return Children.size(); } |
100 | |
101 | void clearAllChildren() { Children.clear(); } |
102 | |
103 | bool compare(const DomTreeNodeBase *Other) const { |
104 | if (getNumChildren() != Other->getNumChildren()) |
105 | return true; |
106 | |
107 | if (Level != Other->Level) return true; |
108 | |
109 | SmallPtrSet<const NodeT *, 4> OtherChildren; |
110 | for (const DomTreeNodeBase *I : *Other) { |
111 | const NodeT *Nd = I->getBlock(); |
112 | OtherChildren.insert(Nd); |
113 | } |
114 | |
115 | for (const DomTreeNodeBase *I : *this) { |
116 | const NodeT *N = I->getBlock(); |
117 | if (OtherChildren.count(N) == 0) |
118 | return true; |
119 | } |
120 | return false; |
121 | } |
122 | |
123 | void setIDom(DomTreeNodeBase *NewIDom) { |
124 | assert(IDom && "No immediate dominator?"); |
125 | if (IDom == NewIDom) return; |
126 | |
127 | auto I = find(IDom->Children, this); |
128 | assert(I != IDom->Children.end() && |
129 | "Not in immediate dominator children set!"); |
130 | |
131 | IDom->Children.erase(I); |
132 | |
133 | |
134 | IDom = NewIDom; |
135 | IDom->Children.push_back(this); |
136 | |
137 | UpdateLevel(); |
138 | } |
139 | |
140 | |
141 | |
142 | |
143 | unsigned getDFSNumIn() const { return DFSNumIn; } |
144 | unsigned getDFSNumOut() const { return DFSNumOut; } |
145 | |
146 | private: |
147 | |
148 | |
149 | bool DominatedBy(const DomTreeNodeBase *other) const { |
150 | return this->DFSNumIn >= other->DFSNumIn && |
151 | this->DFSNumOut <= other->DFSNumOut; |
152 | } |
153 | |
154 | void UpdateLevel() { |
155 | assert(IDom); |
156 | if (Level == IDom->Level + 1) return; |
157 | |
158 | SmallVector<DomTreeNodeBase *, 64> WorkStack = {this}; |
159 | |
160 | while (!WorkStack.empty()) { |
161 | DomTreeNodeBase *Current = WorkStack.pop_back_val(); |
162 | Current->Level = Current->IDom->Level + 1; |
163 | |
164 | for (DomTreeNodeBase *C : *Current) { |
165 | assert(C->IDom); |
166 | if (C->Level != C->IDom->Level + 1) WorkStack.push_back(C); |
167 | } |
168 | } |
169 | } |
170 | }; |
171 | |
172 | template <class NodeT> |
173 | raw_ostream &operator<<(raw_ostream &O, const DomTreeNodeBase<NodeT> *Node) { |
174 | if (Node->getBlock()) |
175 | Node->getBlock()->printAsOperand(O, false); |
176 | else |
177 | O << " <<exit node>>"; |
178 | |
179 | O << " {" << Node->getDFSNumIn() << "," << Node->getDFSNumOut() << "} [" |
180 | << Node->getLevel() << "]\n"; |
181 | |
182 | return O; |
183 | } |
184 | |
185 | template <class NodeT> |
186 | void PrintDomTree(const DomTreeNodeBase<NodeT> *N, raw_ostream &O, |
187 | unsigned Lev) { |
188 | O.indent(2 * Lev) << "[" << Lev << "] " << N; |
189 | for (typename DomTreeNodeBase<NodeT>::const_iterator I = N->begin(), |
190 | E = N->end(); |
191 | I != E; ++I) |
192 | PrintDomTree<NodeT>(*I, O, Lev + 1); |
193 | } |
194 | |
195 | namespace DomTreeBuilder { |
196 | |
197 | template <typename DomTreeT> |
198 | void Calculate(DomTreeT &DT); |
199 | |
200 | template <typename DomTreeT> |
201 | void CalculateWithUpdates(DomTreeT &DT, |
202 | ArrayRef<typename DomTreeT::UpdateType> Updates); |
203 | |
204 | template <typename DomTreeT> |
205 | void InsertEdge(DomTreeT &DT, typename DomTreeT::NodePtr From, |
206 | typename DomTreeT::NodePtr To); |
207 | |
208 | template <typename DomTreeT> |
209 | void DeleteEdge(DomTreeT &DT, typename DomTreeT::NodePtr From, |
210 | typename DomTreeT::NodePtr To); |
211 | |
212 | template <typename DomTreeT> |
213 | void ApplyUpdates(DomTreeT &DT, |
214 | GraphDiff<typename DomTreeT::NodePtr, |
215 | DomTreeT::IsPostDominator> &PreViewCFG, |
216 | GraphDiff<typename DomTreeT::NodePtr, |
217 | DomTreeT::IsPostDominator> *PostViewCFG); |
218 | |
219 | template <typename DomTreeT> |
220 | bool Verify(const DomTreeT &DT, typename DomTreeT::VerificationLevel VL); |
221 | } |
222 | |
223 | |
224 | |
225 | |
226 | |
227 | template <typename NodeT, bool IsPostDom> |
228 | class DominatorTreeBase { |
229 | public: |
230 | static_assert(std::is_pointer<typename GraphTraits<NodeT *>::NodeRef>::value, |
231 | "Currently DominatorTreeBase supports only pointer nodes"); |
232 | using NodeType = NodeT; |
233 | using NodePtr = NodeT *; |
234 | using ParentPtr = decltype(std::declval<NodeT *>()->getParent()); |
235 | static_assert(std::is_pointer<ParentPtr>::value, |
236 | "Currently NodeT's parent must be a pointer type"); |
237 | using ParentType = std::remove_pointer_t<ParentPtr>; |
238 | static constexpr bool IsPostDominator = IsPostDom; |
239 | |
240 | using UpdateType = cfg::Update<NodePtr>; |
241 | using UpdateKind = cfg::UpdateKind; |
242 | static constexpr UpdateKind Insert = UpdateKind::Insert; |
243 | static constexpr UpdateKind Delete = UpdateKind::Delete; |
244 | |
245 | enum class VerificationLevel { Fast, Basic, Full }; |
246 | |
247 | protected: |
248 | |
249 | SmallVector<NodeT *, IsPostDom ? 4 : 1> Roots; |
250 | |
251 | using DomTreeNodeMapType = |
252 | DenseMap<NodeT *, std::unique_ptr<DomTreeNodeBase<NodeT>>>; |
253 | DomTreeNodeMapType DomTreeNodes; |
254 | DomTreeNodeBase<NodeT> *RootNode = nullptr; |
255 | ParentPtr Parent = nullptr; |
256 | |
257 | mutable bool DFSInfoValid = false; |
258 | mutable unsigned int SlowQueries = 0; |
259 | |
260 | friend struct DomTreeBuilder::SemiNCAInfo<DominatorTreeBase>; |
261 | |
262 | public: |
263 | DominatorTreeBase() {} |
264 | |
265 | DominatorTreeBase(DominatorTreeBase &&Arg) |
266 | : Roots(std::move(Arg.Roots)), |
267 | DomTreeNodes(std::move(Arg.DomTreeNodes)), |
268 | RootNode(Arg.RootNode), |
269 | Parent(Arg.Parent), |
270 | DFSInfoValid(Arg.DFSInfoValid), |
271 | SlowQueries(Arg.SlowQueries) { |
272 | Arg.wipe(); |
273 | } |
274 | |
275 | DominatorTreeBase &operator=(DominatorTreeBase &&RHS) { |
276 | Roots = std::move(RHS.Roots); |
277 | DomTreeNodes = std::move(RHS.DomTreeNodes); |
278 | RootNode = RHS.RootNode; |
279 | Parent = RHS.Parent; |
280 | DFSInfoValid = RHS.DFSInfoValid; |
281 | SlowQueries = RHS.SlowQueries; |
282 | RHS.wipe(); |
283 | return *this; |
284 | } |
285 | |
286 | DominatorTreeBase(const DominatorTreeBase &) = delete; |
287 | DominatorTreeBase &operator=(const DominatorTreeBase &) = delete; |
288 | |
289 | |
290 | |
291 | |
292 | |
293 | |
294 | using root_iterator = typename SmallVectorImpl<NodeT *>::iterator; |
295 | using const_root_iterator = typename SmallVectorImpl<NodeT *>::const_iterator; |
296 | |
297 | root_iterator root_begin() { return Roots.begin(); } |
298 | const_root_iterator root_begin() const { return Roots.begin(); } |
299 | root_iterator root_end() { return Roots.end(); } |
300 | const_root_iterator root_end() const { return Roots.end(); } |
301 | |
302 | size_t root_size() const { return Roots.size(); } |
303 | |
304 | iterator_range<root_iterator> roots() { |
305 | return make_range(root_begin(), root_end()); |
306 | } |
307 | iterator_range<const_root_iterator> roots() const { |
308 | return make_range(root_begin(), root_end()); |
309 | } |
310 | |
311 | |
312 | |
313 | bool isPostDominator() const { return IsPostDominator; } |
| 16 | Returning zero (loaded from 'IsPostDominator'), which participates in a condition later |
314 | |
315 | |
316 | |
317 | bool compare(const DominatorTreeBase &Other) const { |
318 | if (Parent != Other.Parent) return true; |
319 | |
320 | if (Roots.size() != Other.Roots.size()) |
321 | return true; |
322 | |
323 | if (!std::is_permutation(Roots.begin(), Roots.end(), Other.Roots.begin())) |
324 | return true; |
325 | |
326 | const DomTreeNodeMapType &OtherDomTreeNodes = Other.DomTreeNodes; |
327 | if (DomTreeNodes.size() != OtherDomTreeNodes.size()) |
328 | return true; |
329 | |
330 | for (const auto &DomTreeNode : DomTreeNodes) { |
331 | NodeT *BB = DomTreeNode.first; |
332 | typename DomTreeNodeMapType::const_iterator OI = |
333 | OtherDomTreeNodes.find(BB); |
334 | if (OI == OtherDomTreeNodes.end()) |
335 | return true; |
336 | |
337 | DomTreeNodeBase<NodeT> &MyNd = *DomTreeNode.second; |
338 | DomTreeNodeBase<NodeT> &OtherNd = *OI->second; |
339 | |
340 | if (MyNd.compare(&OtherNd)) |
341 | return true; |
342 | } |
343 | |
344 | return false; |
345 | } |
346 | |
347 | |
348 | |
349 | |
350 | |
351 | DomTreeNodeBase<NodeT> *getNode(const NodeT *BB) const { |
352 | auto I = DomTreeNodes.find(BB); |
353 | if (I != DomTreeNodes.end()) |
| 29 | Returning from 'operator!=' |
354 | return I->second.get(); |
355 | return nullptr; |
356 | } |
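Path steps 22, 29 and 32 interleaved above hinge on getNode() returning nullptr whenever BB is not a key in DomTreeNodes: find() yields end(), the comparison on line 353 fails, and the fall-through on line 355 hands back the null pointer that later initializes 'NodeA'. A minimal caller-side sketch, assuming the usual LLVM instantiation over BasicBlock (the helper name is illustrative and not part of the analyzed file):

    #include "llvm/IR/BasicBlock.h"
    #include "llvm/IR/Dominators.h"

    // Returns the tree node for BB, or nullptr if BB was never added to the
    // tree -- exactly the value the analyzer tracks into 'NodeA' below.
    llvm::DomTreeNode *lookupNode(const llvm::DominatorTree &DT,
                                  const llvm::BasicBlock *BB) {
      return DT.getNode(BB); // may be nullptr; callers must check before use
    }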
357 | |
358 | |
359 | DomTreeNodeBase<NodeT> *operator[](const NodeT *BB) const { |
360 | return getNode(BB); |
361 | } |
362 | |
363 | |
364 | |
365 | |
366 | |
367 | |
368 | |
369 | |
370 | DomTreeNodeBase<NodeT> *getRootNode() { return RootNode; } |
371 | const DomTreeNodeBase<NodeT> *getRootNode() const { return RootNode; } |
372 | |
373 | |
374 | void getDescendants(NodeT *R, SmallVectorImpl<NodeT *> &Result) const { |
375 | Result.clear(); |
376 | const DomTreeNodeBase<NodeT> *RN = getNode(R); |
377 | if (!RN) |
378 | return; |
379 | SmallVector<const DomTreeNodeBase<NodeT> *, 8> WL; |
380 | WL.push_back(RN); |
381 | |
382 | while (!WL.empty()) { |
383 | const DomTreeNodeBase<NodeT> *N = WL.pop_back_val(); |
384 | Result.push_back(N->getBlock()); |
385 | WL.append(N->begin(), N->end()); |
386 | } |
387 | } |
388 | |
389 | |
390 | |
391 | |
392 | bool properlyDominates(const DomTreeNodeBase<NodeT> *A, |
393 | const DomTreeNodeBase<NodeT> *B) const { |
394 | if (!A || !B) |
395 | return false; |
396 | if (A == B) |
397 | return false; |
398 | return dominates(A, B); |
399 | } |
400 | |
401 | bool properlyDominates(const NodeT *A, const NodeT *B) const; |
402 | |
403 | |
404 | |
405 | bool isReachableFromEntry(const NodeT *A) const { |
406 | assert(!this->isPostDominator() && |
407 | "This is not implemented for post dominators"); |
408 | return isReachableFromEntry(getNode(const_cast<NodeT *>(A))); |
409 | } |
410 | |
411 | bool isReachableFromEntry(const DomTreeNodeBase<NodeT> *A) const { return A; } |
412 | |
413 | |
414 | |
415 | |
416 | bool dominates(const DomTreeNodeBase<NodeT> *A, |
417 | const DomTreeNodeBase<NodeT> *B) const { |
418 | |
419 | if (B == A) |
420 | return true; |
421 | |
422 | |
423 | if (!isReachableFromEntry(B)) |
424 | return true; |
425 | |
426 | |
427 | if (!isReachableFromEntry(A)) |
428 | return false; |
429 | |
430 | if (B->getIDom() == A) return true; |
431 | |
432 | if (A->getIDom() == B) return false; |
433 | |
434 | |
435 | if (A->getLevel() >= B->getLevel()) return false; |
436 | |
437 | |
438 | |
439 | #ifdef EXPENSIVE_CHECKS |
440 | assert((!DFSInfoValid || |
441 | (dominatedBySlowTreeWalk(A, B) == B->DominatedBy(A))) && |
442 | "Tree walk disagrees with dfs numbers!"); |
443 | #endif |
444 | |
445 | if (DFSInfoValid) |
446 | return B->DominatedBy(A); |
447 | |
448 | |
449 | |
450 | SlowQueries++; |
451 | if (SlowQueries > 32) { |
452 | updateDFSNumbers(); |
453 | return B->DominatedBy(A); |
454 | } |
455 | |
456 | return dominatedBySlowTreeWalk(A, B); |
457 | } |
458 | |
459 | bool dominates(const NodeT *A, const NodeT *B) const; |
460 | |
461 | NodeT *getRoot() const { |
462 | assert(this->Roots.size() == 1 && "Should always have entry node!"); |
463 | return this->Roots[0]; |
464 | } |
465 | |
466 | |
467 | |
468 | NodeT *findNearestCommonDominator(NodeT *A, NodeT *B) const { |
469 | assert(A && B && "Pointers are not valid"); |
470 | assert(A->getParent() == B->getParent() && |
471 | "Two blocks are not in same function"); |
472 | |
473 | |
474 | |
475 | if (!isPostDominator()) { |
| 15 | Calling 'DominatorTreeBase::isPostDominator' |
| 17 | Returning from 'DominatorTreeBase::isPostDominator' |
476 | NodeT &Entry = A->getParent()->front(); |
477 | if (A == &Entry || B == &Entry) |
| 19 | Assuming the condition is false |
| 20 | Assuming the condition is false |
478 | return &Entry; |
479 | } |
480 | |
481 | DomTreeNodeBase<NodeT> *NodeA = getNode(A); |
| 22 | Calling 'DominatorTreeBase::getNode' |
| 32 | Returning from 'DominatorTreeBase::getNode' |
| 33 | 'NodeA' initialized here |
482 | DomTreeNodeBase<NodeT> *NodeB = getNode(B); |
483 | assert(NodeA && "A must be in the tree"); |
484 | assert(NodeB && "B must be in the tree"); |
485 | |
486 | |
487 | |
488 | while (NodeA != NodeB) { |
| 34 | Assuming 'NodeA' is equal to 'NodeB' |
| 35 | Loop condition is false. Execution continues on line 494 |
489 | if (NodeA->getLevel() < NodeB->getLevel()) std::swap(NodeA, NodeB); |
490 | |
491 | NodeA = NodeA->IDom; |
492 | } |
493 | |
494 | return NodeA->getBlock(); |
| 36 | Called C++ object pointer is null |
495 | } |
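Steps 15-36 spell out the reported defect: with isPostDominator() returning zero (step 16) the entry-block shortcut can be skipped, getNode() may return nullptr for a block that is not in the tree (steps 22-33), the asserts on lines 483-484 compile away under NDEBUG, the loop at line 488 exits immediately once NodeA equals NodeB (steps 34-35), and line 494 then dereferences the null 'NodeA' (step 36). A hedged sketch of a defensive wrapper over the public LLVM DominatorTree API (the wrapper name and the guard are illustrative, not part of the analyzed code):

    #include "llvm/IR/BasicBlock.h"
    #include "llvm/IR/Dominators.h"

    // Guarded variant: bail out before findNearestCommonDominator() can walk
    // from a null node, mirroring the asserts that disappear under NDEBUG.
    llvm::BasicBlock *safeNearestCommonDominator(const llvm::DominatorTree &DT,
                                                 llvm::BasicBlock *A,
                                                 llvm::BasicBlock *B) {
      if (!A || !B || !DT.getNode(A) || !DT.getNode(B))
        return nullptr; // block(s) unreachable or not in the tree
      return DT.findNearestCommonDominator(A, B);
    }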
496 | |
497 | const NodeT *findNearestCommonDominator(const NodeT *A, |
498 | const NodeT *B) const { |
499 | |
500 | |
501 | return findNearestCommonDominator(const_cast<NodeT *>(A), |
502 | const_cast<NodeT *>(B)); |
503 | } |
504 | |
505 | bool isVirtualRoot(const DomTreeNodeBase<NodeT> *A) const { |
506 | return isPostDominator() && !A->getBlock(); |
507 | } |
508 | |
509 | |
510 | |
511 | |
512 | |
513 | |
514 | |
515 | |
516 | |
517 | |
518 | |
519 | |
520 | |
521 | |
522 | |
523 | |
524 | |
525 | |
526 | |
527 | |
528 | |
529 | |
530 | |
531 | |
532 | |
533 | |
534 | |
535 | |
536 | |
537 | |
538 | |
539 | |
540 | |
541 | |
542 | |
543 | |
544 | void applyUpdates(ArrayRef<UpdateType> Updates) { |
545 | GraphDiff<NodePtr, IsPostDominator> PreViewCFG( |
546 | Updates, true); |
547 | DomTreeBuilder::ApplyUpdates(*this, PreViewCFG, nullptr); |
548 | } |
549 | |
550 | |
551 | |
552 | |
553 | |
554 | |
555 | void applyUpdates(ArrayRef<UpdateType> Updates, |
556 | ArrayRef<UpdateType> PostViewUpdates) { |
557 | if (Updates.empty()) { |
558 | GraphDiff<NodePtr, IsPostDom> PostViewCFG(PostViewUpdates); |
559 | DomTreeBuilder::ApplyUpdates(*this, PostViewCFG, &PostViewCFG); |
560 | } else { |
561 | |
562 | |
563 | |
564 | |
565 | |
566 | SmallVector<UpdateType> AllUpdates(Updates.begin(), Updates.end()); |
567 | append_range(AllUpdates, PostViewUpdates); |
568 | GraphDiff<NodePtr, IsPostDom> PreViewCFG(AllUpdates, |
569 | true); |
570 | GraphDiff<NodePtr, IsPostDom> PostViewCFG(PostViewUpdates); |
571 | DomTreeBuilder::ApplyUpdates(*this, PreViewCFG, &PostViewCFG); |
572 | } |
573 | } |
574 | |
575 | |
576 | |
577 | |
578 | |
579 | |
580 | |
581 | |
582 | |
583 | |
584 | void insertEdge(NodeT *From, NodeT *To) { |
585 | assert(From); |
586 | assert(To); |
587 | assert(From->getParent() == Parent); |
588 | assert(To->getParent() == Parent); |
589 | DomTreeBuilder::InsertEdge(*this, From, To); |
590 | } |
591 | |
592 | |
593 | |
594 | |
595 | |
596 | |
597 | |
598 | |
599 | |
600 | |
601 | |
602 | void deleteEdge(NodeT *From, NodeT *To) { |
603 | assert(From); |
604 | assert(To); |
605 | assert(From->getParent() == Parent); |
606 | assert(To->getParent() == Parent); |
607 | DomTreeBuilder::DeleteEdge(*this, From, To); |
608 | } |
609 | |
610 | |
611 | |
612 | |
613 | |
614 | |
615 | |
616 | |
617 | |
618 | |
619 | DomTreeNodeBase<NodeT> *addNewBlock(NodeT *BB, NodeT *DomBB) { |
620 | assert(getNode(BB) == nullptr && "Block already in dominator tree!"); |
621 | DomTreeNodeBase<NodeT> *IDomNode = getNode(DomBB); |
622 | assert(IDomNode && "Not immediate dominator specified for block!"); |
623 | DFSInfoValid = false; |
624 | return createChild(BB, IDomNode); |
625 | } |
626 | |
627 | |
628 | |
629 | |
630 | |
631 | |
632 | DomTreeNodeBase<NodeT> *setNewRoot(NodeT *BB) { |
633 | assert(getNode(BB) == nullptr && "Block already in dominator tree!"); |
634 | assert(!this->isPostDominator() && |
635 | "Cannot change root of post-dominator tree"); |
636 | DFSInfoValid = false; |
637 | DomTreeNodeBase<NodeT> *NewNode = createNode(BB); |
638 | if (Roots.empty()) { |
639 | addRoot(BB); |
640 | } else { |
641 | assert(Roots.size() == 1); |
642 | NodeT *OldRoot = Roots.front(); |
643 | auto &OldNode = DomTreeNodes[OldRoot]; |
644 | OldNode = NewNode->addChild(std::move(DomTreeNodes[OldRoot])); |
645 | OldNode->IDom = NewNode; |
646 | OldNode->UpdateLevel(); |
647 | Roots[0] = BB; |
648 | } |
649 | return RootNode = NewNode; |
650 | } |
651 | |
652 | |
653 | |
654 | |
655 | void changeImmediateDominator(DomTreeNodeBase<NodeT> *N, |
656 | DomTreeNodeBase<NodeT> *NewIDom) { |
657 | assert(N && NewIDom && "Cannot change null node pointers!"); |
658 | DFSInfoValid = false; |
659 | N->setIDom(NewIDom); |
660 | } |
661 | |
662 | void changeImmediateDominator(NodeT *BB, NodeT *NewBB) { |
663 | changeImmediateDominator(getNode(BB), getNode(NewBB)); |
664 | } |
665 | |
666 | |
667 | |
668 | |
669 | void eraseNode(NodeT *BB) { |
670 | DomTreeNodeBase<NodeT> *Node = getNode(BB); |
671 | assert(Node && "Removing node that isn't in dominator tree."); |
672 | assert(Node->isLeaf() && "Node is not a leaf node."); |
673 | |
674 | DFSInfoValid = false; |
675 | |
676 | |
677 | DomTreeNodeBase<NodeT> *IDom = Node->getIDom(); |
678 | if (IDom) { |
679 | const auto I = find(IDom->Children, Node); |
680 | assert(I != IDom->Children.end() && |
681 | "Not in immediate dominator children set!"); |
682 | |
683 | IDom->Children.erase(I); |
684 | } |
685 | |
686 | DomTreeNodes.erase(BB); |
687 | |
688 | if (!IsPostDom) return; |
689 | |
690 | |
691 | auto RIt = llvm::find(Roots, BB); |
692 | if (RIt != Roots.end()) { |
693 | std::swap(*RIt, Roots.back()); |
694 | Roots.pop_back(); |
695 | } |
696 | } |
697 | |
698 | |
699 | |
700 | void splitBlock(NodeT *NewBB) { |
701 | if (IsPostDominator) |
702 | Split<Inverse<NodeT *>>(NewBB); |
703 | else |
704 | Split<NodeT *>(NewBB); |
705 | } |
706 | |
707 | |
708 | |
709 | void print(raw_ostream &O) const { |
710 | O << "=============================--------------------------------\n"; |
711 | if (IsPostDominator) |
712 | O << "Inorder PostDominator Tree: "; |
713 | else |
714 | O << "Inorder Dominator Tree: "; |
715 | if (!DFSInfoValid) |
716 | O << "DFSNumbers invalid: " << SlowQueries << " slow queries."; |
717 | O << "\n"; |
718 | |
719 | |
720 | if (getRootNode()) PrintDomTree<NodeT>(getRootNode(), O, 1); |
721 | O << "Roots: "; |
722 | for (const NodePtr Block : Roots) { |
723 | Block->printAsOperand(O, false); |
724 | O << " "; |
725 | } |
726 | O << "\n"; |
727 | } |
728 | |
729 | public: |
730 | |
731 | |
732 | void updateDFSNumbers() const { |
733 | if (DFSInfoValid) { |
734 | SlowQueries = 0; |
735 | return; |
736 | } |
737 | |
738 | SmallVector<std::pair<const DomTreeNodeBase<NodeT> *, |
739 | typename DomTreeNodeBase<NodeT>::const_iterator>, |
740 | 32> WorkStack; |
741 | |
742 | const DomTreeNodeBase<NodeT> *ThisRoot = getRootNode(); |
743 | assert((!Parent || ThisRoot) && "Empty constructed DomTree"); |
744 | if (!ThisRoot) |
745 | return; |
746 | |
747 | |
748 | |
749 | WorkStack.push_back({ThisRoot, ThisRoot->begin()}); |
750 | |
751 | unsigned DFSNum = 0; |
752 | ThisRoot->DFSNumIn = DFSNum++; |
753 | |
754 | while (!WorkStack.empty()) { |
755 | const DomTreeNodeBase<NodeT> *Node = WorkStack.back().first; |
756 | const auto ChildIt = WorkStack.back().second; |
757 | |
758 | |
759 | |
760 | if (ChildIt == Node->end()) { |
761 | Node->DFSNumOut = DFSNum++; |
762 | WorkStack.pop_back(); |
763 | } else { |
764 | |
765 | const DomTreeNodeBase<NodeT> *Child = *ChildIt; |
766 | ++WorkStack.back().second; |
767 | |
768 | WorkStack.push_back({Child, Child->begin()}); |
769 | Child->DFSNumIn = DFSNum++; |
770 | } |
771 | } |
772 | |
773 | SlowQueries = 0; |
774 | DFSInfoValid = true; |
775 | } |
776 | |
777 | |
778 | void recalculate(ParentType &Func) { |
779 | Parent = &Func; |
780 | DomTreeBuilder::Calculate(*this); |
781 | } |
782 | |
783 | void recalculate(ParentType &Func, ArrayRef<UpdateType> Updates) { |
784 | Parent = &Func; |
785 | DomTreeBuilder::CalculateWithUpdates(*this, Updates); |
786 | } |
787 | |
788 | |
789 | |
790 | |
791 | |
792 | |
793 | |
794 | |
795 | |
796 | |
797 | |
798 | |
799 | |
800 | |
801 | |
802 | bool verify(VerificationLevel VL = VerificationLevel::Full) const { |
803 | return DomTreeBuilder::Verify(*this, VL); |
804 | } |
805 | |
806 | void reset() { |
807 | DomTreeNodes.clear(); |
808 | Roots.clear(); |
809 | RootNode = nullptr; |
810 | Parent = nullptr; |
811 | DFSInfoValid = false; |
812 | SlowQueries = 0; |
813 | } |
814 | |
815 | protected: |
816 | void addRoot(NodeT *BB) { this->Roots.push_back(BB); } |
817 | |
818 | DomTreeNodeBase<NodeT> *createChild(NodeT *BB, DomTreeNodeBase<NodeT> *IDom) { |
819 | return (DomTreeNodes[BB] = IDom->addChild( |
820 | std::make_unique<DomTreeNodeBase<NodeT>>(BB, IDom))) |
821 | .get(); |
822 | } |
823 | |
824 | DomTreeNodeBase<NodeT> *createNode(NodeT *BB) { |
825 | return (DomTreeNodes[BB] = |
826 | std::make_unique<DomTreeNodeBase<NodeT>>(BB, nullptr)) |
827 | .get(); |
828 | } |
829 | |
830 | |
831 | |
832 | template <class N> |
833 | void Split(typename GraphTraits<N>::NodeRef NewBB) { |
834 | using GraphT = GraphTraits<N>; |
835 | using NodeRef = typename GraphT::NodeRef; |
836 | assert(std::distance(GraphT::child_begin(NewBB), |
837 | GraphT::child_end(NewBB)) == 1 && |
838 | "NewBB should have a single successor!"); |
839 | NodeRef NewBBSucc = *GraphT::child_begin(NewBB); |
840 | |
841 | SmallVector<NodeRef, 4> PredBlocks(children<Inverse<N>>(NewBB)); |
842 | |
843 | assert(!PredBlocks.empty() && "No predblocks?"); |
844 | |
845 | bool NewBBDominatesNewBBSucc = true; |
846 | for (auto Pred : children<Inverse<N>>(NewBBSucc)) { |
847 | if (Pred != NewBB && !dominates(NewBBSucc, Pred) && |
848 | isReachableFromEntry(Pred)) { |
849 | NewBBDominatesNewBBSucc = false; |
850 | break; |
851 | } |
852 | } |
853 | |
854 | |
855 | |
856 | NodeT *NewBBIDom = nullptr; |
857 | unsigned i = 0; |
858 | for (i = 0; i < PredBlocks.size(); ++i) |
859 | if (isReachableFromEntry(PredBlocks[i])) { |
860 | NewBBIDom = PredBlocks[i]; |
861 | break; |
862 | } |
863 | |
864 | |
865 | |
866 | |
867 | if (!NewBBIDom) return; |
868 | |
869 | for (i = i + 1; i < PredBlocks.size(); ++i) { |
870 | if (isReachableFromEntry(PredBlocks[i])) |
871 | NewBBIDom = findNearestCommonDominator(NewBBIDom, PredBlocks[i]); |
872 | } |
873 | |
874 | |
875 | DomTreeNodeBase<NodeT> *NewBBNode = addNewBlock(NewBB, NewBBIDom); |
876 | |
877 | |
878 | |
879 | if (NewBBDominatesNewBBSucc) { |
880 | DomTreeNodeBase<NodeT> *NewBBSuccNode = getNode(NewBBSucc); |
881 | changeImmediateDominator(NewBBSuccNode, NewBBNode); |
882 | } |
883 | } |
884 | |
885 | private: |
886 | bool dominatedBySlowTreeWalk(const DomTreeNodeBase<NodeT> *A, |
887 | const DomTreeNodeBase<NodeT> *B) const { |
888 | assert(A != B); |
889 | assert(isReachableFromEntry(B)); |
890 | assert(isReachableFromEntry(A)); |
891 | |
892 | const unsigned ALevel = A->getLevel(); |
893 | const DomTreeNodeBase<NodeT> *IDom; |
894 | |
895 | |
896 | |
897 | while ((IDom = B->getIDom()) != nullptr && IDom->getLevel() >= ALevel) |
898 | B = IDom; |
899 | |
900 | return B == A; |
901 | } |
902 | |
903 | |
904 | |
905 | |
906 | |
907 | void wipe() { |
908 | DomTreeNodes.clear(); |
909 | RootNode = nullptr; |
910 | Parent = nullptr; |
911 | } |
912 | }; |
913 | |
914 | template <typename T> |
915 | using DomTreeBase = DominatorTreeBase<T, false>; |
916 | |
917 | template <typename T> |
918 | using PostDomTreeBase = DominatorTreeBase<T, true>; |
919 | |
920 | |
921 | |
922 | template <typename NodeT, bool IsPostDom> |
923 | bool DominatorTreeBase<NodeT, IsPostDom>::dominates(const NodeT *A, |
924 | const NodeT *B) const { |
925 | if (A == B) |
926 | return true; |
927 | |
928 | |
929 | |
930 | |
931 | return dominates(getNode(const_cast<NodeT *>(A)), |
932 | getNode(const_cast<NodeT *>(B))); |
933 | } |
934 | template <typename NodeT, bool IsPostDom> |
935 | bool DominatorTreeBase<NodeT, IsPostDom>::properlyDominates( |
936 | const NodeT *A, const NodeT *B) const { |
937 | if (A == B) |
938 | return false; |
939 | |
940 | |
941 | |
942 | |
943 | return dominates(getNode(const_cast<NodeT *>(A)), |
944 | getNode(const_cast<NodeT *>(B))); |
945 | } |
946 | |
947 | } |
948 | |
949 | #endif // LLVM_SUPPORT_GENERICDOMTREE_H |
1 | |
2 | |
3 | |
4 | |
5 | |
6 | |
7 | |
8 | |
9 | |
10 | |
11 | |
12 | |
13 | #ifndef LLVM_ADT_DENSEMAP_H |
14 | #define LLVM_ADT_DENSEMAP_H |
15 | |
16 | #include "llvm/ADT/DenseMapInfo.h" |
17 | #include "llvm/ADT/EpochTracker.h" |
18 | #include "llvm/Support/AlignOf.h" |
19 | #include "llvm/Support/Compiler.h" |
20 | #include "llvm/Support/MathExtras.h" |
21 | #include "llvm/Support/MemAlloc.h" |
22 | #include "llvm/Support/ReverseIteration.h" |
23 | #include "llvm/Support/type_traits.h" |
24 | #include <algorithm> |
25 | #include <cassert> |
26 | #include <cstddef> |
27 | #include <cstring> |
28 | #include <initializer_list> |
29 | #include <iterator> |
30 | #include <new> |
31 | #include <type_traits> |
32 | #include <utility> |
33 | |
34 | namespace llvm { |
35 | |
36 | namespace detail { |
37 | |
38 | |
39 | |
40 | template <typename KeyT, typename ValueT> |
41 | struct DenseMapPair : public std::pair<KeyT, ValueT> { |
42 | using std::pair<KeyT, ValueT>::pair; |
43 | |
44 | KeyT &getFirst() { return std::pair<KeyT, ValueT>::first; } |
45 | const KeyT &getFirst() const { return std::pair<KeyT, ValueT>::first; } |
46 | ValueT &getSecond() { return std::pair<KeyT, ValueT>::second; } |
47 | const ValueT &getSecond() const { return std::pair<KeyT, ValueT>::second; } |
48 | }; |
49 | |
50 | } |
51 | |
52 | template <typename KeyT, typename ValueT, |
53 | typename KeyInfoT = DenseMapInfo<KeyT>, |
54 | typename Bucket = llvm::detail::DenseMapPair<KeyT, ValueT>, |
55 | bool IsConst = false> |
56 | class DenseMapIterator; |
57 | |
58 | template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT, |
59 | typename BucketT> |
60 | class DenseMapBase : public DebugEpochBase { |
61 | template <typename T> |
62 | using const_arg_type_t = typename const_pointer_or_const_ref<T>::type; |
63 | |
64 | public: |
65 | using size_type = unsigned; |
66 | using key_type = KeyT; |
67 | using mapped_type = ValueT; |
68 | using value_type = BucketT; |
69 | |
70 | using iterator = DenseMapIterator<KeyT, ValueT, KeyInfoT, BucketT>; |
71 | using const_iterator = |
72 | DenseMapIterator<KeyT, ValueT, KeyInfoT, BucketT, true>; |
73 | |
74 | inline iterator begin() { |
75 | |
76 | |
77 | if (empty()) |
78 | return end(); |
79 | if (shouldReverseIterate<KeyT>()) |
80 | return makeIterator(getBucketsEnd() - 1, getBuckets(), *this); |
81 | return makeIterator(getBuckets(), getBucketsEnd(), *this); |
82 | } |
83 | inline iterator end() { |
84 | return makeIterator(getBucketsEnd(), getBucketsEnd(), *this, true); |
85 | } |
86 | inline const_iterator begin() const { |
87 | if (empty()) |
88 | return end(); |
89 | if (shouldReverseIterate<KeyT>()) |
90 | return makeConstIterator(getBucketsEnd() - 1, getBuckets(), *this); |
91 | return makeConstIterator(getBuckets(), getBucketsEnd(), *this); |
92 | } |
93 | inline const_iterator end() const { |
94 | return makeConstIterator(getBucketsEnd(), getBucketsEnd(), *this, true); |
95 | } |
96 | |
97 | LLVM_NODISCARD bool empty() const { |
98 | return getNumEntries() == 0; |
99 | } |
100 | unsigned size() const { return getNumEntries(); } |
101 | |
102 | |
103 | |
104 | void reserve(size_type NumEntries) { |
105 | auto NumBuckets = getMinBucketToReserveForEntries(NumEntries); |
106 | incrementEpoch(); |
107 | if (NumBuckets > getNumBuckets()) |
108 | grow(NumBuckets); |
109 | } |
110 | |
111 | void clear() { |
112 | incrementEpoch(); |
113 | if (getNumEntries() == 0 && getNumTombstones() == 0) return; |
114 | |
115 | |
116 | |
117 | if (getNumEntries() * 4 < getNumBuckets() && getNumBuckets() > 64) { |
118 | shrink_and_clear(); |
119 | return; |
120 | } |
121 | |
122 | const KeyT EmptyKey = getEmptyKey(), TombstoneKey = getTombstoneKey(); |
123 | if (std::is_trivially_destructible<ValueT>::value) { |
124 | |
125 | for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) |
126 | P->getFirst() = EmptyKey; |
127 | } else { |
128 | unsigned NumEntries = getNumEntries(); |
129 | for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) { |
130 | if (!KeyInfoT::isEqual(P->getFirst(), EmptyKey)) { |
131 | if (!KeyInfoT::isEqual(P->getFirst(), TombstoneKey)) { |
132 | P->getSecond().~ValueT(); |
133 | --NumEntries; |
134 | } |
135 | P->getFirst() = EmptyKey; |
136 | } |
137 | } |
138 | assert(NumEntries == 0 && "Node count imbalance!"); |
139 | } |
140 | setNumEntries(0); |
141 | setNumTombstones(0); |
142 | } |
143 | |
144 | |
145 | size_type count(const_arg_type_t<KeyT> Val) const { |
146 | const BucketT *TheBucket; |
147 | return LookupBucketFor(Val, TheBucket) ? 1 : 0; |
148 | } |
149 | |
150 | iterator find(const_arg_type_t<KeyT> Val) { |
151 | BucketT *TheBucket; |
152 | if (LookupBucketFor(Val, TheBucket)) |
153 | return makeIterator(TheBucket, |
154 | shouldReverseIterate<KeyT>() ? getBuckets() |
155 | : getBucketsEnd(), |
156 | *this, true); |
157 | return end(); |
158 | } |
159 | const_iterator find(const_arg_type_t<KeyT> Val) const { |
160 | const BucketT *TheBucket; |
161 | if (LookupBucketFor(Val, TheBucket)) |
162 | return makeConstIterator(TheBucket, |
163 | shouldReverseIterate<KeyT>() ? getBuckets() |
164 | : getBucketsEnd(), |
165 | *this, true); |
166 | return end(); |
167 | } |
168 | |
169 | |
170 | |
171 | |
172 | |
173 | |
174 | template<class LookupKeyT> |
175 | iterator find_as(const LookupKeyT &Val) { |
176 | BucketT *TheBucket; |
177 | if (LookupBucketFor(Val, TheBucket)) |
178 | return makeIterator(TheBucket, |
179 | shouldReverseIterate<KeyT>() ? getBuckets() |
180 | : getBucketsEnd(), |
181 | *this, true); |
182 | return end(); |
183 | } |
184 | template<class LookupKeyT> |
185 | const_iterator find_as(const LookupKeyT &Val) const { |
186 | const BucketT *TheBucket; |
187 | if (LookupBucketFor(Val, TheBucket)) |
188 | return makeConstIterator(TheBucket, |
189 | shouldReverseIterate<KeyT>() ? getBuckets() |
190 | : getBucketsEnd(), |
191 | *this, true); |
192 | return end(); |
193 | } |
194 | |
195 | |
196 | |
197 | ValueT lookup(const_arg_type_t<KeyT> Val) const { |
198 | const BucketT *TheBucket; |
199 | if (LookupBucketFor(Val, TheBucket)) |
200 | return TheBucket->getSecond(); |
201 | return ValueT(); |
202 | } |
203 | |
204 | |
205 | |
206 | |
207 | std::pair<iterator, bool> insert(const std::pair<KeyT, ValueT> &KV) { |
208 | return try_emplace(KV.first, KV.second); |
209 | } |
210 | |
211 | |
212 | |
213 | |
214 | std::pair<iterator, bool> insert(std::pair<KeyT, ValueT> &&KV) { |
215 | return try_emplace(std::move(KV.first), std::move(KV.second)); |
216 | } |
217 | |
218 | |
219 | |
220 | |
221 | template <typename... Ts> |
222 | std::pair<iterator, bool> try_emplace(KeyT &&Key, Ts &&... Args) { |
223 | BucketT *TheBucket; |
224 | if (LookupBucketFor(Key, TheBucket)) |
225 | return std::make_pair(makeIterator(TheBucket, |
226 | shouldReverseIterate<KeyT>() |
227 | ? getBuckets() |
228 | : getBucketsEnd(), |
229 | *this, true), |
230 | false); |
231 | |
232 | |
233 | TheBucket = |
234 | InsertIntoBucket(TheBucket, std::move(Key), std::forward<Ts>(Args)...); |
235 | return std::make_pair(makeIterator(TheBucket, |
236 | shouldReverseIterate<KeyT>() |
237 | ? getBuckets() |
238 | : getBucketsEnd(), |
239 | *this, true), |
240 | true); |
241 | } |
242 | |
243 | |
244 | |
245 | |
246 | template <typename... Ts> |
247 | std::pair<iterator, bool> try_emplace(const KeyT &Key, Ts &&... Args) { |
248 | BucketT *TheBucket; |
249 | if (LookupBucketFor(Key, TheBucket)) |
250 | return std::make_pair(makeIterator(TheBucket, |
251 | shouldReverseIterate<KeyT>() |
252 | ? getBuckets() |
253 | : getBucketsEnd(), |
254 | *this, true), |
255 | false); |
256 | |
257 | |
258 | TheBucket = InsertIntoBucket(TheBucket, Key, std::forward<Ts>(Args)...); |
259 | return std::make_pair(makeIterator(TheBucket, |
260 | shouldReverseIterate<KeyT>() |
261 | ? getBuckets() |
262 | : getBucketsEnd(), |
263 | *this, true), |
264 | true); |
265 | } |
266 | |
267 | |
268 | |
269 | |
270 | |
271 | |
272 | template <typename LookupKeyT> |
273 | std::pair<iterator, bool> insert_as(std::pair<KeyT, ValueT> &&KV, |
274 | const LookupKeyT &Val) { |
275 | BucketT *TheBucket; |
276 | if (LookupBucketFor(Val, TheBucket)) |
277 | return std::make_pair(makeIterator(TheBucket, |
278 | shouldReverseIterate<KeyT>() |
279 | ? getBuckets() |
280 | : getBucketsEnd(), |
281 | *this, true), |
282 | false); |
283 | |
284 | |
285 | TheBucket = InsertIntoBucketWithLookup(TheBucket, std::move(KV.first), |
286 | std::move(KV.second), Val); |
287 | return std::make_pair(makeIterator(TheBucket, |
288 | shouldReverseIterate<KeyT>() |
289 | ? getBuckets() |
290 | : getBucketsEnd(), |
291 | *this, true), |
292 | true); |
293 | } |
294 | |
295 | |
296 | template<typename InputIt> |
297 | void insert(InputIt I, InputIt E) { |
298 | for (; I != E; ++I) |
299 | insert(*I); |
300 | } |
301 | |
302 | bool erase(const KeyT &Val) { |
303 | BucketT *TheBucket; |
304 | if (!LookupBucketFor(Val, TheBucket)) |
305 | return false; |
306 | |
307 | TheBucket->getSecond().~ValueT(); |
308 | TheBucket->getFirst() = getTombstoneKey(); |
309 | decrementNumEntries(); |
310 | incrementNumTombstones(); |
311 | return true; |
312 | } |
313 | void erase(iterator I) { |
314 | BucketT *TheBucket = &*I; |
315 | TheBucket->getSecond().~ValueT(); |
316 | TheBucket->getFirst() = getTombstoneKey(); |
317 | decrementNumEntries(); |
318 | incrementNumTombstones(); |
319 | } |
320 | |
321 | value_type& FindAndConstruct(const KeyT &Key) { |
322 | BucketT *TheBucket; |
323 | if (LookupBucketFor(Key, TheBucket)) |
324 | return *TheBucket; |
325 | |
326 | return *InsertIntoBucket(TheBucket, Key); |
327 | } |
328 | |
329 | ValueT &operator[](const KeyT &Key) { |
330 | return FindAndConstruct(Key).second; |
331 | } |
332 | |
333 | value_type& FindAndConstruct(KeyT &&Key) { |
334 | BucketT *TheBucket; |
335 | if (LookupBucketFor(Key, TheBucket)) |
336 | return *TheBucket; |
337 | |
338 | return *InsertIntoBucket(TheBucket, std::move(Key)); |
339 | } |
340 | |
341 | ValueT &operator[](KeyT &&Key) { |
342 | return FindAndConstruct(std::move(Key)).second; |
343 | } |
344 | |
345 | |
346 | |
347 | |
348 | bool isPointerIntoBucketsArray(const void *Ptr) const { |
349 | return Ptr >= getBuckets() && Ptr < getBucketsEnd(); |
350 | } |
351 | |
352 | |
353 | |
354 | |
355 | const void *getPointerIntoBucketsArray() const { return getBuckets(); } |
356 | |
357 | protected: |
358 | DenseMapBase() = default; |
359 | |
360 | void destroyAll() { |
361 | if (getNumBuckets() == 0) |
362 | return; |
363 | |
364 | const KeyT EmptyKey = getEmptyKey(), TombstoneKey = getTombstoneKey(); |
365 | for (BucketT *P = getBuckets(), *E = getBucketsEnd(); P != E; ++P) { |
366 | if (!KeyInfoT::isEqual(P->getFirst(), EmptyKey) && |
367 | !KeyInfoT::isEqual(P->getFirst(), TombstoneKey)) |
368 | P->getSecond().~ValueT(); |
369 | P->getFirst().~KeyT(); |
370 | } |
371 | } |
372 | |
373 | void initEmpty() { |
374 | setNumEntries(0); |
375 | setNumTombstones(0); |
376 | |
377 | assert((getNumBuckets() & (getNumBuckets()-1)) == 0 && |
378 | "# initial buckets must be a power of two!"); |
379 | const KeyT EmptyKey = getEmptyKey(); |
380 | for (BucketT *B = getBuckets(), *E = getBucketsEnd(); B != E; ++B) |
381 | ::new (&B->getFirst()) KeyT(EmptyKey); |
382 | } |
383 | |
384 | |
385 | |
386 | unsigned getMinBucketToReserveForEntries(unsigned NumEntries) { |
387 | |
388 | if (NumEntries == 0) |
389 | return 0; |
390 | |
391 | |
392 | return NextPowerOf2(NumEntries * 4 / 3 + 1); |
393 | } |
394 | |
395 | void moveFromOldBuckets(BucketT *OldBucketsBegin, BucketT *OldBucketsEnd) { |
396 | initEmpty(); |
397 | |
398 | |
399 | const KeyT EmptyKey = getEmptyKey(); |
400 | const KeyT TombstoneKey = getTombstoneKey(); |
401 | for (BucketT *B = OldBucketsBegin, *E = OldBucketsEnd; B != E; ++B) { |
402 | if (!KeyInfoT::isEqual(B->getFirst(), EmptyKey) && |
403 | !KeyInfoT::isEqual(B->getFirst(), TombstoneKey)) { |
404 | |
405 | BucketT *DestBucket; |
406 | bool FoundVal = LookupBucketFor(B->getFirst(), DestBucket); |
407 | (void)FoundVal; |
408 | assert(!FoundVal && "Key already in new map?"); |
409 | DestBucket->getFirst() = std::move(B->getFirst()); |
410 | ::new (&DestBucket->getSecond()) ValueT(std::move(B->getSecond())); |
411 | incrementNumEntries(); |
412 | |
413 | |
414 | B->getSecond().~ValueT(); |
415 | } |
416 | B->getFirst().~KeyT(); |
417 | } |
418 | } |
419 | |
420 | template <typename OtherBaseT> |
421 | void copyFrom( |
422 | const DenseMapBase<OtherBaseT, KeyT, ValueT, KeyInfoT, BucketT> &other) { |
423 | assert(&other != this); |
424 | assert(getNumBuckets() == other.getNumBuckets()); |
425 | |
426 | setNumEntries(other.getNumEntries()); |
427 | setNumTombstones(other.getNumTombstones()); |
428 | |
429 | if (std::is_trivially_copyable<KeyT>::value && |
430 | std::is_trivially_copyable<ValueT>::value) |
431 | memcpy(reinterpret_cast<void *>(getBuckets()), other.getBuckets(), |
432 | getNumBuckets() * sizeof(BucketT)); |
433 | else |
434 | for (size_t i = 0; i < getNumBuckets(); ++i) { |
435 | ::new (&getBuckets()[i].getFirst()) |
436 | KeyT(other.getBuckets()[i].getFirst()); |
437 | if (!KeyInfoT::isEqual(getBuckets()[i].getFirst(), getEmptyKey()) && |
438 | !KeyInfoT::isEqual(getBuckets()[i].getFirst(), getTombstoneKey())) |
439 | ::new (&getBuckets()[i].getSecond()) |
440 | ValueT(other.getBuckets()[i].getSecond()); |
441 | } |
442 | } |
443 | |
444 | static unsigned getHashValue(const KeyT &Val) { |
445 | return KeyInfoT::getHashValue(Val); |
446 | } |
447 | |
448 | template<typename LookupKeyT> |
449 | static unsigned getHashValue(const LookupKeyT &Val) { |
450 | return KeyInfoT::getHashValue(Val); |
451 | } |
452 | |
453 | static const KeyT getEmptyKey() { |
454 | static_assert(std::is_base_of<DenseMapBase, DerivedT>::value, |
455 | "Must pass the derived type to this template!"); |
456 | return KeyInfoT::getEmptyKey(); |
457 | } |
458 | |
459 | static const KeyT getTombstoneKey() { |
460 | return KeyInfoT::getTombstoneKey(); |
461 | } |
462 | |
463 | private: |
464 | iterator makeIterator(BucketT *P, BucketT *E, |
465 | DebugEpochBase &Epoch, |
466 | bool NoAdvance=false) { |
467 | if (shouldReverseIterate<KeyT>()) { |
468 | BucketT *B = P == getBucketsEnd() ? getBuckets() : P + 1; |
469 | return iterator(B, E, Epoch, NoAdvance); |
470 | } |
471 | return iterator(P, E, Epoch, NoAdvance); |
472 | } |
473 | |
474 | const_iterator makeConstIterator(const BucketT *P, const BucketT *E, |
475 | const DebugEpochBase &Epoch, |
476 | const bool NoAdvance=false) const { |
477 | if (shouldReverseIterate<KeyT>()) { |
478 | const BucketT *B = P == getBucketsEnd() ? getBuckets() : P + 1; |
479 | return const_iterator(B, E, Epoch, NoAdvance); |
480 | } |
481 | return const_iterator(P, E, Epoch, NoAdvance); |
482 | } |
483 | |
484 | unsigned getNumEntries() const { |
485 | return static_cast<const DerivedT *>(this)->getNumEntries(); |
486 | } |
487 | |
488 | void setNumEntries(unsigned Num) { |
489 | static_cast<DerivedT *>(this)->setNumEntries(Num); |
490 | } |
491 | |
492 | void incrementNumEntries() { |
493 | setNumEntries(getNumEntries() + 1); |
494 | } |
495 | |
496 | void decrementNumEntries() { |
497 | setNumEntries(getNumEntries() - 1); |
498 | } |
499 | |
500 | unsigned getNumTombstones() const { |
501 | return static_cast<const DerivedT *>(this)->getNumTombstones(); |
502 | } |
503 | |
504 | void setNumTombstones(unsigned Num) { |
505 | static_cast<DerivedT *>(this)->setNumTombstones(Num); |
506 | } |
507 | |
508 | void incrementNumTombstones() { |
509 | setNumTombstones(getNumTombstones() + 1); |
510 | } |
511 | |
512 | void decrementNumTombstones() { |
513 | setNumTombstones(getNumTombstones() - 1); |
514 | } |
515 | |
516 | const BucketT *getBuckets() const { |
517 | return static_cast<const DerivedT *>(this)->getBuckets(); |
518 | } |
519 | |
520 | BucketT *getBuckets() { |
521 | return static_cast<DerivedT *>(this)->getBuckets(); |
522 | } |
523 | |
524 | unsigned getNumBuckets() const { |
525 | return static_cast<const DerivedT *>(this)->getNumBuckets(); |
526 | } |
527 | |
528 | BucketT *getBucketsEnd() { |
529 | return getBuckets() + getNumBuckets(); |
530 | } |
531 | |
532 | const BucketT *getBucketsEnd() const { |
533 | return getBuckets() + getNumBuckets(); |
534 | } |
535 | |
536 | void grow(unsigned AtLeast) { |
537 | static_cast<DerivedT *>(this)->grow(AtLeast); |
538 | } |
539 | |
540 | void shrink_and_clear() { |
541 | static_cast<DerivedT *>(this)->shrink_and_clear(); |
542 | } |
543 | |
544 | template <typename KeyArg, typename... ValueArgs> |
545 | BucketT *InsertIntoBucket(BucketT *TheBucket, KeyArg &&Key, |
546 | ValueArgs &&... Values) { |
547 | TheBucket = InsertIntoBucketImpl(Key, Key, TheBucket); |
548 | |
549 | TheBucket->getFirst() = std::forward<KeyArg>(Key); |
550 | ::new (&TheBucket->getSecond()) ValueT(std::forward<ValueArgs>(Values)...); |
551 | return TheBucket; |
552 | } |
553 | |
554 | template <typename LookupKeyT> |
555 | BucketT *InsertIntoBucketWithLookup(BucketT *TheBucket, KeyT &&Key, |
556 | ValueT &&Value, LookupKeyT &Lookup) { |
557 | TheBucket = InsertIntoBucketImpl(Key, Lookup, TheBucket); |
558 | |
559 | TheBucket->getFirst() = std::move(Key); |
560 | ::new (&TheBucket->getSecond()) ValueT(std::move(Value)); |
561 | return TheBucket; |
562 | } |
563 | |
564 | template <typename LookupKeyT> |
565 | BucketT *InsertIntoBucketImpl(const KeyT &Key, const LookupKeyT &Lookup, |
566 | BucketT *TheBucket) { |
567 | incrementEpoch(); |
568 | |
569 | |
570 | |
571 | |
572 | |
573 | |
574 | |
575 | |
576 | |
577 | |
578 | unsigned NewNumEntries = getNumEntries() + 1; |
579 | unsigned NumBuckets = getNumBuckets(); |
580 | if (LLVM_UNLIKELY(NewNumEntries * 4 >= NumBuckets * 3)) { |
581 | this->grow(NumBuckets * 2); |
582 | LookupBucketFor(Lookup, TheBucket); |
583 | NumBuckets = getNumBuckets(); |
584 | } else if (LLVM_UNLIKELY(NumBuckets-(NewNumEntries+getNumTombstones()) <= |
585 | NumBuckets/8)) { |
586 | this->grow(NumBuckets); |
587 | LookupBucketFor(Lookup, TheBucket); |
588 | } |
589 | assert(TheBucket); |
590 | |
591 | |
592 | |
593 | incrementNumEntries(); |
594 | |
595 | |
596 | const KeyT EmptyKey = getEmptyKey(); |
597 | if (!KeyInfoT::isEqual(TheBucket->getFirst(), EmptyKey)) |
598 | decrementNumTombstones(); |
599 | |
600 | return TheBucket; |
601 | } |
602 | |
603 | |
604 | |
605 | |
606 | |
607 | template<typename LookupKeyT> |
608 | bool LookupBucketFor(const LookupKeyT &Val, |
609 | const BucketT *&FoundBucket) const { |
610 | const BucketT *BucketsPtr = getBuckets(); |
611 | const unsigned NumBuckets = getNumBuckets(); |
612 | |
613 | if (NumBuckets == 0) { |
614 | FoundBucket = nullptr; |
615 | return false; |
616 | } |
617 | |
618 | |
619 | const BucketT *FoundTombstone = nullptr; |
620 | const KeyT EmptyKey = getEmptyKey(); |
621 | const KeyT TombstoneKey = getTombstoneKey(); |
622 | assert(!KeyInfoT::isEqual(Val, EmptyKey) && |
623 | !KeyInfoT::isEqual(Val, TombstoneKey) && |
624 | "Empty/Tombstone value shouldn't be inserted into map!"); |
625 | |
626 | unsigned BucketNo = getHashValue(Val) & (NumBuckets-1); |
627 | unsigned ProbeAmt = 1; |
628 | while (true) { |
629 | const BucketT *ThisBucket = BucketsPtr + BucketNo; |
630 | |
631 | if (LLVM_LIKELY(KeyInfoT::isEqual(Val, ThisBucket->getFirst()))) { |
632 | FoundBucket = ThisBucket; |
633 | return true; |
634 | } |
635 | |
636 | |
637 | |
638 | if (LLVM_LIKELY(KeyInfoT::isEqual(ThisBucket->getFirst(), EmptyKey))) { |
639 | |
640 | |
641 | FoundBucket = FoundTombstone ? FoundTombstone : ThisBucket; |
642 | return false; |
643 | } |
644 | |
645 | |
646 | |
647 | if (KeyInfoT::isEqual(ThisBucket->getFirst(), TombstoneKey) && |
648 | !FoundTombstone) |
649 | FoundTombstone = ThisBucket; |
650 | |
651 | |
652 | |
653 | BucketNo += ProbeAmt++; |
654 | BucketNo &= (NumBuckets-1); |
655 | } |
656 | } |
657 | |
658 | template <typename LookupKeyT> |
659 | bool LookupBucketFor(const LookupKeyT &Val, BucketT *&FoundBucket) { |
660 | const BucketT *ConstFoundBucket; |
661 | bool Result = const_cast<const DenseMapBase *>(this) |
662 | ->LookupBucketFor(Val, ConstFoundBucket); |
663 | FoundBucket = const_cast<BucketT *>(ConstFoundBucket); |
664 | return Result; |
665 | } |
666 | |
667 | public: |
668 | |
669 | |
670 | |
671 | |
672 | size_t getMemorySize() const { |
673 | return getNumBuckets() * sizeof(BucketT); |
674 | } |
675 | }; |
676 | |
677 | |
678 | |
679 | |
680 | |
681 | |
682 | |
683 | template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT, |
684 | typename BucketT> |
685 | bool operator==( |
686 | const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &LHS, |
687 | const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &RHS) { |
688 | if (LHS.size() != RHS.size()) |
689 | return false; |
690 | |
691 | for (auto &KV : LHS) { |
692 | auto I = RHS.find(KV.first); |
693 | if (I == RHS.end() || I->second != KV.second) |
694 | return false; |
695 | } |
696 | |
697 | return true; |
698 | } |
699 | |
700 | |
701 | |
702 | |
703 | template <typename DerivedT, typename KeyT, typename ValueT, typename KeyInfoT, |
704 | typename BucketT> |
705 | bool operator!=( |
706 | const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &LHS, |
707 | const DenseMapBase<DerivedT, KeyT, ValueT, KeyInfoT, BucketT> &RHS) { |
708 | return !(LHS == RHS); |
709 | } |
710 | |
711 | template <typename KeyT, typename ValueT, |
712 | typename KeyInfoT = DenseMapInfo<KeyT>, |
713 | typename BucketT = llvm::detail::DenseMapPair<KeyT, ValueT>> |
714 | class DenseMap : public DenseMapBase<DenseMap<KeyT, ValueT, KeyInfoT, BucketT>, |
715 | KeyT, ValueT, KeyInfoT, BucketT> { |
716 | friend class DenseMapBase<DenseMap, KeyT, ValueT, KeyInfoT, BucketT>; |
717 | |
718 | |
719 | |
720 | using BaseT = DenseMapBase<DenseMap, KeyT, ValueT, KeyInfoT, BucketT>; |
721 | |
722 | BucketT *Buckets; |
723 | unsigned NumEntries; |
724 | unsigned NumTombstones; |
725 | unsigned NumBuckets; |
726 | |
727 | public: |
728 | |
729 | |
730 | explicit DenseMap(unsigned InitialReserve = 0) { init(InitialReserve); } |
731 | |
732 | DenseMap(const DenseMap &other) : BaseT() { |
733 | init(0); |
734 | copyFrom(other); |
735 | } |
736 | |
737 | DenseMap(DenseMap &&other) : BaseT() { |
738 | init(0); |
739 | swap(other); |
740 | } |
741 | |
742 | template<typename InputIt> |
743 | DenseMap(const InputIt &I, const InputIt &E) { |
744 | init(std::distance(I, E)); |
745 | this->insert(I, E); |
746 | } |
747 | |
748 | DenseMap(std::initializer_list<typename BaseT::value_type> Vals) { |
749 | init(Vals.size()); |
750 | this->insert(Vals.begin(), Vals.end()); |
751 | } |
752 | |
753 | ~DenseMap() { |
754 | this->destroyAll(); |
755 | deallocate_buffer(Buckets, sizeof(BucketT) * NumBuckets, alignof(BucketT)); |
756 | } |
757 | |
758 | void swap(DenseMap& RHS) { |
759 | this->incrementEpoch(); |
760 | RHS.incrementEpoch(); |
761 | std::swap(Buckets, RHS.Buckets); |
762 | std::swap(NumEntries, RHS.NumEntries); |
763 | std::swap(NumTombstones, RHS.NumTombstones); |
764 | std::swap(NumBuckets, RHS.NumBuckets); |
765 | } |
766 | |
767 | DenseMap& operator=(const DenseMap& other) { |
768 | if (&other != this) |
769 | copyFrom(other); |
770 | return *this; |
771 | } |
772 | |
773 | DenseMap& operator=(DenseMap &&other) { |
774 | this->destroyAll(); |
775 | deallocate_buffer(Buckets, sizeof(BucketT) * NumBuckets, alignof(BucketT)); |
776 | init(0); |
777 | swap(other); |
778 | return *this; |
779 | } |
780 | |
781 | void copyFrom(const DenseMap& other) { |
782 | this->destroyAll(); |
783 | deallocate_buffer(Buckets, sizeof(BucketT) * NumBuckets, alignof(BucketT)); |
784 | if (allocateBuckets(other.NumBuckets)) { |
785 | this->BaseT::copyFrom(other); |
786 | } else { |
787 | NumEntries = 0; |
788 | NumTombstones = 0; |
789 | } |
790 | } |
791 | |
792 | void init(unsigned InitNumEntries) { |
793 | auto InitBuckets = BaseT::getMinBucketToReserveForEntries(InitNumEntries); |
794 | if (allocateBuckets(InitBuckets)) { |
795 | this->BaseT::initEmpty(); |
796 | } else { |
797 | NumEntries = 0; |
798 | NumTombstones = 0; |
799 | } |
800 | } |
801 | |
802 | void grow(unsigned AtLeast) { |
803 | unsigned OldNumBuckets = NumBuckets; |
804 | BucketT *OldBuckets = Buckets; |
805 | |
806 | allocateBuckets(std::max<unsigned>(64, static_cast<unsigned>(NextPowerOf2(AtLeast-1)))); |
807 | assert(Buckets); |
808 | if (!OldBuckets) { |
809 | this->BaseT::initEmpty(); |
810 | return; |
811 | } |
812 | |
813 | this->moveFromOldBuckets(OldBuckets, OldBuckets+OldNumBuckets); |
814 | |
815 | |
816 | deallocate_buffer(OldBuckets, sizeof(BucketT) * OldNumBuckets, |
817 | alignof(BucketT)); |
818 | } |
819 | |
820 | void shrink_and_clear() { |
821 | unsigned OldNumBuckets = NumBuckets; |
822 | unsigned OldNumEntries = NumEntries; |
823 | this->destroyAll(); |
824 | |
825 | |
826 | unsigned NewNumBuckets = 0; |
827 | if (OldNumEntries) |
828 | NewNumBuckets = std::max(64, 1 << (Log2_32_Ceil(OldNumEntries) + 1)); |
829 | if (NewNumBuckets == NumBuckets) { |
830 | this->BaseT::initEmpty(); |
831 | return; |
832 | } |
833 | |
834 | deallocate_buffer(Buckets, sizeof(BucketT) * OldNumBuckets, |
835 | alignof(BucketT)); |
836 | init(NewNumBuckets); |
837 | } |
838 | |
839 | private: |
840 | unsigned getNumEntries() const { |
841 | return NumEntries; |
842 | } |
843 | |
844 | void setNumEntries(unsigned Num) { |
845 | NumEntries = Num; |
846 | } |
847 | |
848 | unsigned getNumTombstones() const { |
849 | return NumTombstones; |
850 | } |
851 | |
852 | void setNumTombstones(unsigned Num) { |
853 | NumTombstones = Num; |
854 | } |
855 | |
856 | BucketT *getBuckets() const { |
857 | return Buckets; |
858 | } |
859 | |
860 | unsigned getNumBuckets() const { |
861 | return NumBuckets; |
862 | } |
863 | |
864 | bool allocateBuckets(unsigned Num) { |
865 | NumBuckets = Num; |
866 | if (NumBuckets == 0) { |
867 | Buckets = nullptr; |
868 | return false; |
869 | } |
870 | |
871 | Buckets = static_cast<BucketT *>( |
872 | allocate_buffer(sizeof(BucketT) * NumBuckets, alignof(BucketT))); |
873 | return true; |
874 | } |
875 | }; |
876 | |
877 | template <typename KeyT, typename ValueT, unsigned InlineBuckets = 4, |
878 | typename KeyInfoT = DenseMapInfo<KeyT>, |
879 | typename BucketT = llvm::detail::DenseMapPair<KeyT, ValueT>> |
880 | class SmallDenseMap |
881 | : public DenseMapBase< |
882 | SmallDenseMap<KeyT, ValueT, InlineBuckets, KeyInfoT, BucketT>, KeyT, |
883 | ValueT, KeyInfoT, BucketT> { |
884 | friend class DenseMapBase<SmallDenseMap, KeyT, ValueT, KeyInfoT, BucketT>; |
885 | |
886 | |
887 | |
888 | using BaseT = DenseMapBase<SmallDenseMap, KeyT, ValueT, KeyInfoT, BucketT>; |
889 | |
890 | static_assert(isPowerOf2_64(InlineBuckets), |
891 | "InlineBuckets must be a power of 2."); |
892 | |
893 | unsigned Small : 1; |
894 | unsigned NumEntries : 31; |
895 | unsigned NumTombstones; |
896 | |
897 | struct LargeRep { |
898 | BucketT *Buckets; |
899 | unsigned NumBuckets; |
900 | }; |
901 | |
902 | |
903 | |
904 | AlignedCharArrayUnion<BucketT[InlineBuckets], LargeRep> storage; |
905 | |
906 | public: |
907 | explicit SmallDenseMap(unsigned NumInitBuckets = 0) { |
908 | init(NumInitBuckets); |
909 | } |
910 | |
911 | SmallDenseMap(const SmallDenseMap &other) : BaseT() { |
912 | init(0); |
913 | copyFrom(other); |
914 | } |
915 | |
916 | SmallDenseMap(SmallDenseMap &&other) : BaseT() { |
917 | init(0); |
918 | swap(other); |
919 | } |
920 | |
921 | template<typename InputIt> |
922 | SmallDenseMap(const InputIt &I, const InputIt &E) { |
923 | init(NextPowerOf2(std::distance(I, E))); |
924 | this->insert(I, E); |
925 | } |
926 | |
927 | SmallDenseMap(std::initializer_list<typename BaseT::value_type> Vals) |
928 | : SmallDenseMap(Vals.begin(), Vals.end()) {} |
929 | |
930 | ~SmallDenseMap() { |
931 | this->destroyAll(); |
932 | deallocateBuckets(); |
933 | } |
934 | |
935 | void swap(SmallDenseMap& RHS) { |
936 | unsigned TmpNumEntries = RHS.NumEntries; |
937 | RHS.NumEntries = NumEntries; |
938 | NumEntries = TmpNumEntries; |
939 | std::swap(NumTombstones, RHS.NumTombstones); |
940 | |
941 | const KeyT EmptyKey = this->getEmptyKey(); |
942 | const KeyT TombstoneKey = this->getTombstoneKey(); |
943 | if (Small && RHS.Small) { |
944 | |
945 | |
946 | |
947 | |
948 | for (unsigned i = 0, e = InlineBuckets; i != e; ++i) { |
949 | BucketT *LHSB = &getInlineBuckets()[i], |
950 | *RHSB = &RHS.getInlineBuckets()[i]; |
951 | bool hasLHSValue = (!KeyInfoT::isEqual(LHSB->getFirst(), EmptyKey) && |
952 | !KeyInfoT::isEqual(LHSB->getFirst(), TombstoneKey)); |
953 | bool hasRHSValue = (!KeyInfoT::isEqual(RHSB->getFirst(), EmptyKey) && |
954 | !KeyInfoT::isEqual(RHSB->getFirst(), TombstoneKey)); |
955 | if (hasLHSValue && hasRHSValue) { |
956 | |
957 | std::swap(*LHSB, *RHSB); |
958 | continue; |
959 | } |
960 | |
961 | std::swap(LHSB->getFirst(), RHSB->getFirst()); |
962 | if (hasLHSValue) { |
963 | ::new (&RHSB->getSecond()) ValueT(std::move(LHSB->getSecond())); |
964 | LHSB->getSecond().~ValueT(); |
965 | } else if (hasRHSValue) { |
966 | ::new (&LHSB->getSecond()) ValueT(std::move(RHSB->getSecond())); |
967 | RHSB->getSecond().~ValueT(); |
968 | } |
969 | } |
970 | return; |
971 | } |
972 | if (!Small && !RHS.Small) { |
973 | std::swap(getLargeRep()->Buckets, RHS.getLargeRep()->Buckets); |
974 | std::swap(getLargeRep()->NumBuckets, RHS.getLargeRep()->NumBuckets); |
975 | return; |
976 | } |
977 | |
978 | SmallDenseMap &SmallSide = Small ? *this : RHS; |
979 | SmallDenseMap &LargeSide = Small ? RHS : *this; |
980 | |
981 | |
982 | LargeRep TmpRep = std::move(*LargeSide.getLargeRep()); |
983 | LargeSide.getLargeRep()->~LargeRep(); |
984 | LargeSide.Small = true; |
985 | |
986 | |
987 | |
988 | |
989 | for (unsigned i = 0, e = InlineBuckets; i != e; ++i) { |
990 | BucketT *NewB = &LargeSide.getInlineBuckets()[i], |
991 | *OldB = &SmallSide.getInlineBuckets()[i]; |
992 | ::new (&NewB->getFirst()) KeyT(std::move(OldB->getFirst())); |
993 | OldB->getFirst().~KeyT(); |
994 | if (!KeyInfoT::isEqual(NewB->getFirst(), EmptyKey) && |
995 | !KeyInfoT::isEqual(NewB->getFirst(), TombstoneKey)) { |
996 | ::new (&NewB->getSecond()) ValueT(std::move(OldB->getSecond())); |
997 | OldB->getSecond().~ValueT(); |
998 | } |
999 | } |
1000 | |
1001 | |
1002 | |
1003 | SmallSide.Small = false; |
1004 | new (SmallSide.getLargeRep()) LargeRep(std::move(TmpRep)); |
1005 | } |
1006 | |
1007 | SmallDenseMap& operator=(const SmallDenseMap& other) { |
1008 | if (&other != this) |
1009 | copyFrom(other); |
1010 | return *this; |
1011 | } |
1012 | |
1013 | SmallDenseMap& operator=(SmallDenseMap &&other) { |
1014 | this->destroyAll(); |
1015 | deallocateBuckets(); |
1016 | init(0); |
1017 | swap(other); |
1018 | return *this; |
1019 | } |
1020 | |
1021 | void copyFrom(const SmallDenseMap& other) { |
1022 | this->destroyAll(); |
1023 | deallocateBuckets(); |
1024 | Small = true; |
1025 | if (other.getNumBuckets() > InlineBuckets) { |
1026 | Small = false; |
1027 | new (getLargeRep()) LargeRep(allocateBuckets(other.getNumBuckets())); |
1028 | } |
1029 | this->BaseT::copyFrom(other); |
1030 | } |
1031 | |
1032 | void init(unsigned InitBuckets) { |
1033 | Small = true; |
1034 | if (InitBuckets > InlineBuckets) { |
1035 | Small = false; |
1036 | new (getLargeRep()) LargeRep(allocateBuckets(InitBuckets)); |
1037 | } |
1038 | this->BaseT::initEmpty(); |
1039 | } |
1040 | |
1041 | void grow(unsigned AtLeast) { |
1042 | if (AtLeast > InlineBuckets) |
1043 | AtLeast = std::max<unsigned>(64, NextPowerOf2(AtLeast-1)); |
1044 | |
1045 | if (Small) { |
1046 | |
1047 | AlignedCharArrayUnion<BucketT[InlineBuckets]> TmpStorage; |
1048 | BucketT *TmpBegin = reinterpret_cast<BucketT *>(&TmpStorage); |
1049 | BucketT *TmpEnd = TmpBegin; |
1050 | |
1051 | |
1052 | |
1053 | const KeyT EmptyKey = this->getEmptyKey(); |
1054 | const KeyT TombstoneKey = this->getTombstoneKey(); |
1055 | for (BucketT *P = getBuckets(), *E = P + InlineBuckets; P != E; ++P) { |
1056 | if (!KeyInfoT::isEqual(P->getFirst(), EmptyKey) && |
1057 | !KeyInfoT::isEqual(P->getFirst(), TombstoneKey)) { |
1058 | assert(size_t(TmpEnd - TmpBegin) < InlineBuckets && |
1059 | "Too many inline buckets!"); |
1060 | ::new (&TmpEnd->getFirst()) KeyT(std::move(P->getFirst())); |
1061 | ::new (&TmpEnd->getSecond()) ValueT(std::move(P->getSecond())); |
1062 | ++TmpEnd; |
1063 | P->getSecond().~ValueT(); |
1064 | } |
1065 | P->getFirst().~KeyT(); |
1066 | } |
1067 | |
1068 | // AtLeast == InlineBuckets can happen when grow() is only purging |
1069 | // tombstones; otherwise switch to the heap-allocated representation |
1070 | // before re-inserting from the temporary storage. |
1071 | if (AtLeast > InlineBuckets) { |
1072 | Small = false; |
1073 | new (getLargeRep()) LargeRep(allocateBuckets(AtLeast)); |
1074 | } |
1075 | this->moveFromOldBuckets(TmpBegin, TmpEnd); |
1076 | return; |
1077 | } |
1078 | |
1079 | LargeRep OldRep = std::move(*getLargeRep()); |
1080 | getLargeRep()->~LargeRep(); |
1081 | if (AtLeast <= InlineBuckets) { |
1082 | Small = true; |
1083 | } else { |
1084 | new (getLargeRep()) LargeRep(allocateBuckets(AtLeast)); |
1085 | } |
1086 | |
1087 | this->moveFromOldBuckets(OldRep.Buckets, OldRep.Buckets+OldRep.NumBuckets); |
1088 | |
1089 | // Free the old heap-allocated bucket array. |
1090 | deallocate_buffer(OldRep.Buckets, sizeof(BucketT) * OldRep.NumBuckets, |
1091 | alignof(BucketT)); |
1092 | } |
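
A sketch of what grow() means for clients, assuming LLVM's ADT headers: once insertions push the map past its inline buckets, the table is re-allocated on the heap and re-hashed, so pointers and iterators obtained earlier must not be reused. growth_demo is an illustrative name:

  #include "llvm/ADT/DenseMap.h"
  #include <cassert>

  void growth_demo() {
    llvm::SmallDenseMap<unsigned, unsigned, 4> M;
    M[0] = 0;
    unsigned *Before = &M[0];    // points into the inline buckets
    for (unsigned i = 1; i < 64; ++i)
      M[i] = i;                  // triggers grow(); buckets move to the heap
    (void)Before;                // now dangling: re-look-up instead of reusing it
    assert(M.find(0) != M.end() && M.find(0)->second == 0);
  }
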
1093 | |
1094 | void shrink_and_clear() { |
1095 | unsigned OldSize = this->size(); |
1096 | this->destroyAll(); |
1097 | |
1098 | // Pick a bucket count suited to the number of entries we used to hold. |
1099 | unsigned NewNumBuckets = 0; |
1100 | if (OldSize) { |
1101 | NewNumBuckets = 1 << (Log2_32_Ceil(OldSize) + 1); |
1102 | if (NewNumBuckets > InlineBuckets && NewNumBuckets < 64u) |
1103 | NewNumBuckets = 64; |
1104 | } |
1105 | if ((Small && NewNumBuckets <= InlineBuckets) || |
1106 | (!Small && NewNumBuckets == getLargeRep()->NumBuckets)) { |
1107 | this->BaseT::initEmpty(); |
1108 | return; |
1109 | } |
1110 | |
1111 | deallocateBuckets(); |
1112 | init(NewNumBuckets); |
1113 | } |
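
A usage sketch, assuming LLVM's ADT headers: shrink_and_clear() empties the map and re-sizes the bucket table for the number of entries held at the call, so a map that has shrunk a lot gives memory back, which clear() alone does not guarantee. shrink_demo is an illustrative name:

  #include "llvm/ADT/DenseMap.h"

  void shrink_demo() {
    llvm::SmallDenseMap<int, int, 4> M;
    for (int i = 0; i < 1000; ++i)
      M[i] = i;                  // grows to a large heap-allocated table
    for (int i = 10; i < 1000; ++i)
      M.erase(i);                // 10 live entries, big table still held
    M.shrink_and_clear();        // empty again; table re-sized downward
  }
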
1114 | |
1115 | private: |
1116 | unsigned getNumEntries() const { |
1117 | return NumEntries; |
1118 | } |
1119 | |
1120 | void setNumEntries(unsigned Num) { |
1121 | // NumEntries is a 31-bit field; the remaining bit stores the Small flag. |
1122 | assert(Num < (1U << 31) && "Cannot support more than 1<<31 entries"); |
1123 | NumEntries = Num; |
1124 | } |
1125 | |
1126 | unsigned getNumTombstones() const { |
1127 | return NumTombstones; |
1128 | } |
1129 | |
1130 | void setNumTombstones(unsigned Num) { |
1131 | NumTombstones = Num; |
1132 | } |
1133 | |
1134 | const BucketT *getInlineBuckets() const { |
1135 | assert(Small); |
1136 | // The cast does not violate aliasing rules: while Small is set, the |
1137 | // dynamic type of 'storage' (a plain char buffer) is the inline bucket |
1138 | // array. |
1139 | return reinterpret_cast<const BucketT *>(&storage); |
1140 | } |
1141 | |
1142 | BucketT *getInlineBuckets() { |
1143 | return const_cast<BucketT *>( |
1144 | const_cast<const SmallDenseMap *>(this)->getInlineBuckets()); |
1145 | } |
1146 | |
1147 | const LargeRep *getLargeRep() const { |
1148 | assert(!Small); |
1149 | // Same aliasing reasoning as in getInlineBuckets(). |
1150 | return reinterpret_cast<const LargeRep *>(&storage); |
1151 | } |
1152 | |
1153 | LargeRep *getLargeRep() { |
1154 | return const_cast<LargeRep *>( |
1155 | const_cast<const SmallDenseMap *>(this)->getLargeRep()); |
1156 | } |
1157 | |
1158 | const BucketT *getBuckets() const { |
1159 | return Small ? getInlineBuckets() : getLargeRep()->Buckets; |
1160 | } |
1161 | |
1162 | BucketT *getBuckets() { |
1163 | return const_cast<BucketT *>( |
1164 | const_cast<const SmallDenseMap *>(this)->getBuckets()); |
1165 | } |
1166 | |
1167 | unsigned getNumBuckets() const { |
1168 | return Small ? InlineBuckets : getLargeRep()->NumBuckets; |
1169 | } |
1170 | |
1171 | void deallocateBuckets() { |
1172 | if (Small) |
1173 | return; |
1174 | |
1175 | deallocate_buffer(getLargeRep()->Buckets, |
1176 | sizeof(BucketT) * getLargeRep()->NumBuckets, |
1177 | alignof(BucketT)); |
1178 | getLargeRep()->~LargeRep(); |
1179 | } |
1180 | |
1181 | LargeRep allocateBuckets(unsigned Num) { |
1182 | assert(Num > InlineBuckets && "Must allocate more buckets than are inline"); |
1183 | LargeRep Rep = {static_cast<BucketT *>(allocate_buffer( |
1184 | sizeof(BucketT) * Num, alignof(BucketT))), |
1185 | Num}; |
1186 | return Rep; |
1187 | } |
1188 | }; |
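
For context, allocateBuckets() and deallocateBuckets() above pair llvm::allocate_buffer with llvm::deallocate_buffer (declared in llvm/Support/MemAlloc.h), which hand out raw, aligned, uninitialized storage; the map placement-constructs keys and values into it on demand. A standalone sketch of that pattern, assuming the Support headers; Entry and raw_buffer_demo are illustrative:

  #include "llvm/Support/MemAlloc.h"
  #include <cstddef>
  #include <new>

  struct Entry { int Key; int Value; };

  void raw_buffer_demo() {
    const std::size_t N = 16;
    void *Mem = llvm::allocate_buffer(sizeof(Entry) * N, alignof(Entry));
    Entry *E = static_cast<Entry *>(Mem);
    ::new (&E[0]) Entry{1, 10};     // construct only the slots actually used
    E[0].~Entry();                  // destroy them before freeing the buffer
    llvm::deallocate_buffer(Mem, sizeof(Entry) * N, alignof(Entry));
  }
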
1189 | |
1190 | template <typename KeyT, typename ValueT, typename KeyInfoT, typename Bucket, |
1191 | bool IsConst> |
1192 | class DenseMapIterator : DebugEpochBase::HandleBase { |
1193 | friend class DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, true>; |
1194 | friend class DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, false>; |
1195 | |
1196 | public: |
1197 | using difference_type = ptrdiff_t; |
1198 | using value_type = |
1199 | typename std::conditional<IsConst, const Bucket, Bucket>::type; |
1200 | using pointer = value_type *; |
1201 | using reference = value_type &; |
1202 | using iterator_category = std::forward_iterator_tag; |
1203 | |
1204 | private: |
1205 | pointer Ptr = nullptr; |
1206 | pointer End = nullptr; |
1207 | |
1208 | public: |
1209 | DenseMapIterator() = default; |
1210 | |
1211 | DenseMapIterator(pointer Pos, pointer E, const DebugEpochBase &Epoch, |
1212 | bool NoAdvance = false) |
1213 | : DebugEpochBase::HandleBase(&Epoch), Ptr(Pos), End(E) { |
1214 | assert(isHandleInSync() && "invalid construction!"); |
1215 | |
1216 | if (NoAdvance) return; |
1217 | if (shouldReverseIterate<KeyT>()) { |
1218 | RetreatPastEmptyBuckets(); |
1219 | return; |
1220 | } |
1221 | AdvancePastEmptyBuckets(); |
1222 | } |
1223 | |
1224 | // Converting constructor from a non-const to a const iterator. SFINAE'd |
1225 | // out when the source is already const, so it cannot act as a |
1226 | // user-defined copy constructor. |
1227 | template <bool IsConstSrc, |
1228 | typename = std::enable_if_t<!IsConstSrc && IsConst>> |
1229 | DenseMapIterator( |
1230 | const DenseMapIterator<KeyT, ValueT, KeyInfoT, Bucket, IsConstSrc> &I) |
1231 | : DebugEpochBase::HandleBase(I), Ptr(I.Ptr), End(I.End) {} |
1232 | |
1233 | reference operator*() const { |
1234 | assert(isHandleInSync() && "invalid iterator access!"); |
1235 | assert(Ptr != End && "dereferencing end() iterator"); |
1236 | if (shouldReverseIterate<KeyT>()) |
1237 | return Ptr[-1]; |
1238 | return *Ptr; |
1239 | } |
1240 | pointer operator->() const { |
1241 | assert(isHandleInSync() && "invalid iterator access!"); |
1242 | assert(Ptr != End && "dereferencing end() iterator"); |
1243 | if (shouldReverseIterate<KeyT>()) |
1244 | return &(Ptr[-1]); |
1245 | return Ptr; |
1246 | } |
1247 | |
1248 | friend bool operator==(const DenseMapIterator &LHS, |
1249 | const DenseMapIterator &RHS) { |
1250 | assert((!LHS.Ptr || LHS.isHandleInSync()) && "handle not in sync!"); |
1251 | assert((!RHS.Ptr || RHS.isHandleInSync()) && "handle not in sync!"); |
1252 | assert(LHS.getEpochAddress() == RHS.getEpochAddress() && |
1253 | "comparing incomparable iterators!"); |
1254 | return LHS.Ptr == RHS.Ptr; |
| 25 | Assuming 'LHS.Ptr' is not equal to 'RHS.Ptr' |
| 26 | Returning zero, which participates in a condition later |
1255 | } |
1256 | |
1257 | friend bool operator!=(const DenseMapIterator &LHS, |
1258 | const DenseMapIterator &RHS) { |
1259 | return !(LHS == RHS); |
| 27 | Returning from 'operator==' |
| 28 | Returning the value 1, which participates in a condition later |
1260 | } |
1261 | |
1262 | inline DenseMapIterator& operator++() { |
1263 | assert(isHandleInSync() && "invalid iterator access!"); |
1264 | assert(Ptr != End && "incrementing end() iterator"); |
1265 | if (shouldReverseIterate<KeyT>()) { |
1266 | --Ptr; |
1267 | RetreatPastEmptyBuckets(); |
1268 | return *this; |
1269 | } |
1270 | ++Ptr; |
1271 | AdvancePastEmptyBuckets(); |
1272 | return *this; |
1273 | } |
1274 | DenseMapIterator operator++(int) { |
1275 | assert(isHandleInSync() && "invalid iterator access!"); |
1276 | DenseMapIterator tmp = *this; ++*this; return tmp; |
1277 | } |
1278 | |
1279 | private: |
1280 | void AdvancePastEmptyBuckets() { |
1281 | assert(Ptr <= End); |
1282 | const KeyT Empty = KeyInfoT::getEmptyKey(); |
1283 | const KeyT Tombstone = KeyInfoT::getTombstoneKey(); |
1284 | |
1285 | while (Ptr != End && (KeyInfoT::isEqual(Ptr->getFirst(), Empty) || |
1286 | KeyInfoT::isEqual(Ptr->getFirst(), Tombstone))) |
1287 | ++Ptr; |
1288 | } |
1289 | |
1290 | void RetreatPastEmptyBuckets() { |
1291 | assert(Ptr >= End); |
1292 | const KeyT Empty = KeyInfoT::getEmptyKey(); |
1293 | const KeyT Tombstone = KeyInfoT::getTombstoneKey(); |
1294 | |
1295 | while (Ptr != End && (KeyInfoT::isEqual(Ptr[-1].getFirst(), Empty) || |
1296 | KeyInfoT::isEqual(Ptr[-1].getFirst(), Tombstone))) |
1297 | --Ptr; |
1298 | } |
1299 | }; |
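
An iteration sketch, assuming LLVM's ADT headers: the iterator skips empty and tombstone buckets, so a range-for visits only live entries, in an unspecified hash-table order. iterate_demo is an illustrative name:

  #include "llvm/ADT/DenseMap.h"
  #include <cstdio>

  void iterate_demo() {
    llvm::DenseMap<int, const char *> M;
    M.insert({1, "one"});
    M.insert({2, "two"});
    M.erase(1);                        // leaves a tombstone bucket behind
    for (const auto &KV : M)           // empties and tombstones are skipped
      std::printf("%d -> %s\n", KV.first, KV.second);
  }
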
1300 | |
1301 | template <typename KeyT, typename ValueT, typename KeyInfoT> |
1302 | inline size_t capacity_in_bytes(const DenseMap<KeyT, ValueT, KeyInfoT> &X) { |
1303 | return X.getMemorySize(); |
1304 | } |
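
A small sketch of capacity_in_bytes, assuming LLVM's ADT headers: it reports the size of the bucket array itself (via getMemorySize()), not memory owned by keys or values, so it is a lower bound on the map's footprint. footprint_demo is an illustrative name:

  #include "llvm/ADT/DenseMap.h"
  #include <cstdio>

  void footprint_demo() {
    llvm::DenseMap<int, int> M;
    for (int i = 0; i < 100; ++i)
      M[i] = i;
    std::printf("bucket array: %zu bytes\n", llvm::capacity_in_bytes(M));
  }
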
1305 | |
1306 | } // end namespace llvm |
1307 | |
1308 | #endif // LLVM_ADT_DENSEMAP_H |