File: src/gnu/usr.bin/clang/libLLVM/../../../llvm/llvm/lib/Transforms/IPO/Attributor.cpp
Warning: line 579, column 10: Called C++ object pointer is null
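
Note on the warning: the flagged call is CB->hasOperandBundles() at source line 579 in the listing below. CB comes from dyn_cast<CallBase>(&IRP.getAnchorValue()) at line 557, which returns null when the anchor value is not a CallBase, and the guard at line 576, assert(CB && "Expected call site!"), expands to ((void)0) in this build, so the analyzer still sees a possibly-null dereference on that path. The sketch that follows is a minimal, hypothetical illustration of that pattern, not the LLVM code; Value, CallBase, dynCast, and checkBundles are stand-in names. It shows the kind of explicit null check that makes such a dereference visibly safe to the analyzer.

#include <cassert>

struct Value            { virtual ~Value() = default; };
struct CallBase : Value { bool hasOperandBundles() const { return false; } };

// Stand-in for llvm::dyn_cast: yields nullptr when the dynamic type does not match.
static const CallBase *dynCast(const Value *V) {
  return dynamic_cast<const CallBase *>(V);
}

static bool checkBundles(const Value &AnchorValue) {
  const CallBase *CB = dynCast(&AnchorValue);
  assert(CB && "Expected call site!"); // compiled to ((void)0) when NDEBUG is defined
  if (!CB)                             // explicit guard the analyzer can reason about
    return false;
  return CB->hasOperandBundles();
}

int main() {
  CallBase Call;
  bool HasBundles = checkBundles(Call); // passing a plain Value instead would trip the
                                        // assert in a debug build and take the early
                                        // return in an NDEBUG build
  return HasBundles ? 1 : 0;
}
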
1 | //===- Attributor.cpp - Module-wide attribute deduction -------------------===// | |||
2 | // | |||
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. | |||
4 | // See https://llvm.org/LICENSE.txt for license information. | |||
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception | |||
6 | // | |||
7 | //===----------------------------------------------------------------------===// | |||
8 | // | |||
9 | // This file implements an interprocedural pass that deduces and/or propagates | |||
10 | // attributes. This is done in an abstract interpretation style fixpoint | |||
11 | // iteration. See the Attributor.h file comment and the class descriptions in | |||
12 | // that file for more information. | |||
13 | // | |||
14 | //===----------------------------------------------------------------------===// | |||
15 | ||||
16 | #include "llvm/Transforms/IPO/Attributor.h" | |||
17 | ||||
18 | #include "llvm/ADT/GraphTraits.h" | |||
19 | #include "llvm/ADT/PointerIntPair.h" | |||
20 | #include "llvm/ADT/STLExtras.h" | |||
21 | #include "llvm/ADT/Statistic.h" | |||
22 | #include "llvm/ADT/TinyPtrVector.h" | |||
23 | #include "llvm/Analysis/InlineCost.h" | |||
24 | #include "llvm/Analysis/LazyValueInfo.h" | |||
25 | #include "llvm/Analysis/MemorySSAUpdater.h" | |||
26 | #include "llvm/Analysis/MustExecute.h" | |||
27 | #include "llvm/Analysis/ValueTracking.h" | |||
28 | #include "llvm/IR/Attributes.h" | |||
29 | #include "llvm/IR/Constant.h" | |||
30 | #include "llvm/IR/Constants.h" | |||
31 | #include "llvm/IR/GlobalValue.h" | |||
32 | #include "llvm/IR/GlobalVariable.h" | |||
33 | #include "llvm/IR/IRBuilder.h" | |||
34 | #include "llvm/IR/Instruction.h" | |||
35 | #include "llvm/IR/Instructions.h" | |||
36 | #include "llvm/IR/IntrinsicInst.h" | |||
37 | #include "llvm/IR/NoFolder.h" | |||
38 | #include "llvm/IR/ValueHandle.h" | |||
39 | #include "llvm/IR/Verifier.h" | |||
40 | #include "llvm/InitializePasses.h" | |||
41 | #include "llvm/Support/Casting.h" | |||
42 | #include "llvm/Support/CommandLine.h" | |||
43 | #include "llvm/Support/Debug.h" | |||
44 | #include "llvm/Support/DebugCounter.h" | |||
45 | #include "llvm/Support/FileSystem.h" | |||
46 | #include "llvm/Support/GraphWriter.h" | |||
47 | #include "llvm/Support/raw_ostream.h" | |||
48 | #include "llvm/Transforms/Utils/BasicBlockUtils.h" | |||
49 | #include "llvm/Transforms/Utils/Cloning.h" | |||
50 | #include "llvm/Transforms/Utils/Local.h" | |||
51 | ||||
52 | #include <cassert> | |||
53 | #include <string> | |||
54 | ||||
55 | using namespace llvm; | |||
56 | ||||
57 | #define DEBUG_TYPE "attributor" | |||
58 | ||||
59 | DEBUG_COUNTER(ManifestDBGCounter, "attributor-manifest", | |||
60 | "Determine what attributes are manifested in the IR"); | |||
61 | ||||
62 | STATISTIC(NumFnDeleted, "Number of function deleted"); | |||
63 | STATISTIC(NumFnWithExactDefinition, | |||
64 | "Number of functions with exact definitions"); | |||
65 | STATISTIC(NumFnWithoutExactDefinition, | |||
66 | "Number of functions without exact definitions"); | |||
67 | STATISTIC(NumFnShallowWrappersCreated, "Number of shallow wrappers created"); | |||
68 | STATISTIC(NumAttributesTimedOut, | |||
69 | "Number of abstract attributes timed out before fixpoint"); | |||
70 | STATISTIC(NumAttributesValidFixpoint, | |||
71 | "Number of abstract attributes in a valid fixpoint state"); | |||
72 | STATISTIC(NumAttributesManifested, | |||
73 | "Number of abstract attributes manifested in IR"); | |||
74 | ||||
75 | // TODO: Determine a good default value. | |||
76 | // | |||
77 | // In the LLVM-TS and SPEC2006, 32 seems to not induce compile time overheads | |||
78 | // (when run with the first 5 abstract attributes). The results also indicate | |||
79 | // that we never reach 32 iterations but always find a fixpoint sooner. | |||
80 | // | |||
81 | // This will become more evolved once we perform two interleaved fixpoint | |||
82 | // iterations: bottom-up and top-down. | |||
83 | static cl::opt<unsigned> | |||
84 | SetFixpointIterations("attributor-max-iterations", cl::Hidden, | |||
85 | cl::desc("Maximal number of fixpoint iterations."), | |||
86 | cl::init(32)); | |||
87 | ||||
88 | static cl::opt<unsigned, true> MaxInitializationChainLengthX( | |||
89 | "attributor-max-initialization-chain-length", cl::Hidden, | |||
90 | cl::desc( | |||
91 | "Maximal number of chained initializations (to avoid stack overflows)"), | |||
92 | cl::location(MaxInitializationChainLength), cl::init(1024)); | |||
93 | unsigned llvm::MaxInitializationChainLength; | |||
94 | ||||
95 | static cl::opt<bool> VerifyMaxFixpointIterations( | |||
96 | "attributor-max-iterations-verify", cl::Hidden, | |||
97 | cl::desc("Verify that max-iterations is a tight bound for a fixpoint"), | |||
98 | cl::init(false)); | |||
99 | ||||
100 | static cl::opt<bool> AnnotateDeclarationCallSites( | |||
101 | "attributor-annotate-decl-cs", cl::Hidden, | |||
102 | cl::desc("Annotate call sites of function declarations."), cl::init(false)); | |||
103 | ||||
104 | static cl::opt<bool> EnableHeapToStack("enable-heap-to-stack-conversion", | |||
105 | cl::init(true), cl::Hidden); | |||
106 | ||||
107 | static cl::opt<bool> | |||
108 | AllowShallowWrappers("attributor-allow-shallow-wrappers", cl::Hidden, | |||
109 | cl::desc("Allow the Attributor to create shallow " | |||
110 | "wrappers for non-exact definitions."), | |||
111 | cl::init(false)); | |||
112 | ||||
113 | static cl::opt<bool> | |||
114 | AllowDeepWrapper("attributor-allow-deep-wrappers", cl::Hidden, | |||
115 | cl::desc("Allow the Attributor to use IP information " | |||
116 | "derived from non-exact functions via cloning"), | |||
117 | cl::init(false)); | |||
118 | ||||
119 | // These options can only be used for debug builds. | |||
120 | #ifndef NDEBUG | |||
121 | static cl::list<std::string> | |||
122 | SeedAllowList("attributor-seed-allow-list", cl::Hidden, | |||
123 | cl::desc("Comma seperated list of attribute names that are " | |||
124 | "allowed to be seeded."), | |||
125 | cl::ZeroOrMore, cl::CommaSeparated); | |||
126 | ||||
127 | static cl::list<std::string> FunctionSeedAllowList( | |||
128 | "attributor-function-seed-allow-list", cl::Hidden, | |||
129 | cl::desc("Comma seperated list of function names that are " | |||
130 | "allowed to be seeded."), | |||
131 | cl::ZeroOrMore, cl::CommaSeparated); | |||
132 | #endif | |||
133 | ||||
134 | static cl::opt<bool> | |||
135 | DumpDepGraph("attributor-dump-dep-graph", cl::Hidden, | |||
136 | cl::desc("Dump the dependency graph to dot files."), | |||
137 | cl::init(false)); | |||
138 | ||||
139 | static cl::opt<std::string> DepGraphDotFileNamePrefix( | |||
140 | "attributor-depgraph-dot-filename-prefix", cl::Hidden, | |||
141 | cl::desc("The prefix used for the CallGraph dot file names.")); | |||
142 | ||||
143 | static cl::opt<bool> ViewDepGraph("attributor-view-dep-graph", cl::Hidden, | |||
144 | cl::desc("View the dependency graph."), | |||
145 | cl::init(false)); | |||
146 | ||||
147 | static cl::opt<bool> PrintDependencies("attributor-print-dep", cl::Hidden, | |||
148 | cl::desc("Print attribute dependencies"), | |||
149 | cl::init(false)); | |||
150 | ||||
151 | static cl::opt<bool> EnableCallSiteSpecific( | |||
152 | "attributor-enable-call-site-specific-deduction", cl::Hidden, | |||
153 | cl::desc("Allow the Attributor to do call site specific analysis"), | |||
154 | cl::init(false)); | |||
155 | ||||
156 | static cl::opt<bool> | |||
157 | PrintCallGraph("attributor-print-call-graph", cl::Hidden, | |||
158 | cl::desc("Print Attributor's internal call graph"), | |||
159 | cl::init(false)); | |||
160 | ||||
161 | static cl::opt<bool> SimplifyAllLoads("attributor-simplify-all-loads", | |||
162 | cl::Hidden, | |||
163 | cl::desc("Try to simplify all loads."), | |||
164 | cl::init(true)); | |||
165 | ||||
166 | /// Logic operators for the change status enum class. | |||
167 | /// | |||
168 | ///{ | |||
169 | ChangeStatus llvm::operator|(ChangeStatus L, ChangeStatus R) { | |||
170 | return L == ChangeStatus::CHANGED ? L : R; | |||
171 | } | |||
172 | ChangeStatus &llvm::operator|=(ChangeStatus &L, ChangeStatus R) { | |||
173 | L = L | R; | |||
174 | return L; | |||
175 | } | |||
176 | ChangeStatus llvm::operator&(ChangeStatus L, ChangeStatus R) { | |||
177 | return L == ChangeStatus::UNCHANGED ? L : R; | |||
178 | } | |||
179 | ChangeStatus &llvm::operator&=(ChangeStatus &L, ChangeStatus R) { | |||
180 | L = L & R; | |||
181 | return L; | |||
182 | } | |||
183 | ///} | |||
184 | ||||
185 | bool AA::isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, | |||
186 | const Value &V) { | |||
187 | if (auto *C = dyn_cast<Constant>(&V)) | |||
188 | return !C->isThreadDependent(); | |||
189 | // TODO: Inspect and cache more complex instructions. | |||
190 | if (auto *CB = dyn_cast<CallBase>(&V)) | |||
191 | return CB->getNumOperands() == 0 && !CB->mayHaveSideEffects() && | |||
192 | !CB->mayReadFromMemory(); | |||
193 | const Function *Scope = nullptr; | |||
194 | if (auto *I = dyn_cast<Instruction>(&V)) | |||
195 | Scope = I->getFunction(); | |||
196 | if (auto *A = dyn_cast<Argument>(&V)) | |||
197 | Scope = A->getParent(); | |||
198 | if (!Scope) | |||
199 | return false; | |||
200 | auto &NoRecurseAA = A.getAAFor<AANoRecurse>( | |||
201 | QueryingAA, IRPosition::function(*Scope), DepClassTy::OPTIONAL); | |||
202 | return NoRecurseAA.isAssumedNoRecurse(); | |||
203 | } | |||
204 | ||||
205 | Constant *AA::getInitialValueForObj(Value &Obj, Type &Ty) { | |||
206 | if (isa<AllocaInst>(Obj)) | |||
207 | return UndefValue::get(&Ty); | |||
208 | auto *GV = dyn_cast<GlobalVariable>(&Obj); | |||
209 | if (!GV || !GV->hasLocalLinkage()) | |||
210 | return nullptr; | |||
211 | if (!GV->hasInitializer()) | |||
212 | return UndefValue::get(&Ty); | |||
213 | return dyn_cast_or_null<Constant>(getWithType(*GV->getInitializer(), Ty)); | |||
214 | } | |||
215 | ||||
216 | bool AA::isValidInScope(const Value &V, const Function *Scope) { | |||
217 | if (isa<Constant>(V)) | |||
218 | return true; | |||
219 | if (auto *I = dyn_cast<Instruction>(&V)) | |||
220 | return I->getFunction() == Scope; | |||
221 | if (auto *A = dyn_cast<Argument>(&V)) | |||
222 | return A->getParent() == Scope; | |||
223 | return false; | |||
224 | } | |||
225 | ||||
226 | bool AA::isValidAtPosition(const Value &V, const Instruction &CtxI, | |||
227 | InformationCache &InfoCache) { | |||
228 | if (isa<Constant>(V)) | |||
229 | return true; | |||
230 | const Function *Scope = CtxI.getFunction(); | |||
231 | if (auto *A = dyn_cast<Argument>(&V)) | |||
232 | return A->getParent() == Scope; | |||
233 | if (auto *I = dyn_cast<Instruction>(&V)) | |||
234 | if (I->getFunction() == Scope) { | |||
235 | const DominatorTree *DT = | |||
236 | InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(*Scope); | |||
237 | return DT && DT->dominates(I, &CtxI); | |||
238 | } | |||
239 | return false; | |||
240 | } | |||
241 | ||||
242 | Value *AA::getWithType(Value &V, Type &Ty) { | |||
243 | if (V.getType() == &Ty) | |||
244 | return &V; | |||
245 | if (isa<PoisonValue>(V)) | |||
246 | return PoisonValue::get(&Ty); | |||
247 | if (isa<UndefValue>(V)) | |||
248 | return UndefValue::get(&Ty); | |||
249 | if (auto *C = dyn_cast<Constant>(&V)) { | |||
250 | if (C->isNullValue()) | |||
251 | return Constant::getNullValue(&Ty); | |||
252 | if (C->getType()->isPointerTy() && Ty.isPointerTy()) | |||
253 | return ConstantExpr::getPointerCast(C, &Ty); | |||
254 | if (C->getType()->getPrimitiveSizeInBits() >= Ty.getPrimitiveSizeInBits()) { | |||
255 | if (C->getType()->isIntegerTy() && Ty.isIntegerTy()) | |||
256 | return ConstantExpr::getTrunc(C, &Ty, /* OnlyIfReduced */ true); | |||
257 | if (C->getType()->isFloatingPointTy() && Ty.isFloatingPointTy()) | |||
258 | return ConstantExpr::getFPTrunc(C, &Ty, /* OnlyIfReduced */ true); | |||
259 | } | |||
260 | } | |||
261 | return nullptr; | |||
262 | } | |||
263 | ||||
264 | Optional<Value *> | |||
265 | AA::combineOptionalValuesInAAValueLatice(const Optional<Value *> &A, | |||
266 | const Optional<Value *> &B, Type *Ty) { | |||
267 | if (A == B) | |||
268 | return A; | |||
269 | if (!B.hasValue()) | |||
270 | return A; | |||
271 | if (*B == nullptr) | |||
272 | return nullptr; | |||
273 | if (!A.hasValue()) | |||
274 | return Ty ? getWithType(**B, *Ty) : nullptr; | |||
275 | if (*A == nullptr) | |||
276 | return nullptr; | |||
277 | if (!Ty) | |||
278 | Ty = (*A)->getType(); | |||
279 | if (isa_and_nonnull<UndefValue>(*A)) | |||
280 | return getWithType(**B, *Ty); | |||
281 | if (isa<UndefValue>(*B)) | |||
282 | return A; | |||
283 | if (*A && *B && *A == getWithType(**B, *Ty)) | |||
284 | return A; | |||
285 | return nullptr; | |||
286 | } | |||
287 | ||||
288 | bool AA::getPotentialCopiesOfStoredValue( | |||
289 | Attributor &A, StoreInst &SI, SmallSetVector<Value *, 4> &PotentialCopies, | |||
290 | const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation) { | |||
291 | ||||
292 | Value &Ptr = *SI.getPointerOperand(); | |||
293 | SmallVector<Value *, 8> Objects; | |||
294 | if (!AA::getAssumedUnderlyingObjects(A, Ptr, Objects, QueryingAA, &SI)) { | |||
295 | LLVM_DEBUG( | |||
296 | dbgs() << "Underlying objects stored into could not be determined\n";); | |||
297 | return false; | |||
298 | } | |||
299 | ||||
300 | SmallVector<const AAPointerInfo *> PIs; | |||
301 | SmallVector<Value *> NewCopies; | |||
302 | ||||
303 | for (Value *Obj : Objects) { | |||
304 | LLVM_DEBUG(dbgs() << "Visit underlying object " << *Obj << "\n"); | |||
305 | if (isa<UndefValue>(Obj)) | |||
306 | continue; | |||
307 | if (isa<ConstantPointerNull>(Obj)) { | |||
308 | // A null pointer access can be undefined but any offset from null may | |||
309 | // be OK. We do not try to optimize the latter. | |||
310 | if (!NullPointerIsDefined(SI.getFunction(), | |||
311 | Ptr.getType()->getPointerAddressSpace()) && | |||
312 | A.getAssumedSimplified(Ptr, QueryingAA, UsedAssumedInformation) == | |||
313 | Obj) | |||
314 | continue; | |||
315 | LLVM_DEBUG( | |||
316 | dbgs() << "Underlying object is a valid nullptr, giving up.\n";); | |||
317 | return false; | |||
318 | } | |||
319 | if (!isa<AllocaInst>(Obj) && !isa<GlobalVariable>(Obj)) { | |||
320 | LLVM_DEBUG(dbgs() << "Underlying object is not supported yet: " << *Obj | |||
321 | << "\n";); | |||
322 | return false; | |||
323 | } | |||
324 | if (auto *GV = dyn_cast<GlobalVariable>(Obj)) | |||
325 | if (!GV->hasLocalLinkage()) { | |||
326 | LLVM_DEBUG(dbgs() << "Underlying object is global with external " | |||
327 | "linkage, not supported yet: " | |||
328 | << *Obj << "\n";); | |||
329 | return false; | |||
330 | } | |||
331 | ||||
332 | auto CheckAccess = [&](const AAPointerInfo::Access &Acc, bool IsExact) { | |||
333 | if (!Acc.isRead()) | |||
334 | return true; | |||
335 | auto *LI = dyn_cast<LoadInst>(Acc.getRemoteInst()); | |||
336 | if (!LI) { | |||
337 | LLVM_DEBUG(dbgs() << "Underlying object read through a non-load " | |||
338 | "instruction not supported yet: " | |||
339 | << *Acc.getRemoteInst() << "\n";); | |||
340 | return false; | |||
341 | } | |||
342 | NewCopies.push_back(LI); | |||
343 | return true; | |||
344 | }; | |||
345 | ||||
346 | auto &PI = A.getAAFor<AAPointerInfo>(QueryingAA, IRPosition::value(*Obj), | |||
347 | DepClassTy::NONE); | |||
348 | if (!PI.forallInterferingAccesses(SI, CheckAccess)) { | |||
349 | LLVM_DEBUG( | |||
350 | dbgs() | |||
351 | << "Failed to verify all interfering accesses for underlying object: " | |||
352 | << *Obj << "\n"); | |||
353 | return false; | |||
354 | } | |||
355 | PIs.push_back(&PI); | |||
356 | } | |||
357 | ||||
358 | for (auto *PI : PIs) { | |||
359 | if (!PI->getState().isAtFixpoint()) | |||
360 | UsedAssumedInformation = true; | |||
361 | A.recordDependence(*PI, QueryingAA, DepClassTy::OPTIONAL); | |||
362 | } | |||
363 | PotentialCopies.insert(NewCopies.begin(), NewCopies.end()); | |||
364 | ||||
365 | return true; | |||
366 | } | |||
367 | ||||
368 | /// Return true if \p New is equal or worse than \p Old. | |||
369 | static bool isEqualOrWorse(const Attribute &New, const Attribute &Old) { | |||
370 | if (!Old.isIntAttribute()) | |||
371 | return true; | |||
372 | ||||
373 | return Old.getValueAsInt() >= New.getValueAsInt(); | |||
374 | } | |||
375 | ||||
376 | /// Return true if the information provided by \p Attr was added to the | |||
377 | /// attribute list \p Attrs. This is only the case if it was not already present | |||
378 | /// in \p Attrs at the position described by \p PK and \p AttrIdx. | |||
379 | static bool addIfNotExistent(LLVMContext &Ctx, const Attribute &Attr, | |||
380 | AttributeList &Attrs, int AttrIdx, | |||
381 | bool ForceReplace = false) { | |||
382 | ||||
383 | if (Attr.isEnumAttribute()) { | |||
384 | Attribute::AttrKind Kind = Attr.getKindAsEnum(); | |||
385 | if (Attrs.hasAttribute(AttrIdx, Kind)) | |||
386 | if (!ForceReplace && | |||
387 | isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind))) | |||
388 | return false; | |||
389 | Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr); | |||
390 | return true; | |||
391 | } | |||
392 | if (Attr.isStringAttribute()) { | |||
393 | StringRef Kind = Attr.getKindAsString(); | |||
394 | if (Attrs.hasAttribute(AttrIdx, Kind)) | |||
395 | if (!ForceReplace && | |||
396 | isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind))) | |||
397 | return false; | |||
398 | Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr); | |||
399 | return true; | |||
400 | } | |||
401 | if (Attr.isIntAttribute()) { | |||
402 | Attribute::AttrKind Kind = Attr.getKindAsEnum(); | |||
403 | if (Attrs.hasAttribute(AttrIdx, Kind)) | |||
404 | if (!ForceReplace && | |||
405 | isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind))) | |||
406 | return false; | |||
407 | Attrs = Attrs.removeAttribute(Ctx, AttrIdx, Kind); | |||
408 | Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr); | |||
409 | return true; | |||
410 | } | |||
411 | ||||
412 | llvm_unreachable("Expected enum or string attribute!"); | |||
413 | } | |||
414 | ||||
415 | Argument *IRPosition::getAssociatedArgument() const { | |||
416 | if (getPositionKind() == IRP_ARGUMENT) | |||
417 | return cast<Argument>(&getAnchorValue()); | |||
418 | ||||
419 | // Not an Argument and no argument number means this is not a call site | |||
420 | // argument, thus we cannot find a callback argument to return. | |||
421 | int ArgNo = getCallSiteArgNo(); | |||
422 | if (ArgNo < 0) | |||
423 | return nullptr; | |||
424 | ||||
425 | // Use abstract call sites to make the connection between the call site | |||
426 | // values and the ones in callbacks. If a callback was found that makes use | |||
427 | // of the underlying call site operand, we want the corresponding callback | |||
428 | // callee argument and not the direct callee argument. | |||
429 | Optional<Argument *> CBCandidateArg; | |||
430 | SmallVector<const Use *, 4> CallbackUses; | |||
431 | const auto &CB = cast<CallBase>(getAnchorValue()); | |||
432 | AbstractCallSite::getCallbackUses(CB, CallbackUses); | |||
433 | for (const Use *U : CallbackUses) { | |||
434 | AbstractCallSite ACS(U); | |||
435 | assert(ACS && ACS.isCallbackCall()); | |||
436 | if (!ACS.getCalledFunction()) | |||
437 | continue; | |||
438 | ||||
439 | for (unsigned u = 0, e = ACS.getNumArgOperands(); u < e; u++) { | |||
440 | ||||
441 | // Test if the underlying call site operand is argument number u of the | |||
442 | // callback callee. | |||
443 | if (ACS.getCallArgOperandNo(u) != ArgNo) | |||
444 | continue; | |||
445 | ||||
446 | assert(ACS.getCalledFunction()->arg_size() > u && | |||
447 | "ACS mapped into var-args arguments!"); | |||
448 | if (CBCandidateArg.hasValue()) { | |||
449 | CBCandidateArg = nullptr; | |||
450 | break; | |||
451 | } | |||
452 | CBCandidateArg = ACS.getCalledFunction()->getArg(u); | |||
453 | } | |||
454 | } | |||
455 | ||||
456 | // If we found a unique callback candidate argument, return it. | |||
457 | if (CBCandidateArg.hasValue() && CBCandidateArg.getValue()) | |||
458 | return CBCandidateArg.getValue(); | |||
459 | ||||
460 | // If no callbacks were found, or none used the underlying call site operand | |||
461 | // exclusively, use the direct callee argument if available. | |||
462 | const Function *Callee = CB.getCalledFunction(); | |||
463 | if (Callee && Callee->arg_size() > unsigned(ArgNo)) | |||
464 | return Callee->getArg(ArgNo); | |||
465 | ||||
466 | return nullptr; | |||
467 | } | |||
468 | ||||
469 | ChangeStatus AbstractAttribute::update(Attributor &A) { | |||
470 | ChangeStatus HasChanged = ChangeStatus::UNCHANGED; | |||
471 | if (getState().isAtFixpoint()) | |||
472 | return HasChanged; | |||
473 | ||||
474 | LLVM_DEBUG(dbgs() << "[Attributor] Update: " << *this << "\n"); | |||
475 | ||||
476 | HasChanged = updateImpl(A); | |||
477 | ||||
478 | LLVM_DEBUG(dbgs() << "[Attributor] Update " << HasChanged << " " << *this | |||
479 | << "\n"); | |||
480 | ||||
481 | return HasChanged; | |||
482 | } | |||
483 | ||||
484 | ChangeStatus | |||
485 | IRAttributeManifest::manifestAttrs(Attributor &A, const IRPosition &IRP, | |||
486 | const ArrayRef<Attribute> &DeducedAttrs, | |||
487 | bool ForceReplace) { | |||
488 | Function *ScopeFn = IRP.getAnchorScope(); | |||
489 | IRPosition::Kind PK = IRP.getPositionKind(); | |||
490 | ||||
491 | // In the following some generic code that will manifest attributes in | |||
492 | // DeducedAttrs if they improve the current IR. Due to the different | |||
493 | // annotation positions we use the underlying AttributeList interface. | |||
494 | ||||
495 | AttributeList Attrs; | |||
496 | switch (PK) { | |||
497 | case IRPosition::IRP_INVALID: | |||
498 | case IRPosition::IRP_FLOAT: | |||
499 | return ChangeStatus::UNCHANGED; | |||
500 | case IRPosition::IRP_ARGUMENT: | |||
501 | case IRPosition::IRP_FUNCTION: | |||
502 | case IRPosition::IRP_RETURNED: | |||
503 | Attrs = ScopeFn->getAttributes(); | |||
504 | break; | |||
505 | case IRPosition::IRP_CALL_SITE: | |||
506 | case IRPosition::IRP_CALL_SITE_RETURNED: | |||
507 | case IRPosition::IRP_CALL_SITE_ARGUMENT: | |||
508 | Attrs = cast<CallBase>(IRP.getAnchorValue()).getAttributes(); | |||
509 | break; | |||
510 | } | |||
511 | ||||
512 | ChangeStatus HasChanged = ChangeStatus::UNCHANGED; | |||
513 | LLVMContext &Ctx = IRP.getAnchorValue().getContext(); | |||
514 | for (const Attribute &Attr : DeducedAttrs) { | |||
515 | if (!addIfNotExistent(Ctx, Attr, Attrs, IRP.getAttrIdx(), ForceReplace)) | |||
516 | continue; | |||
517 | ||||
518 | HasChanged = ChangeStatus::CHANGED; | |||
519 | } | |||
520 | ||||
521 | if (HasChanged == ChangeStatus::UNCHANGED) | |||
522 | return HasChanged; | |||
523 | ||||
524 | switch (PK) { | |||
525 | case IRPosition::IRP_ARGUMENT: | |||
526 | case IRPosition::IRP_FUNCTION: | |||
527 | case IRPosition::IRP_RETURNED: | |||
528 | ScopeFn->setAttributes(Attrs); | |||
529 | break; | |||
530 | case IRPosition::IRP_CALL_SITE: | |||
531 | case IRPosition::IRP_CALL_SITE_RETURNED: | |||
532 | case IRPosition::IRP_CALL_SITE_ARGUMENT: | |||
533 | cast<CallBase>(IRP.getAnchorValue()).setAttributes(Attrs); | |||
534 | break; | |||
535 | case IRPosition::IRP_INVALID: | |||
536 | case IRPosition::IRP_FLOAT: | |||
537 | break; | |||
538 | } | |||
539 | ||||
540 | return HasChanged; | |||
541 | } | |||
542 | ||||
543 | const IRPosition IRPosition::EmptyKey(DenseMapInfo<void *>::getEmptyKey()); | |||
544 | const IRPosition | |||
545 | IRPosition::TombstoneKey(DenseMapInfo<void *>::getTombstoneKey()); | |||
546 | ||||
547 | SubsumingPositionIterator::SubsumingPositionIterator(const IRPosition &IRP) { | |||
548 | IRPositions.emplace_back(IRP); | |||
549 | ||||
550 | // Helper to determine if operand bundles on a call site are benign or | |||
551 | // potentially problematic. We handle only llvm.assume for now. | |||
552 | auto CanIgnoreOperandBundles = [](const CallBase &CB) { | |||
553 | return (isa<IntrinsicInst>(CB) && | |||
554 | cast<IntrinsicInst>(CB).getIntrinsicID() == Intrinsic::assume); | |||
555 | }; | |||
556 | ||||
557 | const auto *CB = dyn_cast<CallBase>(&IRP.getAnchorValue()); | |||
558 | switch (IRP.getPositionKind()) { | |||
559 | case IRPosition::IRP_INVALID: | |||
560 | case IRPosition::IRP_FLOAT: | |||
561 | case IRPosition::IRP_FUNCTION: | |||
562 | return; | |||
563 | case IRPosition::IRP_ARGUMENT: | |||
564 | case IRPosition::IRP_RETURNED: | |||
565 | IRPositions.emplace_back(IRPosition::function(*IRP.getAnchorScope())); | |||
566 | return; | |||
567 | case IRPosition::IRP_CALL_SITE: | |||
568 | assert(CB && "Expected call site!"); | |||
569 | // TODO: We need to look at the operand bundles similar to the redirection | |||
570 | // in CallBase. | |||
571 | if (!CB->hasOperandBundles() || CanIgnoreOperandBundles(*CB)) | |||
572 | if (const Function *Callee = CB->getCalledFunction()) | |||
573 | IRPositions.emplace_back(IRPosition::function(*Callee)); | |||
574 | return; | |||
575 | case IRPosition::IRP_CALL_SITE_RETURNED: | |||
576 | assert(CB && "Expected call site!"); | |||
577 | // TODO: We need to look at the operand bundles similar to the redirection | |||
578 | // in CallBase. | |||
579 | if (!CB->hasOperandBundles() || CanIgnoreOperandBundles(*CB)) { | |||
580 | if (const Function *Callee = CB->getCalledFunction()) { | |||
581 | IRPositions.emplace_back(IRPosition::returned(*Callee)); | |||
582 | IRPositions.emplace_back(IRPosition::function(*Callee)); | |||
583 | for (const Argument &Arg : Callee->args()) | |||
584 | if (Arg.hasReturnedAttr()) { | |||
585 | IRPositions.emplace_back( | |||
586 | IRPosition::callsite_argument(*CB, Arg.getArgNo())); | |||
587 | IRPositions.emplace_back( | |||
588 | IRPosition::value(*CB->getArgOperand(Arg.getArgNo()))); | |||
589 | IRPositions.emplace_back(IRPosition::argument(Arg)); | |||
590 | } | |||
591 | } | |||
592 | } | |||
593 | IRPositions.emplace_back(IRPosition::callsite_function(*CB)); | |||
594 | return; | |||
595 | case IRPosition::IRP_CALL_SITE_ARGUMENT: { | |||
596 | assert(CB && "Expected call site!"); | |||
597 | // TODO: We need to look at the operand bundles similar to the redirection | |||
598 | // in CallBase. | |||
599 | if (!CB->hasOperandBundles() || CanIgnoreOperandBundles(*CB)) { | |||
600 | const Function *Callee = CB->getCalledFunction(); | |||
601 | if (Callee) { | |||
602 | if (Argument *Arg = IRP.getAssociatedArgument()) | |||
603 | IRPositions.emplace_back(IRPosition::argument(*Arg)); | |||
604 | IRPositions.emplace_back(IRPosition::function(*Callee)); | |||
605 | } | |||
606 | } | |||
607 | IRPositions.emplace_back(IRPosition::value(IRP.getAssociatedValue())); | |||
608 | return; | |||
609 | } | |||
610 | } | |||
611 | } | |||
612 | ||||
613 | bool IRPosition::hasAttr(ArrayRef<Attribute::AttrKind> AKs, | |||
614 | bool IgnoreSubsumingPositions, Attributor *A) const { | |||
615 | SmallVector<Attribute, 4> Attrs; | |||
616 | for (const IRPosition &EquivIRP : SubsumingPositionIterator(*this)) { | |||
617 | for (Attribute::AttrKind AK : AKs) | |||
618 | if (EquivIRP.getAttrsFromIRAttr(AK, Attrs)) | |||
619 | return true; | |||
620 | // The first position returned by the SubsumingPositionIterator is | |||
621 | // always the position itself. If we ignore subsuming positions we | |||
622 | // are done after the first iteration. | |||
623 | if (IgnoreSubsumingPositions) | |||
624 | break; | |||
625 | } | |||
626 | if (A) | |||
627 | for (Attribute::AttrKind AK : AKs) | |||
628 | if (getAttrsFromAssumes(AK, Attrs, *A)) | |||
629 | return true; | |||
630 | return false; | |||
631 | } | |||
632 | ||||
633 | void IRPosition::getAttrs(ArrayRef<Attribute::AttrKind> AKs, | |||
634 | SmallVectorImpl<Attribute> &Attrs, | |||
635 | bool IgnoreSubsumingPositions, Attributor *A) const { | |||
636 | for (const IRPosition &EquivIRP : SubsumingPositionIterator(*this)) { | |||
637 | for (Attribute::AttrKind AK : AKs) | |||
638 | EquivIRP.getAttrsFromIRAttr(AK, Attrs); | |||
639 | // The first position returned by the SubsumingPositionIterator is | |||
640 | // always the position itself. If we ignore subsuming positions we | |||
641 | // are done after the first iteration. | |||
642 | if (IgnoreSubsumingPositions) | |||
643 | break; | |||
644 | } | |||
645 | if (A) | |||
646 | for (Attribute::AttrKind AK : AKs) | |||
647 | getAttrsFromAssumes(AK, Attrs, *A); | |||
648 | } | |||
649 | ||||
650 | bool IRPosition::getAttrsFromIRAttr(Attribute::AttrKind AK, | |||
651 | SmallVectorImpl<Attribute> &Attrs) const { | |||
652 | if (getPositionKind() == IRP_INVALID || getPositionKind() == IRP_FLOAT) | |||
653 | return false; | |||
654 | ||||
655 | AttributeList AttrList; | |||
656 | if (const auto *CB = dyn_cast<CallBase>(&getAnchorValue())) | |||
657 | AttrList = CB->getAttributes(); | |||
658 | else | |||
659 | AttrList = getAssociatedFunction()->getAttributes(); | |||
660 | ||||
661 | bool HasAttr = AttrList.hasAttribute(getAttrIdx(), AK); | |||
662 | if (HasAttr) | |||
663 | Attrs.push_back(AttrList.getAttribute(getAttrIdx(), AK)); | |||
664 | return HasAttr; | |||
665 | } | |||
666 | ||||
667 | bool IRPosition::getAttrsFromAssumes(Attribute::AttrKind AK, | |||
668 | SmallVectorImpl<Attribute> &Attrs, | |||
669 | Attributor &A) const { | |||
670 | assert(getPositionKind() != IRP_INVALID && "Did expect a valid position!"); | |||
671 | Value &AssociatedValue = getAssociatedValue(); | |||
672 | ||||
673 | const Assume2KnowledgeMap &A2K = | |||
674 | A.getInfoCache().getKnowledgeMap().lookup({&AssociatedValue, AK}); | |||
675 | ||||
676 | // Check if we found any potential assume use, if not we don't need to create | |||
677 | // explorer iterators. | |||
678 | if (A2K.empty()) | |||
679 | return false; | |||
680 | ||||
681 | LLVMContext &Ctx = AssociatedValue.getContext(); | |||
682 | unsigned AttrsSize = Attrs.size(); | |||
683 | MustBeExecutedContextExplorer &Explorer = | |||
684 | A.getInfoCache().getMustBeExecutedContextExplorer(); | |||
685 | auto EIt = Explorer.begin(getCtxI()), EEnd = Explorer.end(getCtxI()); | |||
686 | for (auto &It : A2K) | |||
687 | if (Explorer.findInContextOf(It.first, EIt, EEnd)) | |||
688 | Attrs.push_back(Attribute::get(Ctx, AK, It.second.Max)); | |||
689 | return AttrsSize != Attrs.size(); | |||
690 | } | |||
691 | ||||
692 | void IRPosition::verify() { | |||
693 | #ifdef EXPENSIVE_CHECKS | |||
694 | switch (getPositionKind()) { | |||
695 | case IRP_INVALID: | |||
696 | assert((CBContext == nullptr) && | |||
697 | "Invalid position must not have CallBaseContext!"); | |||
698 | assert(!Enc.getOpaqueValue() && | |||
699 | "Expected a nullptr for an invalid position!"); | |||
700 | return; | |||
701 | case IRP_FLOAT: | |||
702 | assert((!isa<CallBase>(&getAssociatedValue()) && | |||
703 | !isa<Argument>(&getAssociatedValue())) && | |||
704 | "Expected specialized kind for call base and argument values!"); | |||
705 | return; | |||
706 | case IRP_RETURNED: | |||
707 | assert(isa<Function>(getAsValuePtr()) && | |||
708 | "Expected function for a 'returned' position!"); | |||
709 | assert(getAsValuePtr() == &getAssociatedValue() && | |||
710 | "Associated value mismatch!"); | |||
711 | return; | |||
712 | case IRP_CALL_SITE_RETURNED: | |||
713 | assert((CBContext == nullptr) && | |||
714 | "'call site returned' position must not have CallBaseContext!"); | |||
715 | assert((isa<CallBase>(getAsValuePtr())) && | |||
716 | "Expected call base for 'call site returned' position!"); | |||
717 | assert(getAsValuePtr() == &getAssociatedValue() && | |||
718 | "Associated value mismatch!"); | |||
719 | return; | |||
720 | case IRP_CALL_SITE: | |||
721 | assert((CBContext == nullptr) && | |||
722 | "'call site function' position must not have CallBaseContext!"); | |||
723 | assert((isa<CallBase>(getAsValuePtr())) && | |||
724 | "Expected call base for 'call site function' position!"); | |||
725 | assert(getAsValuePtr() == &getAssociatedValue() && | |||
726 | "Associated value mismatch!"); | |||
727 | return; | |||
728 | case IRP_FUNCTION: | |||
729 | assert(isa<Function>(getAsValuePtr()) && | |||
730 | "Expected function for a 'function' position!"); | |||
731 | assert(getAsValuePtr() == &getAssociatedValue() && | |||
732 | "Associated value mismatch!"); | |||
733 | return; | |||
734 | case IRP_ARGUMENT: | |||
735 | assert(isa<Argument>(getAsValuePtr()) && | |||
736 | "Expected argument for a 'argument' position!"); | |||
737 | assert(getAsValuePtr() == &getAssociatedValue() && | |||
738 | "Associated value mismatch!"); | |||
739 | return; | |||
740 | case IRP_CALL_SITE_ARGUMENT: { | |||
741 | assert((CBContext == nullptr) && | |||
742 | "'call site argument' position must not have CallBaseContext!"); | |||
743 | Use *U = getAsUsePtr(); | |||
744 | assert(U && "Expected use for a 'call site argument' position!"); | |||
745 | assert(isa<CallBase>(U->getUser()) && | |||
746 | "Expected call base user for a 'call site argument' position!"); | |||
747 | assert(cast<CallBase>(U->getUser())->isArgOperand(U) && | |||
748 | "Expected call base argument operand for a 'call site argument' " | |||
749 | "position"); | |||
750 | assert(cast<CallBase>(U->getUser())->getArgOperandNo(U) == | |||
751 | unsigned(getCallSiteArgNo()) && | |||
752 | "Argument number mismatch!"); | |||
753 | assert(U->get() == &getAssociatedValue() && "Associated value mismatch!"); | |||
754 | return; | |||
755 | } | |||
756 | } | |||
757 | #endif | |||
758 | } | |||
759 | ||||
760 | Optional<Constant *> | |||
761 | Attributor::getAssumedConstant(const IRPosition &IRP, | |||
762 | const AbstractAttribute &AA, | |||
763 | bool &UsedAssumedInformation) { | |||
764 | // First check all callbacks provided by outside AAs. If any of them returns | |||
765 | // a non-null value that is different from the associated value, or None, we | |||
766 | // assume it's simplified. | |||
767 | for (auto &CB : SimplificationCallbacks.lookup(IRP)) { | |||
768 | Optional<Value *> SimplifiedV = CB(IRP, &AA, UsedAssumedInformation); | |||
769 | if (!SimplifiedV.hasValue()) | |||
770 | return llvm::None; | |||
771 | if (isa_and_nonnull<Constant>(*SimplifiedV)) | |||
772 | return cast<Constant>(*SimplifiedV); | |||
773 | return nullptr; | |||
774 | } | |||
775 | const auto &ValueSimplifyAA = | |||
776 | getAAFor<AAValueSimplify>(AA, IRP, DepClassTy::NONE); | |||
777 | Optional<Value *> SimplifiedV = | |||
778 | ValueSimplifyAA.getAssumedSimplifiedValue(*this); | |||
779 | bool IsKnown = ValueSimplifyAA.isAtFixpoint(); | |||
780 | UsedAssumedInformation |= !IsKnown; | |||
781 | if (!SimplifiedV.hasValue()) { | |||
782 | recordDependence(ValueSimplifyAA, AA, DepClassTy::OPTIONAL); | |||
783 | return llvm::None; | |||
784 | } | |||
785 | if (isa_and_nonnull<UndefValue>(SimplifiedV.getValue())) { | |||
786 | recordDependence(ValueSimplifyAA, AA, DepClassTy::OPTIONAL); | |||
787 | return UndefValue::get(IRP.getAssociatedType()); | |||
788 | } | |||
789 | Constant *CI = dyn_cast_or_null<Constant>(SimplifiedV.getValue()); | |||
790 | if (CI) | |||
791 | CI = dyn_cast_or_null<Constant>( | |||
792 | AA::getWithType(*CI, *IRP.getAssociatedType())); | |||
793 | if (CI) | |||
794 | recordDependence(ValueSimplifyAA, AA, DepClassTy::OPTIONAL); | |||
795 | return CI; | |||
796 | } | |||
797 | ||||
798 | Optional<Value *> | |||
799 | Attributor::getAssumedSimplified(const IRPosition &IRP, | |||
800 | const AbstractAttribute *AA, | |||
801 | bool &UsedAssumedInformation) { | |||
802 | // First check all callbacks provided by outside AAs. If any of them returns | |||
803 | // a non-null value that is different from the associated value, or None, we | |||
804 | // assume it's simplified. | |||
805 | for (auto &CB : SimplificationCallbacks.lookup(IRP)) | |||
806 | return CB(IRP, AA, UsedAssumedInformation); | |||
807 | ||||
808 | // If no high-level/outside simplification occurred, use AAValueSimplify. | |||
809 | const auto &ValueSimplifyAA = | |||
810 | getOrCreateAAFor<AAValueSimplify>(IRP, AA, DepClassTy::NONE); | |||
811 | Optional<Value *> SimplifiedV = | |||
812 | ValueSimplifyAA.getAssumedSimplifiedValue(*this); | |||
813 | bool IsKnown = ValueSimplifyAA.isAtFixpoint(); | |||
814 | UsedAssumedInformation |= !IsKnown; | |||
815 | if (!SimplifiedV.hasValue()) { | |||
816 | if (AA) | |||
817 | recordDependence(ValueSimplifyAA, *AA, DepClassTy::OPTIONAL); | |||
818 | return llvm::None; | |||
819 | } | |||
820 | if (*SimplifiedV == nullptr) | |||
821 | return const_cast<Value *>(&IRP.getAssociatedValue()); | |||
822 | if (Value *SimpleV = | |||
823 | AA::getWithType(**SimplifiedV, *IRP.getAssociatedType())) { | |||
824 | if (AA) | |||
825 | recordDependence(ValueSimplifyAA, *AA, DepClassTy::OPTIONAL); | |||
826 | return SimpleV; | |||
827 | } | |||
828 | return const_cast<Value *>(&IRP.getAssociatedValue()); | |||
829 | } | |||
830 | ||||
831 | Optional<Value *> Attributor::translateArgumentToCallSiteContent( | |||
832 | Optional<Value *> V, CallBase &CB, const AbstractAttribute &AA, | |||
833 | bool &UsedAssumedInformation) { | |||
834 | if (!V.hasValue()) | |||
835 | return V; | |||
836 | if (*V == nullptr || isa<Constant>(*V)) | |||
837 | return V; | |||
838 | if (auto *Arg = dyn_cast<Argument>(*V)) | |||
839 | if (CB.getCalledFunction() == Arg->getParent()) | |||
840 | if (!Arg->hasPointeeInMemoryValueAttr()) | |||
841 | return getAssumedSimplified( | |||
842 | IRPosition::callsite_argument(CB, Arg->getArgNo()), AA, | |||
843 | UsedAssumedInformation); | |||
844 | return nullptr; | |||
845 | } | |||
846 | ||||
847 | Attributor::~Attributor() { | |||
848 | // The abstract attributes are allocated via the BumpPtrAllocator Allocator, | |||
849 | // thus we cannot delete them. We can, and want to, destruct them though. | |||
850 | for (auto &DepAA : DG.SyntheticRoot.Deps) { | |||
851 | AbstractAttribute *AA = cast<AbstractAttribute>(DepAA.getPointer()); | |||
852 | AA->~AbstractAttribute(); | |||
853 | } | |||
854 | } | |||
855 | ||||
856 | bool Attributor::isAssumedDead(const AbstractAttribute &AA, | |||
857 | const AAIsDead *FnLivenessAA, | |||
858 | bool &UsedAssumedInformation, | |||
859 | bool CheckBBLivenessOnly, DepClassTy DepClass) { | |||
860 | const IRPosition &IRP = AA.getIRPosition(); | |||
861 | if (!Functions.count(IRP.getAnchorScope())) | |||
862 | return false; | |||
863 | return isAssumedDead(IRP, &AA, FnLivenessAA, UsedAssumedInformation, | |||
864 | CheckBBLivenessOnly, DepClass); | |||
865 | } | |||
866 | ||||
867 | bool Attributor::isAssumedDead(const Use &U, | |||
868 | const AbstractAttribute *QueryingAA, | |||
869 | const AAIsDead *FnLivenessAA, | |||
870 | bool &UsedAssumedInformation, | |||
871 | bool CheckBBLivenessOnly, DepClassTy DepClass) { | |||
872 | Instruction *UserI = dyn_cast<Instruction>(U.getUser()); | |||
873 | if (!UserI) | |||
874 | return isAssumedDead(IRPosition::value(*U.get()), QueryingAA, FnLivenessAA, | |||
875 | UsedAssumedInformation, CheckBBLivenessOnly, DepClass); | |||
876 | ||||
877 | if (auto *CB = dyn_cast<CallBase>(UserI)) { | |||
878 | // For call site argument uses we can check if the argument is | |||
879 | // unused/dead. | |||
880 | if (CB->isArgOperand(&U)) { | |||
881 | const IRPosition &CSArgPos = | |||
882 | IRPosition::callsite_argument(*CB, CB->getArgOperandNo(&U)); | |||
883 | return isAssumedDead(CSArgPos, QueryingAA, FnLivenessAA, | |||
884 | UsedAssumedInformation, CheckBBLivenessOnly, | |||
885 | DepClass); | |||
886 | } | |||
887 | } else if (ReturnInst *RI = dyn_cast<ReturnInst>(UserI)) { | |||
888 | const IRPosition &RetPos = IRPosition::returned(*RI->getFunction()); | |||
889 | return isAssumedDead(RetPos, QueryingAA, FnLivenessAA, | |||
890 | UsedAssumedInformation, CheckBBLivenessOnly, DepClass); | |||
891 | } else if (PHINode *PHI = dyn_cast<PHINode>(UserI)) { | |||
892 | BasicBlock *IncomingBB = PHI->getIncomingBlock(U); | |||
893 | return isAssumedDead(*IncomingBB->getTerminator(), QueryingAA, FnLivenessAA, | |||
894 | UsedAssumedInformation, CheckBBLivenessOnly, DepClass); | |||
895 | } | |||
896 | ||||
897 | return isAssumedDead(IRPosition::value(*UserI), QueryingAA, FnLivenessAA, | |||
898 | UsedAssumedInformation, CheckBBLivenessOnly, DepClass); | |||
899 | } | |||
900 | ||||
901 | bool Attributor::isAssumedDead(const Instruction &I, | |||
902 | const AbstractAttribute *QueryingAA, | |||
903 | const AAIsDead *FnLivenessAA, | |||
904 | bool &UsedAssumedInformation, | |||
905 | bool CheckBBLivenessOnly, DepClassTy DepClass) { | |||
906 | const IRPosition::CallBaseContext *CBCtx = | |||
907 | QueryingAA ? QueryingAA->getCallBaseContext() : nullptr; | |||
908 | ||||
909 | if (ManifestAddedBlocks.contains(I.getParent())) | |||
910 | return false; | |||
911 | ||||
912 | if (!FnLivenessAA) | |||
913 | FnLivenessAA = | |||
914 | lookupAAFor<AAIsDead>(IRPosition::function(*I.getFunction(), CBCtx), | |||
915 | QueryingAA, DepClassTy::NONE); | |||
916 | ||||
917 | // If we have a context instruction and a liveness AA we use it. | |||
918 | if (FnLivenessAA && | |||
919 | FnLivenessAA->getIRPosition().getAnchorScope() == I.getFunction() && | |||
920 | FnLivenessAA->isAssumedDead(&I)) { | |||
921 | if (QueryingAA) | |||
922 | recordDependence(*FnLivenessAA, *QueryingAA, DepClass); | |||
923 | if (!FnLivenessAA->isKnownDead(&I)) | |||
924 | UsedAssumedInformation = true; | |||
925 | return true; | |||
926 | } | |||
927 | ||||
928 | if (CheckBBLivenessOnly) | |||
929 | return false; | |||
930 | ||||
931 | const AAIsDead &IsDeadAA = getOrCreateAAFor<AAIsDead>( | |||
932 | IRPosition::value(I, CBCtx), QueryingAA, DepClassTy::NONE); | |||
933 | // Don't check liveness for AAIsDead. | |||
934 | if (QueryingAA == &IsDeadAA) | |||
935 | return false; | |||
936 | ||||
937 | if (IsDeadAA.isAssumedDead()) { | |||
938 | if (QueryingAA) | |||
939 | recordDependence(IsDeadAA, *QueryingAA, DepClass); | |||
940 | if (!IsDeadAA.isKnownDead()) | |||
941 | UsedAssumedInformation = true; | |||
942 | return true; | |||
943 | } | |||
944 | ||||
945 | return false; | |||
946 | } | |||
947 | ||||
948 | bool Attributor::isAssumedDead(const IRPosition &IRP, | |||
949 | const AbstractAttribute *QueryingAA, | |||
950 | const AAIsDead *FnLivenessAA, | |||
951 | bool &UsedAssumedInformation, | |||
952 | bool CheckBBLivenessOnly, DepClassTy DepClass) { | |||
953 | Instruction *CtxI = IRP.getCtxI(); | |||
954 | if (CtxI && | |||
955 | isAssumedDead(*CtxI, QueryingAA, FnLivenessAA, UsedAssumedInformation, | |||
956 | /* CheckBBLivenessOnly */ true, | |||
957 | CheckBBLivenessOnly ? DepClass : DepClassTy::OPTIONAL)) | |||
958 | return true; | |||
959 | ||||
960 | if (CheckBBLivenessOnly) | |||
961 | return false; | |||
962 | ||||
963 | // If we haven't succeeded we query the specific liveness info for the IRP. | |||
964 | const AAIsDead *IsDeadAA; | |||
965 | if (IRP.getPositionKind() == IRPosition::IRP_CALL_SITE) | |||
966 | IsDeadAA = &getOrCreateAAFor<AAIsDead>( | |||
967 | IRPosition::callsite_returned(cast<CallBase>(IRP.getAssociatedValue())), | |||
968 | QueryingAA, DepClassTy::NONE); | |||
969 | else | |||
970 | IsDeadAA = &getOrCreateAAFor<AAIsDead>(IRP, QueryingAA, DepClassTy::NONE); | |||
971 | // Don't check liveness for AAIsDead. | |||
972 | if (QueryingAA == IsDeadAA) | |||
973 | return false; | |||
974 | ||||
975 | if (IsDeadAA->isAssumedDead()) { | |||
976 | if (QueryingAA) | |||
977 | recordDependence(*IsDeadAA, *QueryingAA, DepClass); | |||
978 | if (!IsDeadAA->isKnownDead()) | |||
979 | UsedAssumedInformation = true; | |||
980 | return true; | |||
981 | } | |||
982 | ||||
983 | return false; | |||
984 | } | |||
985 | ||||
986 | bool Attributor::isAssumedDead(const BasicBlock &BB, | |||
987 | const AbstractAttribute *QueryingAA, | |||
988 | const AAIsDead *FnLivenessAA, | |||
989 | DepClassTy DepClass) { | |||
990 | if (!FnLivenessAA) | |||
991 | FnLivenessAA = lookupAAFor<AAIsDead>(IRPosition::function(*BB.getParent()), | |||
992 | QueryingAA, DepClassTy::NONE); | |||
993 | if (FnLivenessAA->isAssumedDead(&BB)) { | |||
994 | if (QueryingAA) | |||
995 | recordDependence(*FnLivenessAA, *QueryingAA, DepClass); | |||
996 | return true; | |||
997 | } | |||
998 | ||||
999 | return false; | |||
1000 | } | |||
1001 | ||||
1002 | bool Attributor::checkForAllUses(function_ref<bool(const Use &, bool &)> Pred, | |||
1003 | const AbstractAttribute &QueryingAA, | |||
1004 | const Value &V, bool CheckBBLivenessOnly, | |||
1005 | DepClassTy LivenessDepClass) { | |||
1006 | ||||
1007 | // Check the trivial case first as it catches void values. | |||
1008 | if (V.use_empty()) | |||
1009 | return true; | |||
1010 | ||||
1011 | const IRPosition &IRP = QueryingAA.getIRPosition(); | |||
1012 | SmallVector<const Use *, 16> Worklist; | |||
1013 | SmallPtrSet<const Use *, 16> Visited; | |||
1014 | ||||
1015 | for (const Use &U : V.uses()) | |||
1016 | Worklist.push_back(&U); | |||
1017 | ||||
1018 | LLVM_DEBUG(dbgs() << "[Attributor] Got " << Worklist.size() | |||
1019 | << " initial uses to check\n"); | |||
1020 | ||||
1021 | const Function *ScopeFn = IRP.getAnchorScope(); | |||
1022 | const auto *LivenessAA = | |||
1023 | ScopeFn ? &getAAFor<AAIsDead>(QueryingAA, IRPosition::function(*ScopeFn), | |||
1024 | DepClassTy::NONE) | |||
1025 | : nullptr; | |||
1026 | ||||
1027 | while (!Worklist.empty()) { | |||
1028 | const Use *U = Worklist.pop_back_val(); | |||
1029 | if (isa<PHINode>(U->getUser()) && !Visited.insert(U).second) | |||
1030 | continue; | |||
1031 | LLVM_DEBUG(dbgs() << "[Attributor] Check use: " << **U << " in " | |||
1032 | << *U->getUser() << "\n"); | |||
1033 | bool UsedAssumedInformation = false; | |||
1034 | if (isAssumedDead(*U, &QueryingAA, LivenessAA, UsedAssumedInformation, | |||
1035 | CheckBBLivenessOnly, LivenessDepClass)) { | |||
1036 | LLVM_DEBUG(dbgs() << "[Attributor] Dead use, skip!\n"); | |||
1037 | continue; | |||
1038 | } | |||
1039 | if (U->getUser()->isDroppable()) { | |||
1040 | LLVM_DEBUG(dbgs() << "[Attributor] Droppable user, skip!\n"); | |||
1041 | continue; | |||
1042 | } | |||
1043 | ||||
1044 | if (auto *SI = dyn_cast<StoreInst>(U->getUser())) { | |||
1045 | if (&SI->getOperandUse(0) == U) { | |||
1046 | SmallSetVector<Value *, 4> PotentialCopies; | |||
1047 | if (AA::getPotentialCopiesOfStoredValue(*this, *SI, PotentialCopies, | |||
1048 | QueryingAA, | |||
1049 | UsedAssumedInformation)) { | |||
1050 | LLVM_DEBUG(dbgs() << "[Attributor] Value is stored, continue with " | |||
1051 | << PotentialCopies.size() | |||
1052 | << " potential copies instead!\n"); | |||
1053 | for (Value *PotentialCopy : PotentialCopies) | |||
1054 | for (const Use &U : PotentialCopy->uses()) | |||
1055 | Worklist.push_back(&U); | |||
1056 | continue; | |||
1057 | } | |||
1058 | } | |||
1059 | } | |||
1060 | ||||
1061 | bool Follow = false; | |||
1062 | if (!Pred(*U, Follow)) | |||
1063 | return false; | |||
1064 | if (!Follow) | |||
1065 | continue; | |||
1066 | for (const Use &UU : U->getUser()->uses()) | |||
1067 | Worklist.push_back(&UU); | |||
1068 | } | |||
1069 | ||||
1070 | return true; | |||
1071 | } | |||
1072 | ||||
1073 | bool Attributor::checkForAllCallSites(function_ref<bool(AbstractCallSite)> Pred, | |||
1074 | const AbstractAttribute &QueryingAA, | |||
1075 | bool RequireAllCallSites, | |||
1076 | bool &AllCallSitesKnown) { | |||
1077 | // We can try to determine information from | |||
1078 | // the call sites. However, this is only possible if all call sites are known, | |||
1079 | // hence the function has internal linkage. | |||
1080 | const IRPosition &IRP = QueryingAA.getIRPosition(); | |||
1081 | const Function *AssociatedFunction = IRP.getAssociatedFunction(); | |||
1082 | if (!AssociatedFunction) { | |||
1083 | LLVM_DEBUG(dbgs() << "[Attributor] No function associated with " << IRP | |||
1084 | << "\n"); | |||
1085 | AllCallSitesKnown = false; | |||
1086 | return false; | |||
1087 | } | |||
1088 | ||||
1089 | return checkForAllCallSites(Pred, *AssociatedFunction, RequireAllCallSites, | |||
1090 | &QueryingAA, AllCallSitesKnown); | |||
1091 | } | |||
1092 | ||||
1093 | bool Attributor::checkForAllCallSites(function_ref<bool(AbstractCallSite)> Pred, | |||
1094 | const Function &Fn, | |||
1095 | bool RequireAllCallSites, | |||
1096 | const AbstractAttribute *QueryingAA, | |||
1097 | bool &AllCallSitesKnown) { | |||
1098 | if (RequireAllCallSites && !Fn.hasLocalLinkage()) { | |||
1099 | LLVM_DEBUG( | |||
1100 | dbgs() | |||
1101 | << "[Attributor] Function " << Fn.getName() | |||
1102 | << " has no internal linkage, hence not all call sites are known\n"); | |||
1103 | AllCallSitesKnown = false; | |||
1104 | return false; | |||
1105 | } | |||
1106 | ||||
1107 | // If we do not require all call sites we might not see all. | |||
1108 | AllCallSitesKnown = RequireAllCallSites; | |||
1109 | ||||
1110 | SmallVector<const Use *, 8> Uses(make_pointer_range(Fn.uses())); | |||
1111 | for (unsigned u = 0; u < Uses.size(); ++u) { | |||
1112 | const Use &U = *Uses[u]; | |||
1113 | LLVM_DEBUG(dbgs() << "[Attributor] Check use: " << *U << " in " | |||
1114 | << *U.getUser() << "\n"); | |||
1115 | bool UsedAssumedInformation = false; | |||
1116 | if (isAssumedDead(U, QueryingAA, nullptr, UsedAssumedInformation, | |||
1117 | /* CheckBBLivenessOnly */ true)) { | |||
1118 | LLVM_DEBUG(dbgs() << "[Attributor] Dead use, skip!\n"); | |||
1119 | continue; | |||
1120 | } | |||
1121 | if (ConstantExpr *CE = dyn_cast<ConstantExpr>(U.getUser())) { | |||
1122 | if (CE->isCast() && CE->getType()->isPointerTy() && | |||
1123 | CE->getType()->getPointerElementType()->isFunctionTy()) { | |||
1124 | for (const Use &CEU : CE->uses()) | |||
1125 | Uses.push_back(&CEU); | |||
1126 | continue; | |||
1127 | } | |||
1128 | } | |||
1129 | ||||
1130 | AbstractCallSite ACS(&U); | |||
1131 | if (!ACS) { | |||
1132 | LLVM_DEBUG(dbgs() << "[Attributor] Function " << Fn.getName() | |||
1133 | << " has non call site use " << *U.get() << " in " | |||
1134 | << *U.getUser() << "\n"); | |||
1135 | // BlockAddress users are allowed. | |||
1136 | if (isa<BlockAddress>(U.getUser())) | |||
1137 | continue; | |||
1138 | return false; | |||
1139 | } | |||
1140 | ||||
1141 | const Use *EffectiveUse = | |||
1142 | ACS.isCallbackCall() ? &ACS.getCalleeUseForCallback() : &U; | |||
1143 | if (!ACS.isCallee(EffectiveUse)) { | |||
1144 | if (!RequireAllCallSites) | |||
1145 | continue; | |||
1146 | LLVM_DEBUG(dbgs() << "[Attributor] User " << EffectiveUse->getUser() | |||
1147 | << " is an invalid use of " << Fn.getName() << "\n"); | |||
1148 | return false; | |||
1149 | } | |||
1150 | ||||
1151 | // Make sure the arguments that can be matched between the call site and the | |||
1152 | // callee agree on their type. It is unlikely they do not and it doesn't | |||
1153 | // make sense for all attributes to know/care about this. | |||
1154 | assert(&Fn == ACS.getCalledFunction() && "Expected known callee"); | |||
1155 | unsigned MinArgsParams = | |||
1156 | std::min(size_t(ACS.getNumArgOperands()), Fn.arg_size()); | |||
1157 | for (unsigned u = 0; u < MinArgsParams; ++u) { | |||
1158 | Value *CSArgOp = ACS.getCallArgOperand(u); | |||
1159 | if (CSArgOp && Fn.getArg(u)->getType() != CSArgOp->getType()) { | |||
1160 | LLVM_DEBUG( | |||
1161 | dbgs() << "[Attributor] Call site / callee argument type mismatch [" | |||
1162 | << u << "@" << Fn.getName() << ": " | |||
1163 | << *Fn.getArg(u)->getType() << " vs. " | |||
1164 | << *ACS.getCallArgOperand(u)->getType() << "\n"); | |||
1165 | return false; | |||
1166 | } | |||
1167 | } | |||
1168 | ||||
1169 | if (Pred(ACS)) | |||
1170 | continue; | |||
1171 | ||||
1172 | LLVM_DEBUG(dbgs() << "[Attributor] Call site callback failed for " | |||
1173 | << *ACS.getInstruction() << "\n"); | |||
1174 | return false; | |||
1175 | } | |||
1176 | ||||
1177 | return true; | |||
1178 | } | |||
1179 | ||||
1180 | bool Attributor::shouldPropagateCallBaseContext(const IRPosition &IRP) { | |||
1181 | // TODO: Maintain a cache of Values that are | |||
1182 | // on the pathway from an Argument to an Instruction that would affect the | |||
1183 | // liveness/return state etc. | |||
1184 | return EnableCallSiteSpecific; | |||
1185 | } | |||
1186 | ||||
1187 | bool Attributor::checkForAllReturnedValuesAndReturnInsts( | |||
1188 | function_ref<bool(Value &, const SmallSetVector<ReturnInst *, 4> &)> Pred, | |||
1189 | const AbstractAttribute &QueryingAA) { | |||
1190 | ||||
1191 | const IRPosition &IRP = QueryingAA.getIRPosition(); | |||
1192 | // Since we need to provide return instructions we have to have an exact | |||
1193 | // definition. | |||
1194 | const Function *AssociatedFunction = IRP.getAssociatedFunction(); | |||
1195 | if (!AssociatedFunction) | |||
1196 | return false; | |||
1197 | ||||
1198 | // If this is a call site query we use the call site specific return values | |||
1199 | // and liveness information. | |||
1200 | // TODO: use the function scope once we have call site AAReturnedValues. | |||
1201 | const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction); | |||
1202 | const auto &AARetVal = | |||
1203 | getAAFor<AAReturnedValues>(QueryingAA, QueryIRP, DepClassTy::REQUIRED); | |||
1204 | if (!AARetVal.getState().isValidState()) | |||
1205 | return false; | |||
1206 | ||||
1207 | return AARetVal.checkForAllReturnedValuesAndReturnInsts(Pred); | |||
1208 | } | |||
1209 | ||||
1210 | bool Attributor::checkForAllReturnedValues( | |||
1211 | function_ref<bool(Value &)> Pred, const AbstractAttribute &QueryingAA) { | |||
1212 | ||||
1213 | const IRPosition &IRP = QueryingAA.getIRPosition(); | |||
1214 | const Function *AssociatedFunction = IRP.getAssociatedFunction(); | |||
1215 | if (!AssociatedFunction) | |||
1216 | return false; | |||
1217 | ||||
1218 | // TODO: use the function scope once we have call site AAReturnedValues. | |||
1219 | const IRPosition &QueryIRP = IRPosition::function( | |||
1220 | *AssociatedFunction, QueryingAA.getCallBaseContext()); | |||
1221 | const auto &AARetVal = | |||
1222 | getAAFor<AAReturnedValues>(QueryingAA, QueryIRP, DepClassTy::REQUIRED); | |||
1223 | if (!AARetVal.getState().isValidState()) | |||
1224 | return false; | |||
1225 | ||||
1226 | return AARetVal.checkForAllReturnedValuesAndReturnInsts( | |||
1227 | [&](Value &RV, const SmallSetVector<ReturnInst *, 4> &) { | |||
1228 | return Pred(RV); | |||
1229 | }); | |||
1230 | } | |||
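// Editorial note (illustrative sketch, not part of the original source): an
// abstract attribute would typically use this interface from its update step,
// assuming `A` is the Attributor and `*this` the querying AbstractAttribute.
// The predicate below is hypothetical.
//
//   auto ReturnValuePred = [&](Value &RV) {
//     return isa<Constant>(RV); // e.g., require every returned value constant
//   };
//   if (!A.checkForAllReturnedValues(ReturnValuePred, *this))
//     return indicatePessimisticFixpoint();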
1231 | ||||
1232 | static bool checkForAllInstructionsImpl( | |||
1233 | Attributor *A, InformationCache::OpcodeInstMapTy &OpcodeInstMap, | |||
1234 | function_ref<bool(Instruction &)> Pred, const AbstractAttribute *QueryingAA, | |||
1235 | const AAIsDead *LivenessAA, const ArrayRef<unsigned> &Opcodes, | |||
1236 | bool &UsedAssumedInformation, bool CheckBBLivenessOnly = false, | |||
1237 | bool CheckPotentiallyDead = false) { | |||
1238 | for (unsigned Opcode : Opcodes) { | |||
1239 | // Check if we have instructions with this opcode at all first. | |||
1240 | auto *Insts = OpcodeInstMap.lookup(Opcode); | |||
1241 | if (!Insts) | |||
1242 | continue; | |||
1243 | ||||
1244 | for (Instruction *I : *Insts) { | |||
1245 | // Skip dead instructions. | |||
1246 | if (A && !CheckPotentiallyDead && | |||
1247 | A->isAssumedDead(IRPosition::value(*I), QueryingAA, LivenessAA, | |||
1248 | UsedAssumedInformation, CheckBBLivenessOnly)) | |||
1249 | continue; | |||
1250 | ||||
1251 | if (!Pred(*I)) | |||
1252 | return false; | |||
1253 | } | |||
1254 | } | |||
1255 | return true; | |||
1256 | } | |||
1257 | ||||
1258 | bool Attributor::checkForAllInstructions(function_ref<bool(Instruction &)> Pred, | |||
1259 | const AbstractAttribute &QueryingAA, | |||
1260 | const ArrayRef<unsigned> &Opcodes, | |||
1261 | bool &UsedAssumedInformation, | |||
1262 | bool CheckBBLivenessOnly, | |||
1263 | bool CheckPotentiallyDead) { | |||
1264 | ||||
1265 | const IRPosition &IRP = QueryingAA.getIRPosition(); | |||
1266 | // Since we need to provide instructions we have to have an exact definition. | |||
1267 | const Function *AssociatedFunction = IRP.getAssociatedFunction(); | |||
1268 | if (!AssociatedFunction) | |||
1269 | return false; | |||
1270 | ||||
1271 | if (AssociatedFunction->isDeclaration()) | |||
1272 | return false; | |||
1273 | ||||
1274 | // TODO: use the function scope once we have call site AAReturnedValues. | |||
1275 | const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction); | |||
1276 | const auto *LivenessAA = | |||
1277 | (CheckBBLivenessOnly || CheckPotentiallyDead) | |||
1278 | ? nullptr | |||
1279 | : &(getAAFor<AAIsDead>(QueryingAA, QueryIRP, DepClassTy::NONE)); | |||
1280 | ||||
1281 | auto &OpcodeInstMap = | |||
1282 | InfoCache.getOpcodeInstMapForFunction(*AssociatedFunction); | |||
1283 | if (!checkForAllInstructionsImpl(this, OpcodeInstMap, Pred, &QueryingAA, | |||
1284 | LivenessAA, Opcodes, UsedAssumedInformation, | |||
1285 | CheckBBLivenessOnly, CheckPotentiallyDead)) | |||
1286 | return false; | |||
1287 | ||||
1288 | return true; | |||
1289 | } | |||
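// Editorial note (illustrative sketch, not part of the original source): a
// minimal example of how an abstract attribute might walk all call-like
// instructions via the helper above, assuming `A` is the Attributor and
// `*this` the querying attribute; the predicate and opcode list are chosen
// only for illustration.
//
//   bool UsedAssumedInformation = false;
//   auto Opcodes = {(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
//                   (unsigned)Instruction::Call};
//   if (!A.checkForAllInstructions(
//           [](Instruction &I) { return !I.mayThrow(); }, *this, Opcodes,
//           UsedAssumedInformation))
//     return indicatePessimisticFixpoint();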
1290 | ||||
1291 | bool Attributor::checkForAllReadWriteInstructions( | |||
1292 | function_ref<bool(Instruction &)> Pred, AbstractAttribute &QueryingAA, | |||
1293 | bool &UsedAssumedInformation) { | |||
1294 | ||||
1295 | const Function *AssociatedFunction = | |||
1296 | QueryingAA.getIRPosition().getAssociatedFunction(); | |||
1297 | if (!AssociatedFunction) | |||
1298 | return false; | |||
1299 | ||||
1300 | // TODO: use the function scope once we have call site AAReturnedValues. | |||
1301 | const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction); | |||
1302 | const auto &LivenessAA = | |||
1303 | getAAFor<AAIsDead>(QueryingAA, QueryIRP, DepClassTy::NONE); | |||
1304 | ||||
1305 | for (Instruction *I : | |||
1306 | InfoCache.getReadOrWriteInstsForFunction(*AssociatedFunction)) { | |||
1307 | // Skip dead instructions. | |||
1308 | if (isAssumedDead(IRPosition::value(*I), &QueryingAA, &LivenessAA, | |||
1309 | UsedAssumedInformation)) | |||
1310 | continue; | |||
1311 | ||||
1312 | if (!Pred(*I)) | |||
1313 | return false; | |||
1314 | } | |||
1315 | ||||
1316 | return true; | |||
1317 | } | |||
1318 | ||||
1319 | void Attributor::runTillFixpoint() { | |||
1320 | TimeTraceScope TimeScope("Attributor::runTillFixpoint"); | |||
1321 | LLVM_DEBUG(dbgs() << "[Attributor] Identified and initialized "
1322 |                   << DG.SyntheticRoot.Deps.size()
1323 |                   << " abstract attributes.\n");
1324 | ||||
1325 | // Now that all abstract attributes are collected and initialized we start | |||
1326 | // the abstract analysis. | |||
1327 | ||||
1328 | unsigned IterationCounter = 1; | |||
1329 | unsigned MaxFixedPointIterations; | |||
1330 | if (MaxFixpointIterations) | |||
1331 | MaxFixedPointIterations = MaxFixpointIterations.getValue(); | |||
1332 | else | |||
1333 | MaxFixedPointIterations = SetFixpointIterations; | |||
1334 | ||||
1335 | SmallVector<AbstractAttribute *, 32> ChangedAAs; | |||
1336 | SetVector<AbstractAttribute *> Worklist, InvalidAAs; | |||
1337 | Worklist.insert(DG.SyntheticRoot.begin(), DG.SyntheticRoot.end()); | |||
1338 | ||||
1339 | do { | |||
1340 | // Remember the size to determine new attributes. | |||
1341 | size_t NumAAs = DG.SyntheticRoot.Deps.size(); | |||
1342 | LLVM_DEBUG(dbgs() << "\n\n[Attributor] #Iteration: " << IterationCounter
1343 |                   << ", Worklist size: " << Worklist.size() << "\n");
1344 | ||||
1345 | // For invalid AAs we can fix dependent AAs that have a required dependence, | |||
1346 | // thereby folding long dependence chains in a single step without the need | |||
1347 | // to run updates. | |||
1348 | for (unsigned u = 0; u < InvalidAAs.size(); ++u) { | |||
1349 | AbstractAttribute *InvalidAA = InvalidAAs[u]; | |||
1350 | ||||
1351 | // Check the dependences to fast track invalidation. | |||
1352 | LLVM_DEBUG(dbgs() << "[Attributor] InvalidAA: " << *InvalidAA << " has "
1353 |                   << InvalidAA->Deps.size()
1354 |                   << " required & optional dependences\n");
1355 | while (!InvalidAA->Deps.empty()) { | |||
1356 | const auto &Dep = InvalidAA->Deps.back(); | |||
1357 | InvalidAA->Deps.pop_back(); | |||
1358 | AbstractAttribute *DepAA = cast<AbstractAttribute>(Dep.getPointer()); | |||
1359 | if (Dep.getInt() == unsigned(DepClassTy::OPTIONAL)) { | |||
1360 | Worklist.insert(DepAA); | |||
1361 | continue; | |||
1362 | } | |||
1363 | DepAA->getState().indicatePessimisticFixpoint(); | |||
1364 | assert(DepAA->getState().isAtFixpoint() && "Expected fixpoint state!");
1365 | if (!DepAA->getState().isValidState()) | |||
1366 | InvalidAAs.insert(DepAA); | |||
1367 | else | |||
1368 | ChangedAAs.push_back(DepAA); | |||
1369 | } | |||
1370 | } | |||
1371 | ||||
1372 | // Add all abstract attributes that are potentially dependent on one that | |||
1373 | // changed to the work list. | |||
1374 | for (AbstractAttribute *ChangedAA : ChangedAAs) | |||
1375 | while (!ChangedAA->Deps.empty()) { | |||
1376 | Worklist.insert( | |||
1377 | cast<AbstractAttribute>(ChangedAA->Deps.back().getPointer())); | |||
1378 | ChangedAA->Deps.pop_back(); | |||
1379 | } | |||
1380 | ||||
1381 | LLVM_DEBUG(dbgs() << "[Attributor] #Iteration: " << IterationCounter
1382 |                   << ", Worklist+Dependent size: " << Worklist.size()
1383 |                   << "\n");
1384 | ||||
1385 | // Reset the changed and invalid set. | |||
1386 | ChangedAAs.clear(); | |||
1387 | InvalidAAs.clear(); | |||
1388 | ||||
1389 | // Update all abstract attributes in the work list and record the ones that
1390 | // changed.
1391 | for (AbstractAttribute *AA : Worklist) { | |||
1392 | const auto &AAState = AA->getState(); | |||
1393 | if (!AAState.isAtFixpoint()) | |||
1394 | if (updateAA(*AA) == ChangeStatus::CHANGED) | |||
1395 | ChangedAAs.push_back(AA); | |||
1396 | ||||
1397 | // Use the InvalidAAs vector to propagate invalid states quickly and
1398 | // transitively without requiring updates.
1399 | if (!AAState.isValidState()) | |||
1400 | InvalidAAs.insert(AA); | |||
1401 | } | |||
1402 | ||||
1403 | // Add attributes to the changed set if they have been created in the last | |||
1404 | // iteration. | |||
1405 | ChangedAAs.append(DG.SyntheticRoot.begin() + NumAAs, | |||
1406 | DG.SyntheticRoot.end()); | |||
1407 | ||||
1408 | // Reset the work list and repopulate with the changed abstract attributes. | |||
1409 | // Note that dependent ones are added above. | |||
1410 | Worklist.clear(); | |||
1411 | Worklist.insert(ChangedAAs.begin(), ChangedAAs.end()); | |||
1412 | ||||
1413 | } while (!Worklist.empty() && (IterationCounter++ < MaxFixedPointIterations || | |||
1414 | VerifyMaxFixpointIterations)); | |||
1415 | ||||
1416 | LLVM_DEBUG(dbgs() << "\n[Attributor] Fixpoint iteration done after: "
1417 |                   << IterationCounter << "/" << MaxFixpointIterations
1418 |                   << " iterations\n");
1419 | ||||
1420 | // Reset abstract attributes not settled in a sound fixpoint by now. This
1421 | // happens when we stopped the fixpoint iteration early. Note that only the
1422 | // ones marked as "changed" *and* the ones transitively depending on them
1423 | // need to be reverted to a pessimistic state. Others might not be in a
1424 | // fixpoint state, but we can use the optimistic results for them anyway.
1425 | SmallPtrSet<AbstractAttribute *, 32> Visited; | |||
1426 | for (unsigned u = 0; u < ChangedAAs.size(); u++) { | |||
1427 | AbstractAttribute *ChangedAA = ChangedAAs[u]; | |||
1428 | if (!Visited.insert(ChangedAA).second) | |||
1429 | continue; | |||
1430 | ||||
1431 | AbstractState &State = ChangedAA->getState(); | |||
1432 | if (!State.isAtFixpoint()) { | |||
1433 | State.indicatePessimisticFixpoint(); | |||
1434 | ||||
1435 | NumAttributesTimedOut++; | |||
1436 | } | |||
1437 | ||||
1438 | while (!ChangedAA->Deps.empty()) { | |||
1439 | ChangedAAs.push_back( | |||
1440 | cast<AbstractAttribute>(ChangedAA->Deps.back().getPointer())); | |||
1441 | ChangedAA->Deps.pop_back(); | |||
1442 | } | |||
1443 | } | |||
1444 | ||||
1445 | LLVM_DEBUG({
1446 |   if (!Visited.empty())
1447 |     dbgs() << "\n[Attributor] Finalized " << Visited.size()
1448 |            << " abstract attributes.\n";
1449 | });
1450 | ||||
1451 | if (VerifyMaxFixpointIterations && | |||
1452 | IterationCounter != MaxFixedPointIterations) { | |||
1453 | errs() << "\n[Attributor] Fixpoint iteration done after: " | |||
1454 | << IterationCounter << "/" << MaxFixedPointIterations | |||
1455 | << " iterations\n"; | |||
1456 | llvm_unreachable("The fixpoint was not reached with exactly the number of "
1457 |                  "specified iterations!");
1458 | } | |||
1459 | } | |||
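// Editorial note (illustrative example, not part of the original source): the
// dependence handling above can be read as follows. If AA1 (say, a "nounwind"
// deduction for a caller) registered a REQUIRED dependence on AA2 (the same
// deduction for its callee) and AA2 becomes invalid, AA1 is moved to a
// pessimistic fixpoint directly, without scheduling another update. An
// OPTIONAL dependence instead merely puts AA1 back on the worklist so it can
// recompute with the new information on the next iteration.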
1460 | ||||
1461 | ChangeStatus Attributor::manifestAttributes() { | |||
1462 | TimeTraceScope TimeScope("Attributor::manifestAttributes"); | |||
1463 | size_t NumFinalAAs = DG.SyntheticRoot.Deps.size(); | |||
1464 | ||||
1465 | unsigned NumManifested = 0; | |||
1466 | unsigned NumAtFixpoint = 0; | |||
1467 | ChangeStatus ManifestChange = ChangeStatus::UNCHANGED; | |||
1468 | for (auto &DepAA : DG.SyntheticRoot.Deps) { | |||
1469 | AbstractAttribute *AA = cast<AbstractAttribute>(DepAA.getPointer()); | |||
1470 | AbstractState &State = AA->getState(); | |||
1471 | ||||
1472 | // If a fixpoint was not already reached, we can now take the optimistic
1473 | // state. This is correct because we enforced a pessimistic one
1474 | // on abstract attributes that were transitively dependent on a changed one
1475 | // already above.
1476 | if (!State.isAtFixpoint()) | |||
1477 | State.indicateOptimisticFixpoint(); | |||
1478 | ||||
1479 | // We must not manifest attributes that use CallBase context information.
1480 | if (AA->hasCallBaseContext()) | |||
1481 | continue; | |||
1482 | // If the state is invalid, we do not try to manifest it. | |||
1483 | if (!State.isValidState()) | |||
1484 | continue; | |||
1485 | ||||
1486 | // Skip dead code. | |||
1487 | bool UsedAssumedInformation = false; | |||
1488 | if (isAssumedDead(*AA, nullptr, UsedAssumedInformation, | |||
1489 | /* CheckBBLivenessOnly */ true)) | |||
1490 | continue; | |||
1491 | // Check the manifest debug counter, which allows skipping the manifestation
1492 | // of individual AAs.
1493 | if (!DebugCounter::shouldExecute(ManifestDBGCounter)) | |||
1494 | continue; | |||
1495 | // Manifest the state and record if we changed the IR. | |||
1496 | ChangeStatus LocalChange = AA->manifest(*this); | |||
1497 | if (LocalChange == ChangeStatus::CHANGED && AreStatisticsEnabled()) | |||
1498 | AA->trackStatistics(); | |||
1499 | LLVM_DEBUG(dbgs() << "[Attributor] Manifest " << LocalChange << " : " << *AA
1500 |                   << "\n");
1501 | ||||
1502 | ManifestChange = ManifestChange | LocalChange; | |||
1503 | ||||
1504 | NumAtFixpoint++; | |||
1505 | NumManifested += (LocalChange == ChangeStatus::CHANGED); | |||
1506 | } | |||
1507 | ||||
1508 | (void)NumManifested; | |||
1509 | (void)NumAtFixpoint; | |||
1510 | LLVM_DEBUG(dbgs() << "\n[Attributor] Manifested " << NumManifested
1511 |                   << " arguments while " << NumAtFixpoint
1512 |                   << " were in a valid fixpoint state\n");
1513 | ||||
1514 | NumAttributesManifested += NumManifested; | |||
1515 | NumAttributesValidFixpoint += NumAtFixpoint; | |||
1516 | ||||
1517 | (void)NumFinalAAs; | |||
1518 | if (NumFinalAAs != DG.SyntheticRoot.Deps.size()) { | |||
1519 | for (unsigned u = NumFinalAAs; u < DG.SyntheticRoot.Deps.size(); ++u) | |||
1520 | errs() << "Unexpected abstract attribute: " | |||
1521 | << cast<AbstractAttribute>(DG.SyntheticRoot.Deps[u].getPointer()) | |||
1522 | << " :: " | |||
1523 | << cast<AbstractAttribute>(DG.SyntheticRoot.Deps[u].getPointer()) | |||
1524 | ->getIRPosition() | |||
1525 | .getAssociatedValue() | |||
1526 | << "\n"; | |||
1527 | llvm_unreachable("Expected the final number of abstract attributes to "
1528 |                  "remain unchanged!");
1529 | } | |||
1530 | return ManifestChange; | |||
1531 | } | |||
1532 | ||||
1533 | void Attributor::identifyDeadInternalFunctions() { | |||
1534 | // Early exit if we don't intend to delete functions. | |||
1535 | if (!DeleteFns) | |||
1536 | return; | |||
1537 | ||||
1538 | // Identify dead internal functions and delete them. This happens outside
1539 | // the other fixpoint analysis as we might treat potentially dead functions
1540 | // as live to lower the number of iterations. If they happen to be dead, the
1541 | // fixpoint loop below will identify and eliminate them.
1542 | SmallVector<Function *, 8> InternalFns; | |||
1543 | for (Function *F : Functions) | |||
1544 | if (F->hasLocalLinkage()) | |||
1545 | InternalFns.push_back(F); | |||
1546 | ||||
1547 | SmallPtrSet<Function *, 8> LiveInternalFns; | |||
1548 | bool FoundLiveInternal = true; | |||
1549 | while (FoundLiveInternal) { | |||
1550 | FoundLiveInternal = false; | |||
1551 | for (unsigned u = 0, e = InternalFns.size(); u < e; ++u) { | |||
1552 | Function *F = InternalFns[u]; | |||
1553 | if (!F) | |||
1554 | continue; | |||
1555 | ||||
1556 | bool AllCallSitesKnown; | |||
1557 | if (checkForAllCallSites( | |||
1558 | [&](AbstractCallSite ACS) { | |||
1559 | Function *Callee = ACS.getInstruction()->getFunction(); | |||
1560 | return ToBeDeletedFunctions.count(Callee) || | |||
1561 | (Functions.count(Callee) && Callee->hasLocalLinkage() && | |||
1562 | !LiveInternalFns.count(Callee)); | |||
1563 | }, | |||
1564 | *F, true, nullptr, AllCallSitesKnown)) { | |||
1565 | continue; | |||
1566 | } | |||
1567 | ||||
1568 | LiveInternalFns.insert(F); | |||
1569 | InternalFns[u] = nullptr; | |||
1570 | FoundLiveInternal = true; | |||
1571 | } | |||
1572 | } | |||
1573 | ||||
1574 | for (unsigned u = 0, e = InternalFns.size(); u < e; ++u) | |||
1575 | if (Function *F = InternalFns[u]) | |||
1576 | ToBeDeletedFunctions.insert(F); | |||
1577 | } | |||
1578 | ||||
1579 | ChangeStatus Attributor::cleanupIR() { | |||
1580 | TimeTraceScope TimeScope("Attributor::cleanupIR"); | |||
1581 | // Delete things at the end to avoid invalid references and to keep a sensible order.
1582 | LLVM_DEBUG(dbgs() << "\n[Attributor] Delete/replace at least "
1583 |                   << ToBeDeletedFunctions.size() << " functions and "
1584 |                   << ToBeDeletedBlocks.size() << " blocks and "
1585 |                   << ToBeDeletedInsts.size() << " instructions and "
1586 |                   << ToBeChangedValues.size() << " values and "
1587 |                   << ToBeChangedUses.size() << " uses. "
1588 |                   << "Preserve manifest added " << ManifestAddedBlocks.size()
1589 |                   << " blocks\n");
1590 | ||||
1591 | SmallVector<WeakTrackingVH, 32> DeadInsts; | |||
1592 | SmallVector<Instruction *, 32> TerminatorsToFold; | |||
1593 | ||||
1594 | auto ReplaceUse = [&](Use *U, Value *NewV) { | |||
1595 | Value *OldV = U->get(); | |||
1596 | ||||
1597 | // If we plan to replace NewV we need to update it at this point. | |||
1598 | do { | |||
1599 | const auto &Entry = ToBeChangedValues.lookup(NewV); | |||
1600 | if (!Entry.first) | |||
1601 | break; | |||
1602 | NewV = Entry.first; | |||
1603 | } while (true); | |||
1604 | ||||
1605 | // Do not replace uses in returns if the value is a must-tail call we will | |||
1606 | // not delete. | |||
1607 | if (auto *RI = dyn_cast<ReturnInst>(U->getUser())) { | |||
1608 | if (auto *CI = dyn_cast<CallInst>(OldV->stripPointerCasts())) | |||
1609 | if (CI->isMustTailCall() && | |||
1610 | (!ToBeDeletedInsts.count(CI) || !isRunOn(*CI->getCaller()))) | |||
1611 | return; | |||
1612 | // If we rewrite a return and the new value is not an argument, strip the | |||
1613 | // `returned` attribute as it is wrong now. | |||
1614 | if (!isa<Argument>(NewV)) | |||
1615 | for (auto &Arg : RI->getFunction()->args()) | |||
1616 | Arg.removeAttr(Attribute::Returned); | |||
1617 | } | |||
1618 | ||||
1619 | // Do not perform call graph altering changes outside the SCC. | |||
1620 | if (auto *CB = dyn_cast<CallBase>(U->getUser())) | |||
1621 | if (CB->isCallee(U) && !isRunOn(*CB->getCaller())) | |||
1622 | return; | |||
1623 | ||||
1624 | LLVM_DEBUG(dbgs() << "Use " << *NewV << " in " << *U->getUser()
1625 |                   << " instead of " << *OldV << "\n");
1626 | U->set(NewV); | |||
1627 | ||||
1628 | if (Instruction *I = dyn_cast<Instruction>(OldV)) { | |||
1629 | CGModifiedFunctions.insert(I->getFunction()); | |||
1630 | if (!isa<PHINode>(I) && !ToBeDeletedInsts.count(I) && | |||
1631 | isInstructionTriviallyDead(I)) | |||
1632 | DeadInsts.push_back(I); | |||
1633 | } | |||
1634 | if (isa<UndefValue>(NewV) && isa<CallBase>(U->getUser())) { | |||
1635 | auto *CB = cast<CallBase>(U->getUser()); | |||
1636 | if (CB->isArgOperand(U)) { | |||
1637 | unsigned Idx = CB->getArgOperandNo(U); | |||
1638 | CB->removeParamAttr(Idx, Attribute::NoUndef); | |||
1639 | Function *Fn = CB->getCalledFunction(); | |||
1640 | if (Fn && Fn->arg_size() > Idx) | |||
1641 | Fn->removeParamAttr(Idx, Attribute::NoUndef); | |||
1642 | } | |||
1643 | } | |||
1644 | if (isa<Constant>(NewV) && isa<BranchInst>(U->getUser())) { | |||
1645 | Instruction *UserI = cast<Instruction>(U->getUser()); | |||
1646 | if (isa<UndefValue>(NewV)) { | |||
1647 | ToBeChangedToUnreachableInsts.insert(UserI); | |||
1648 | } else { | |||
1649 | TerminatorsToFold.push_back(UserI); | |||
1650 | } | |||
1651 | } | |||
1652 | }; | |||
1653 | ||||
1654 | for (auto &It : ToBeChangedUses) { | |||
1655 | Use *U = It.first; | |||
1656 | Value *NewV = It.second; | |||
1657 | ReplaceUse(U, NewV); | |||
1658 | } | |||
1659 | ||||
1660 | SmallVector<Use *, 4> Uses; | |||
1661 | for (auto &It : ToBeChangedValues) { | |||
1662 | Value *OldV = It.first; | |||
1663 | auto &Entry = It.second; | |||
1664 | Value *NewV = Entry.first; | |||
1665 | Uses.clear(); | |||
1666 | for (auto &U : OldV->uses()) | |||
1667 | if (Entry.second || !U.getUser()->isDroppable()) | |||
1668 | Uses.push_back(&U); | |||
1669 | for (Use *U : Uses) | |||
1670 | ReplaceUse(U, NewV); | |||
1671 | } | |||
1672 | ||||
1673 | for (auto &V : InvokeWithDeadSuccessor) | |||
1674 | if (InvokeInst *II = dyn_cast_or_null<InvokeInst>(V)) { | |||
1675 | assert(isRunOn(*II->getFunction()) &&
1676 |        "Cannot replace an invoke outside the current SCC!");
1677 | bool UnwindBBIsDead = II->hasFnAttr(Attribute::NoUnwind); | |||
1678 | bool NormalBBIsDead = II->hasFnAttr(Attribute::NoReturn); | |||
1679 | bool Invoke2CallAllowed = | |||
1680 | !AAIsDead::mayCatchAsynchronousExceptions(*II->getFunction()); | |||
1681 | assert((UnwindBBIsDead || NormalBBIsDead) &&
1682 |        "Invoke does not have dead successors!");
1683 | BasicBlock *BB = II->getParent(); | |||
1684 | BasicBlock *NormalDestBB = II->getNormalDest(); | |||
1685 | if (UnwindBBIsDead) { | |||
1686 | Instruction *NormalNextIP = &NormalDestBB->front(); | |||
1687 | if (Invoke2CallAllowed) { | |||
1688 | changeToCall(II); | |||
1689 | NormalNextIP = BB->getTerminator(); | |||
1690 | } | |||
1691 | if (NormalBBIsDead) | |||
1692 | ToBeChangedToUnreachableInsts.insert(NormalNextIP); | |||
1693 | } else { | |||
1694 | assert(NormalBBIsDead && "Broken invariant!");
1695 | if (!NormalDestBB->getUniquePredecessor()) | |||
1696 | NormalDestBB = SplitBlockPredecessors(NormalDestBB, {BB}, ".dead"); | |||
1697 | ToBeChangedToUnreachableInsts.insert(&NormalDestBB->front()); | |||
1698 | } | |||
1699 | } | |||
1700 | for (Instruction *I : TerminatorsToFold) { | |||
1701 | if (!isRunOn(*I->getFunction())) | |||
1702 | continue; | |||
1703 | CGModifiedFunctions.insert(I->getFunction()); | |||
1704 | ConstantFoldTerminator(I->getParent()); | |||
1705 | } | |||
1706 | for (auto &V : ToBeChangedToUnreachableInsts) | |||
1707 | if (Instruction *I = dyn_cast_or_null<Instruction>(V)) { | |||
1708 | if (!isRunOn(*I->getFunction())) | |||
1709 | continue; | |||
1710 | CGModifiedFunctions.insert(I->getFunction()); | |||
1711 | changeToUnreachable(I); | |||
1712 | } | |||
1713 | ||||
1714 | for (auto &V : ToBeDeletedInsts) { | |||
1715 | if (Instruction *I = dyn_cast_or_null<Instruction>(V)) { | |||
1716 | if (auto *CB = dyn_cast<CallBase>(I)) { | |||
1717 | if (!isRunOn(*I->getFunction())) | |||
1718 | continue; | |||
1719 | if (!isa<IntrinsicInst>(CB)) | |||
1720 | CGUpdater.removeCallSite(*CB); | |||
1721 | } | |||
1722 | I->dropDroppableUses(); | |||
1723 | CGModifiedFunctions.insert(I->getFunction()); | |||
1724 | if (!I->getType()->isVoidTy()) | |||
1725 | I->replaceAllUsesWith(UndefValue::get(I->getType())); | |||
1726 | if (!isa<PHINode>(I) && isInstructionTriviallyDead(I)) | |||
1727 | DeadInsts.push_back(I); | |||
1728 | else | |||
1729 | I->eraseFromParent(); | |||
1730 | } | |||
1731 | } | |||
1732 | ||||
1733 | llvm::erase_if(DeadInsts, [&](WeakTrackingVH I) { | |||
1734 | return !I || !isRunOn(*cast<Instruction>(I)->getFunction()); | |||
1735 | }); | |||
1736 | ||||
1737 | LLVM_DEBUG({
1738 |   dbgs() << "[Attributor] DeadInsts size: " << DeadInsts.size() << "\n";
1739 |   for (auto &I : DeadInsts)
1740 |     if (I)
1741 |       dbgs() << " - " << *I << "\n";
1742 | });
1743 | ||||
1744 | RecursivelyDeleteTriviallyDeadInstructions(DeadInsts); | |||
1745 | ||||
1746 | if (unsigned NumDeadBlocks = ToBeDeletedBlocks.size()) { | |||
1747 | SmallVector<BasicBlock *, 8> ToBeDeletedBBs; | |||
1748 | ToBeDeletedBBs.reserve(NumDeadBlocks); | |||
1749 | for (BasicBlock *BB : ToBeDeletedBlocks) { | |||
1750 | assert(isRunOn(*BB->getParent()) &&
1751 |        "Cannot delete a block outside the current SCC!");
1752 | CGModifiedFunctions.insert(BB->getParent()); | |||
1753 | // Do not delete BBs added during manifests of AAs. | |||
1754 | if (ManifestAddedBlocks.contains(BB)) | |||
1755 | continue; | |||
1756 | ToBeDeletedBBs.push_back(BB); | |||
1757 | } | |||
1758 | // We do not actually delete the blocks but squash them into a single
1759 | // unreachable instruction; untangling the branches that jump here is
1760 | // something we still need to do in a more generic way.
1761 | DetatchDeadBlocks(ToBeDeletedBBs, nullptr); | |||
1762 | } | |||
1763 | ||||
1764 | identifyDeadInternalFunctions(); | |||
1765 | ||||
1766 | // Rewrite the functions as requested during manifest. | |||
1767 | ChangeStatus ManifestChange = rewriteFunctionSignatures(CGModifiedFunctions); | |||
1768 | ||||
1769 | for (Function *Fn : CGModifiedFunctions) | |||
1770 | if (!ToBeDeletedFunctions.count(Fn) && Functions.count(Fn)) | |||
1771 | CGUpdater.reanalyzeFunction(*Fn); | |||
1772 | ||||
1773 | for (Function *Fn : ToBeDeletedFunctions) { | |||
1774 | if (!Functions.count(Fn)) | |||
1775 | continue; | |||
1776 | CGUpdater.removeFunction(*Fn); | |||
1777 | } | |||
1778 | ||||
1779 | if (!ToBeChangedUses.empty()) | |||
1780 | ManifestChange = ChangeStatus::CHANGED; | |||
1781 | ||||
1782 | if (!ToBeChangedToUnreachableInsts.empty()) | |||
1783 | ManifestChange = ChangeStatus::CHANGED; | |||
1784 | ||||
1785 | if (!ToBeDeletedFunctions.empty()) | |||
1786 | ManifestChange = ChangeStatus::CHANGED; | |||
1787 | ||||
1788 | if (!ToBeDeletedBlocks.empty()) | |||
1789 | ManifestChange = ChangeStatus::CHANGED; | |||
1790 | ||||
1791 | if (!ToBeDeletedInsts.empty()) | |||
1792 | ManifestChange = ChangeStatus::CHANGED; | |||
1793 | ||||
1794 | if (!InvokeWithDeadSuccessor.empty()) | |||
1795 | ManifestChange = ChangeStatus::CHANGED; | |||
1796 | ||||
1797 | if (!DeadInsts.empty()) | |||
1798 | ManifestChange = ChangeStatus::CHANGED; | |||
1799 | ||||
1800 | NumFnDeleted += ToBeDeletedFunctions.size(); | |||
1801 | ||||
1802 | LLVM_DEBUG(dbgs() << "[Attributor] Deleted " << ToBeDeletedFunctions.size()
1803 |                   << " functions after manifest.\n");
1804 | ||||
1805 | #ifdef EXPENSIVE_CHECKS | |||
1806 | for (Function *F : Functions) { | |||
1807 | if (ToBeDeletedFunctions.count(F)) | |||
1808 | continue; | |||
1809 | assert(!verifyFunction(*F, &errs()) && "Module verification failed!");
1810 | } | |||
1811 | #endif | |||
1812 | ||||
1813 | return ManifestChange; | |||
1814 | } | |||
1815 | ||||
1816 | ChangeStatus Attributor::run() { | |||
1817 | TimeTraceScope TimeScope("Attributor::run"); | |||
1818 | AttributorCallGraph ACallGraph(*this); | |||
1819 | ||||
1820 | if (PrintCallGraph) | |||
1821 | ACallGraph.populateAll(); | |||
1822 | ||||
1823 | Phase = AttributorPhase::UPDATE; | |||
1824 | runTillFixpoint(); | |||
1825 | ||||
1826 | // dump graphs on demand | |||
1827 | if (DumpDepGraph) | |||
1828 | DG.dumpGraph(); | |||
1829 | ||||
1830 | if (ViewDepGraph) | |||
1831 | DG.viewGraph(); | |||
1832 | ||||
1833 | if (PrintDependencies) | |||
1834 | DG.print(); | |||
1835 | ||||
1836 | Phase = AttributorPhase::MANIFEST; | |||
1837 | ChangeStatus ManifestChange = manifestAttributes(); | |||
1838 | ||||
1839 | Phase = AttributorPhase::CLEANUP; | |||
1840 | ChangeStatus CleanupChange = cleanupIR(); | |||
1841 | ||||
1842 | if (PrintCallGraph) | |||
1843 | ACallGraph.print(); | |||
1844 | ||||
1845 | return ManifestChange | CleanupChange; | |||
1846 | } | |||
1847 | ||||
1848 | ChangeStatus Attributor::updateAA(AbstractAttribute &AA) { | |||
1849 | TimeTraceScope TimeScope( | |||
1850 | AA.getName() + std::to_string(AA.getIRPosition().getPositionKind()) + | |||
1851 | "::updateAA"); | |||
1852 | assert(Phase == AttributorPhase::UPDATE &&
1853 |        "We can update AA only in the update stage!");
1854 | ||||
1855 | // Use a new dependence vector for this update. | |||
1856 | DependenceVector DV; | |||
1857 | DependenceStack.push_back(&DV); | |||
1858 | ||||
1859 | auto &AAState = AA.getState(); | |||
1860 | ChangeStatus CS = ChangeStatus::UNCHANGED; | |||
1861 | bool UsedAssumedInformation = false; | |||
1862 | if (!isAssumedDead(AA, nullptr, UsedAssumedInformation, | |||
1863 | /* CheckBBLivenessOnly */ true)) | |||
1864 | CS = AA.update(*this); | |||
1865 | ||||
1866 | if (DV.empty()) { | |||
1867 | // If the attribute did not query any non-fix information, the state | |||
1868 | // will not change and we can indicate that right away. | |||
1869 | AAState.indicateOptimisticFixpoint(); | |||
1870 | } | |||
1871 | ||||
1872 | if (!AAState.isAtFixpoint()) | |||
1873 | rememberDependences(); | |||
1874 | ||||
1875 | // Verify the stack was used properly, that is, we pop the dependence vector
1876 | // we put there earlier.
1877 | DependenceVector *PoppedDV = DependenceStack.pop_back_val();
1878 | (void)PoppedDV;
1879 | assert(PoppedDV == &DV && "Inconsistent usage of the dependence stack!");
1880 | ||||
1881 | return CS; | |||
1882 | } | |||
1883 | ||||
1884 | void Attributor::createShallowWrapper(Function &F) { | |||
1885 | assert(!F.isDeclaration() && "Cannot create a wrapper around a declaration!");
1886 | ||||
1887 | Module &M = *F.getParent(); | |||
1888 | LLVMContext &Ctx = M.getContext(); | |||
1889 | FunctionType *FnTy = F.getFunctionType(); | |||
1890 | ||||
1891 | Function *Wrapper = | |||
1892 | Function::Create(FnTy, F.getLinkage(), F.getAddressSpace(), F.getName()); | |||
1893 | F.setName(""); // Make the wrapped function anonymous.
1894 | M.getFunctionList().insert(F.getIterator(), Wrapper); | |||
1895 | ||||
1896 | F.setLinkage(GlobalValue::InternalLinkage); | |||
1897 | ||||
1898 | F.replaceAllUsesWith(Wrapper); | |||
1899 | assert(F.use_empty() && "Uses remained after wrapper was created!");
1900 | ||||
1901 | // Move the COMDAT section to the wrapper. | |||
1902 | // TODO: Check if we need to keep it for F as well. | |||
1903 | Wrapper->setComdat(F.getComdat()); | |||
1904 | F.setComdat(nullptr); | |||
1905 | ||||
1906 | // Copy all metadata and attributes but keep them on F as well. | |||
1907 | SmallVector<std::pair<unsigned, MDNode *>, 1> MDs; | |||
1908 | F.getAllMetadata(MDs); | |||
1909 | for (auto MDIt : MDs) | |||
1910 | Wrapper->addMetadata(MDIt.first, *MDIt.second); | |||
1911 | Wrapper->setAttributes(F.getAttributes()); | |||
1912 | ||||
1913 | // Create the call in the wrapper. | |||
1914 | BasicBlock *EntryBB = BasicBlock::Create(Ctx, "entry", Wrapper); | |||
1915 | ||||
1916 | SmallVector<Value *, 8> Args; | |||
1917 | Argument *FArgIt = F.arg_begin(); | |||
1918 | for (Argument &Arg : Wrapper->args()) { | |||
1919 | Args.push_back(&Arg); | |||
1920 | Arg.setName((FArgIt++)->getName()); | |||
1921 | } | |||
1922 | ||||
1923 | CallInst *CI = CallInst::Create(&F, Args, "", EntryBB); | |||
1924 | CI->setTailCall(true); | |||
1925 | CI->addAttribute(AttributeList::FunctionIndex, Attribute::NoInline); | |||
1926 | ReturnInst::Create(Ctx, CI->getType()->isVoidTy() ? nullptr : CI, EntryBB); | |||
1927 | ||||
1928 | NumFnShallowWrappersCreated++; | |||
1929 | } | |||
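// Editorial note (illustrative sketch, not part of the original source):
// conceptually, for a function `i32 @foo(i32)`, the transformation above
// yields roughly the following IR, where the original body ends up in a
// now-internal, unnamed function (written here as `@foo.impl` purely for
// readability):
//
//   define i32 @foo(i32 %x) {               ; shallow wrapper, original linkage
//   entry:
//     %r = tail call i32 @foo.impl(i32 %x)  ; noinline call to the original body
//     ret i32 %r
//   }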
1930 | ||||
1931 | bool Attributor::isInternalizable(Function &F) { | |||
1932 | if (F.isDeclaration() || F.hasLocalLinkage() || | |||
1933 | GlobalValue::isInterposableLinkage(F.getLinkage())) | |||
1934 | return false; | |||
1935 | return true; | |||
1936 | } | |||
1937 | ||||
1938 | Function *Attributor::internalizeFunction(Function &F, bool Force) { | |||
1939 | if (!AllowDeepWrapper && !Force) | |||
1940 | return nullptr; | |||
1941 | if (!isInternalizable(F)) | |||
1942 | return nullptr; | |||
1943 | ||||
1944 | SmallPtrSet<Function *, 2> FnSet = {&F}; | |||
1945 | DenseMap<Function *, Function *> InternalizedFns; | |||
1946 | internalizeFunctions(FnSet, InternalizedFns); | |||
1947 | ||||
1948 | return InternalizedFns[&F]; | |||
1949 | } | |||
1950 | ||||
1951 | bool Attributor::internalizeFunctions(SmallPtrSetImpl<Function *> &FnSet, | |||
1952 | DenseMap<Function *, Function *> &FnMap) { | |||
1953 | for (Function *F : FnSet) | |||
1954 | if (!Attributor::isInternalizable(*F)) | |||
1955 | return false; | |||
1956 | ||||
1957 | FnMap.clear(); | |||
1958 | // Generate the internalized version of each function. | |||
1959 | for (Function *F : FnSet) { | |||
1960 | Module &M = *F->getParent(); | |||
1961 | FunctionType *FnTy = F->getFunctionType(); | |||
1962 | ||||
1963 | // Create a copy of the current function | |||
1964 | Function *Copied = | |||
1965 | Function::Create(FnTy, F->getLinkage(), F->getAddressSpace(), | |||
1966 | F->getName() + ".internalized"); | |||
1967 | ValueToValueMapTy VMap; | |||
1968 | auto *NewFArgIt = Copied->arg_begin(); | |||
1969 | for (auto &Arg : F->args()) { | |||
1970 | auto ArgName = Arg.getName(); | |||
1971 | NewFArgIt->setName(ArgName); | |||
1972 | VMap[&Arg] = &(*NewFArgIt++); | |||
1973 | } | |||
1974 | SmallVector<ReturnInst *, 8> Returns; | |||
1975 | ||||
1976 | // Copy the body of the original function to the new one | |||
1977 | CloneFunctionInto(Copied, F, VMap, | |||
1978 | CloneFunctionChangeType::LocalChangesOnly, Returns); | |||
1979 | ||||
1980 | // Set the linkage and visibility late as CloneFunctionInto has some
1981 | // implicit requirements.
1982 | Copied->setVisibility(GlobalValue::DefaultVisibility); | |||
1983 | Copied->setLinkage(GlobalValue::PrivateLinkage); | |||
1984 | ||||
1985 | // Copy metadata | |||
1986 | SmallVector<std::pair<unsigned, MDNode *>, 1> MDs; | |||
1987 | F->getAllMetadata(MDs); | |||
1988 | for (auto MDIt : MDs) | |||
1989 | if (!Copied->hasMetadata()) | |||
1990 | Copied->addMetadata(MDIt.first, *MDIt.second); | |||
1991 | ||||
1992 | M.getFunctionList().insert(F->getIterator(), Copied); | |||
1993 | Copied->setDSOLocal(true); | |||
1994 | FnMap[F] = Copied; | |||
1995 | } | |||
1996 | ||||
1997 | // Replace all uses of the old function with the new internalized function | |||
1998 | // unless the caller is a function that was just internalized. | |||
1999 | for (Function *F : FnSet) { | |||
2000 | auto &InternalizedFn = FnMap[F]; | |||
2001 | auto IsNotInternalized = [&](Use &U) -> bool { | |||
2002 | if (auto *CB = dyn_cast<CallBase>(U.getUser())) | |||
2003 | return !FnMap.lookup(CB->getCaller()); | |||
2004 | return false; | |||
2005 | }; | |||
2006 | F->replaceUsesWithIf(InternalizedFn, IsNotInternalized); | |||
2007 | } | |||
2008 | ||||
2009 | return true; | |||
2010 | } | |||
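// Editorial note (illustrative sketch, not part of the original source): a
// minimal use of the single-function entry point defined above, assuming an
// Attributor instance `A` and a non-local, non-interposable function `F`:
//
//   if (Function *Copied = A.internalizeFunction(F, /* Force */ true)) {
//     // `Copied` is the private "<name>.internalized" clone; call sites whose
//     // caller was not itself internalized are redirected to it, while `F`
//     // stays in place for external callers.
//   }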
2011 | ||||
2012 | bool Attributor::isValidFunctionSignatureRewrite( | |||
2013 | Argument &Arg, ArrayRef<Type *> ReplacementTypes) { | |||
2014 | ||||
2015 | if (!RewriteSignatures) | |||
2016 | return false; | |||
2017 | ||||
2018 | auto CallSiteCanBeChanged = [](AbstractCallSite ACS) { | |||
2019 | // Forbid call sites that cast the function return type. If we need to
2020 | // rewrite these functions we need to re-create a cast for the new call site
2021 | // (if the old one had uses).
2022 | if (!ACS.getCalledFunction() || | |||
2023 | ACS.getInstruction()->getType() != | |||
2024 | ACS.getCalledFunction()->getReturnType()) | |||
2025 | return false; | |||
2026 | // Forbid must-tail calls for now. | |||
2027 | return !ACS.isCallbackCall() && !ACS.getInstruction()->isMustTailCall(); | |||
2028 | }; | |||
2029 | ||||
2030 | Function *Fn = Arg.getParent(); | |||
2031 | // Avoid var-arg functions for now. | |||
2032 | if (Fn->isVarArg()) { | |||
2033 | LLVM_DEBUG(dbgs() << "[Attributor] Cannot rewrite var-args functions\n");
2034 | return false; | |||
2035 | } | |||
2036 | ||||
2037 | // Avoid functions with complicated argument passing semantics. | |||
2038 | AttributeList FnAttributeList = Fn->getAttributes(); | |||
2039 | if (FnAttributeList.hasAttrSomewhere(Attribute::Nest) || | |||
2040 | FnAttributeList.hasAttrSomewhere(Attribute::StructRet) || | |||
2041 | FnAttributeList.hasAttrSomewhere(Attribute::InAlloca) || | |||
2042 | FnAttributeList.hasAttrSomewhere(Attribute::Preallocated)) { | |||
2043 | LLVM_DEBUG(
2044 |     dbgs() << "[Attributor] Cannot rewrite due to complex attribute\n");
2045 | return false; | |||
2046 | } | |||
2047 | ||||
2048 | // Avoid callbacks for now. | |||
2049 | bool AllCallSitesKnown; | |||
2050 | if (!checkForAllCallSites(CallSiteCanBeChanged, *Fn, true, nullptr, | |||
2051 | AllCallSitesKnown)) { | |||
2052 | LLVM_DEBUG(dbgs() << "[Attributor] Cannot rewrite all call sites\n");
2053 | return false; | |||
2054 | } | |||
2055 | ||||
2056 | auto InstPred = [](Instruction &I) { | |||
2057 | if (auto *CI = dyn_cast<CallInst>(&I)) | |||
2058 | return !CI->isMustTailCall(); | |||
2059 | return true; | |||
2060 | }; | |||
2061 | ||||
2062 | // Forbid must-tail calls for now. | |||
2063 | // TODO: | |||
2064 | bool UsedAssumedInformation = false; | |||
2065 | auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(*Fn); | |||
2066 | if (!checkForAllInstructionsImpl(nullptr, OpcodeInstMap, InstPred, nullptr, | |||
2067 | nullptr, {Instruction::Call}, | |||
2068 | UsedAssumedInformation)) { | |||
2069 | LLVM_DEBUG(dbgs() << "[Attributor] Cannot rewrite due to instructions\n");
2070 | return false; | |||
2071 | } | |||
2072 | ||||
2073 | return true; | |||
2074 | } | |||
2075 | ||||
2076 | bool Attributor::registerFunctionSignatureRewrite( | |||
2077 | Argument &Arg, ArrayRef<Type *> ReplacementTypes, | |||
2078 | ArgumentReplacementInfo::CalleeRepairCBTy &&CalleeRepairCB, | |||
2079 | ArgumentReplacementInfo::ACSRepairCBTy &&ACSRepairCB) { | |||
2080 | LLVM_DEBUG(dbgs() << "[Attributor] Register new rewrite of " << Arg << " in "
2081 |                   << Arg.getParent()->getName() << " with "
2082 |                   << ReplacementTypes.size() << " replacements\n");
2083 | assert(isValidFunctionSignatureRewrite(Arg, ReplacementTypes) &&
2084 |        "Cannot register an invalid rewrite");
2085 | ||||
2086 | Function *Fn = Arg.getParent(); | |||
2087 | SmallVectorImpl<std::unique_ptr<ArgumentReplacementInfo>> &ARIs = | |||
2088 | ArgumentReplacementMap[Fn]; | |||
2089 | if (ARIs.empty()) | |||
2090 | ARIs.resize(Fn->arg_size()); | |||
2091 | ||||
2092 | // If we already have a replacement with at most as many new arguments,
2093 | // ignore this request.
2094 | std::unique_ptr<ArgumentReplacementInfo> &ARI = ARIs[Arg.getArgNo()]; | |||
2095 | if (ARI && ARI->getNumReplacementArgs() <= ReplacementTypes.size()) { | |||
2096 | LLVM_DEBUG(dbgs() << "[Attributor] Existing rewrite is preferred\n");
2097 | return false; | |||
2098 | } | |||
2099 | ||||
2100 | // If we have a replacement already but we like the new one better, delete | |||
2101 | // the old. | |||
2102 | ARI.reset(); | |||
2103 | ||||
2104 | LLVM_DEBUG(dbgs() << "[Attributor] Register new rewrite of " << Arg << " in "
2105 |                   << Arg.getParent()->getName() << " with "
2106 |                   << ReplacementTypes.size() << " replacements\n");
2107 | ||||
2108 | // Remember the replacement. | |||
2109 | ARI.reset(new ArgumentReplacementInfo(*this, Arg, ReplacementTypes, | |||
2110 | std::move(CalleeRepairCB), | |||
2111 | std::move(ACSRepairCB))); | |||
2112 | ||||
2113 | return true; | |||
2114 | } | |||
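// Editorial note (illustrative sketch, not part of the original source): a
// rewrite that expands one argument into two replacement values could be
// registered roughly as below, assuming an Attributor `A`, an Argument `Arg`,
// and i32/i64 as stand-in replacement types. The callback parameter lists
// mirror how CalleeRepairCB and ACSRepairCB are invoked later in this file.
//
//   Type *I32 = Type::getInt32Ty(Arg.getContext());
//   Type *I64 = Type::getInt64Ty(Arg.getContext());
//   Attributor::ArgumentReplacementInfo::CalleeRepairCBTy CalleeRepairCB =
//       [](const Attributor::ArgumentReplacementInfo &ARI, Function &NewFn,
//          Function::arg_iterator ArgIt) { /* wire up the new arguments */ };
//   Attributor::ArgumentReplacementInfo::ACSRepairCBTy ACSRepairCB =
//       [](const Attributor::ArgumentReplacementInfo &ARI,
//          AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOps) {
//         /* push one value per registered replacement type */
//       };
//   A.registerFunctionSignatureRewrite(Arg, {I32, I64},
//                                      std::move(CalleeRepairCB),
//                                      std::move(ACSRepairCB));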
2115 | ||||
2116 | bool Attributor::shouldSeedAttribute(AbstractAttribute &AA) { | |||
2117 | bool Result = true; | |||
2118 | #ifndef NDEBUG
2119 | if (SeedAllowList.size() != 0) | |||
2120 | Result = | |||
2121 | std::count(SeedAllowList.begin(), SeedAllowList.end(), AA.getName()); | |||
2122 | Function *Fn = AA.getAnchorScope(); | |||
2123 | if (FunctionSeedAllowList.size() != 0 && Fn) | |||
2124 | Result &= std::count(FunctionSeedAllowList.begin(), | |||
2125 | FunctionSeedAllowList.end(), Fn->getName()); | |||
2126 | #endif | |||
2127 | return Result; | |||
2128 | } | |||
2129 | ||||
2130 | ChangeStatus Attributor::rewriteFunctionSignatures( | |||
2131 | SmallPtrSetImpl<Function *> &ModifiedFns) { | |||
2132 | ChangeStatus Changed = ChangeStatus::UNCHANGED; | |||
2133 | ||||
2134 | for (auto &It : ArgumentReplacementMap) { | |||
2135 | Function *OldFn = It.getFirst(); | |||
2136 | ||||
2137 | // Deleted functions do not require rewrites. | |||
2138 | if (!Functions.count(OldFn) || ToBeDeletedFunctions.count(OldFn)) | |||
2139 | continue; | |||
2140 | ||||
2141 | const SmallVectorImpl<std::unique_ptr<ArgumentReplacementInfo>> &ARIs = | |||
2142 | It.getSecond(); | |||
2143 | assert(ARIs.size() == OldFn->arg_size() && "Inconsistent state!");
2144 | ||||
2145 | SmallVector<Type *, 16> NewArgumentTypes; | |||
2146 | SmallVector<AttributeSet, 16> NewArgumentAttributes; | |||
2147 | ||||
2148 | // Collect replacement argument types and copy over existing attributes. | |||
2149 | AttributeList OldFnAttributeList = OldFn->getAttributes(); | |||
2150 | for (Argument &Arg : OldFn->args()) { | |||
2151 | if (const std::unique_ptr<ArgumentReplacementInfo> &ARI = | |||
2152 | ARIs[Arg.getArgNo()]) { | |||
2153 | NewArgumentTypes.append(ARI->ReplacementTypes.begin(), | |||
2154 | ARI->ReplacementTypes.end()); | |||
2155 | NewArgumentAttributes.append(ARI->getNumReplacementArgs(), | |||
2156 | AttributeSet()); | |||
2157 | } else { | |||
2158 | NewArgumentTypes.push_back(Arg.getType()); | |||
2159 | NewArgumentAttributes.push_back( | |||
2160 | OldFnAttributeList.getParamAttributes(Arg.getArgNo())); | |||
2161 | } | |||
2162 | } | |||
2163 | ||||
2164 | FunctionType *OldFnTy = OldFn->getFunctionType(); | |||
2165 | Type *RetTy = OldFnTy->getReturnType(); | |||
2166 | ||||
2167 | // Construct the new function type using the new arguments types. | |||
2168 | FunctionType *NewFnTy = | |||
2169 | FunctionType::get(RetTy, NewArgumentTypes, OldFnTy->isVarArg()); | |||
2170 | ||||
2171 | LLVM_DEBUG(dbgs() << "[Attributor] Function rewrite '" << OldFn->getName()
2172 |                   << "' from " << *OldFn->getFunctionType() << " to "
2173 |                   << *NewFnTy << "\n");
2174 | ||||
2175 | // Create the new function body and insert it into the module. | |||
2176 | Function *NewFn = Function::Create(NewFnTy, OldFn->getLinkage(), | |||
2177 | OldFn->getAddressSpace(), ""); | |||
2178 | Functions.insert(NewFn); | |||
2179 | OldFn->getParent()->getFunctionList().insert(OldFn->getIterator(), NewFn); | |||
2180 | NewFn->takeName(OldFn); | |||
2181 | NewFn->copyAttributesFrom(OldFn); | |||
2182 | ||||
2183 | // Patch the pointer to LLVM function in debug info descriptor. | |||
2184 | NewFn->setSubprogram(OldFn->getSubprogram()); | |||
2185 | OldFn->setSubprogram(nullptr); | |||
2186 | ||||
2187 | // Recompute the parameter attributes list based on the new arguments for | |||
2188 | // the function. | |||
2189 | LLVMContext &Ctx = OldFn->getContext(); | |||
2190 | NewFn->setAttributes(AttributeList::get( | |||
2191 | Ctx, OldFnAttributeList.getFnAttributes(), | |||
2192 | OldFnAttributeList.getRetAttributes(), NewArgumentAttributes)); | |||
2193 | ||||
2194 | // Since we have now created the new function, splice the body of the old | |||
2195 | // function right into the new function, leaving the old rotting hulk of the | |||
2196 | // function empty. | |||
2197 | NewFn->getBasicBlockList().splice(NewFn->begin(), | |||
2198 | OldFn->getBasicBlockList()); | |||
2199 | ||||
2200 | // Fixup block addresses to reference new function. | |||
2201 | SmallVector<BlockAddress *, 8u> BlockAddresses; | |||
2202 | for (User *U : OldFn->users()) | |||
2203 | if (auto *BA = dyn_cast<BlockAddress>(U)) | |||
2204 | BlockAddresses.push_back(BA); | |||
2205 | for (auto *BA : BlockAddresses) | |||
2206 | BA->replaceAllUsesWith(BlockAddress::get(NewFn, BA->getBasicBlock())); | |||
2207 | ||||
2208 | // Set of all "call-like" instructions that invoke the old function mapped | |||
2209 | // to their new replacements. | |||
2210 | SmallVector<std::pair<CallBase *, CallBase *>, 8> CallSitePairs; | |||
2211 | ||||
2212 | // Callback to create a new "call-like" instruction for a given one. | |||
2213 | auto CallSiteReplacementCreator = [&](AbstractCallSite ACS) { | |||
2214 | CallBase *OldCB = cast<CallBase>(ACS.getInstruction()); | |||
2215 | const AttributeList &OldCallAttributeList = OldCB->getAttributes(); | |||
2216 | ||||
2217 | // Collect the new argument operands for the replacement call site. | |||
2218 | SmallVector<Value *, 16> NewArgOperands; | |||
2219 | SmallVector<AttributeSet, 16> NewArgOperandAttributes; | |||
2220 | for (unsigned OldArgNum = 0; OldArgNum < ARIs.size(); ++OldArgNum) { | |||
2221 | unsigned NewFirstArgNum = NewArgOperands.size(); | |||
2222 | (void)NewFirstArgNum; // only used inside assert. | |||
2223 | if (const std::unique_ptr<ArgumentReplacementInfo> &ARI = | |||
2224 | ARIs[OldArgNum]) { | |||
2225 | if (ARI->ACSRepairCB) | |||
2226 | ARI->ACSRepairCB(*ARI, ACS, NewArgOperands); | |||
2227 | assert(ARI->getNumReplacementArgs() + NewFirstArgNum ==
2228 |            NewArgOperands.size() &&
2229 |        "ACS repair callback did not provide as many operands as new "
2230 |        "types were registered!");
2231 | // TODO: Expose the attribute set to the ACS repair callback.
2232 | NewArgOperandAttributes.append(ARI->ReplacementTypes.size(), | |||
2233 | AttributeSet()); | |||
2234 | } else { | |||
2235 | NewArgOperands.push_back(ACS.getCallArgOperand(OldArgNum)); | |||
2236 | NewArgOperandAttributes.push_back( | |||
2237 | OldCallAttributeList.getParamAttributes(OldArgNum)); | |||
2238 | } | |||
2239 | } | |||
2240 | ||||
2241 | assert(NewArgOperands.size() == NewArgOperandAttributes.size() &&
2242 |        "Mismatch # argument operands vs. # argument operand attributes!");
2243 | assert(NewArgOperands.size() == NewFn->arg_size() &&
2244 |        "Mismatch # argument operands vs. # function arguments!");
2245 | ||||
2246 | SmallVector<OperandBundleDef, 4> OperandBundleDefs; | |||
2247 | OldCB->getOperandBundlesAsDefs(OperandBundleDefs); | |||
2248 | ||||
2249 | // Create a new call or invoke instruction to replace the old one. | |||
2250 | CallBase *NewCB; | |||
2251 | if (InvokeInst *II = dyn_cast<InvokeInst>(OldCB)) { | |||
2252 | NewCB = | |||
2253 | InvokeInst::Create(NewFn, II->getNormalDest(), II->getUnwindDest(), | |||
2254 | NewArgOperands, OperandBundleDefs, "", OldCB); | |||
2255 | } else { | |||
2256 | auto *NewCI = CallInst::Create(NewFn, NewArgOperands, OperandBundleDefs, | |||
2257 | "", OldCB); | |||
2258 | NewCI->setTailCallKind(cast<CallInst>(OldCB)->getTailCallKind()); | |||
2259 | NewCB = NewCI; | |||
2260 | } | |||
2261 | ||||
2262 | // Copy over various properties and the new attributes. | |||
2263 | NewCB->copyMetadata(*OldCB, {LLVMContext::MD_prof, LLVMContext::MD_dbg}); | |||
2264 | NewCB->setCallingConv(OldCB->getCallingConv()); | |||
2265 | NewCB->takeName(OldCB); | |||
2266 | NewCB->setAttributes(AttributeList::get( | |||
2267 | Ctx, OldCallAttributeList.getFnAttributes(), | |||
2268 | OldCallAttributeList.getRetAttributes(), NewArgOperandAttributes)); | |||
2269 | ||||
2270 | CallSitePairs.push_back({OldCB, NewCB}); | |||
2271 | return true; | |||
2272 | }; | |||
2273 | ||||
2274 | // Use the CallSiteReplacementCreator to create replacement call sites. | |||
2275 | bool AllCallSitesKnown; | |||
2276 | bool Success = checkForAllCallSites(CallSiteReplacementCreator, *OldFn, | |||
2277 | true, nullptr, AllCallSitesKnown); | |||
2278 | (void)Success; | |||
2279 | assert(Success && "Assumed call site replacement to succeed!");
2280 | ||||
2281 | // Rewire the arguments. | |||
2282 | Argument *OldFnArgIt = OldFn->arg_begin(); | |||
2283 | Argument *NewFnArgIt = NewFn->arg_begin(); | |||
2284 | for (unsigned OldArgNum = 0; OldArgNum < ARIs.size(); | |||
2285 | ++OldArgNum, ++OldFnArgIt) { | |||
2286 | if (const std::unique_ptr<ArgumentReplacementInfo> &ARI = | |||
2287 | ARIs[OldArgNum]) { | |||
2288 | if (ARI->CalleeRepairCB) | |||
2289 | ARI->CalleeRepairCB(*ARI, *NewFn, NewFnArgIt); | |||
2290 | NewFnArgIt += ARI->ReplacementTypes.size(); | |||
2291 | } else { | |||
2292 | NewFnArgIt->takeName(&*OldFnArgIt); | |||
2293 | OldFnArgIt->replaceAllUsesWith(&*NewFnArgIt); | |||
2294 | ++NewFnArgIt; | |||
2295 | } | |||
2296 | } | |||
2297 | ||||
2298 | // Eliminate the instructions *after* we visited all of them. | |||
2299 | for (auto &CallSitePair : CallSitePairs) { | |||
2300 | CallBase &OldCB = *CallSitePair.first; | |||
2301 | CallBase &NewCB = *CallSitePair.second; | |||
2302 | assert(OldCB.getType() == NewCB.getType() &&
2303 |        "Cannot handle call sites with different types!");
2304 | ModifiedFns.insert(OldCB.getFunction()); | |||
2305 | CGUpdater.replaceCallSite(OldCB, NewCB); | |||
2306 | OldCB.replaceAllUsesWith(&NewCB); | |||
2307 | OldCB.eraseFromParent(); | |||
2308 | } | |||
2309 | ||||
2310 | // Replace the function in the call graph (if any). | |||
2311 | CGUpdater.replaceFunctionWith(*OldFn, *NewFn); | |||
2312 | ||||
2313 | // If the old function was modified and needed to be reanalyzed, the new one | |||
2314 | // does now. | |||
2315 | if (ModifiedFns.erase(OldFn)) | |||
2316 | ModifiedFns.insert(NewFn); | |||
2317 | ||||
2318 | Changed = ChangeStatus::CHANGED; | |||
2319 | } | |||
2320 | ||||
2321 | return Changed; | |||
2322 | } | |||
2323 | ||||
2324 | void InformationCache::initializeInformationCache(const Function &CF, | |||
2325 | FunctionInfo &FI) { | |||
2326 | // As we do not modify the function here we can remove the const
2327 | // without breaking implicit assumptions. At the end of the day, we could
2328 | // initialize the cache eagerly, which would look the same to the users.
2329 | Function &F = const_cast<Function &>(CF); | |||
2330 | ||||
2331 | // Walk all instructions to find interesting instructions that might be | |||
2332 | // queried by abstract attributes during their initialization or update. | |||
2333 | // This has to happen before we create attributes. | |||
2334 | ||||
2335 | for (Instruction &I : instructions(&F)) { | |||
2336 | bool IsInterestingOpcode = false; | |||
2337 | ||||
2338 | // To allow easy access to all instructions in a function with a given
2339 | // opcode we store them in the InfoCache. As not all opcodes are interesting
2340 | // to concrete attributes we only cache the ones that are, as identified in
2341 | // the following switch.
2342 | // Note: There are no concrete attributes now so this is initially empty.
2343 | switch (I.getOpcode()) { | |||
2344 | default: | |||
2345 | assert(!isa<CallBase>(&I) &&
2346 |        "New call base instruction type needs to be known in the "
2347 |        "Attributor.");
2348 | break; | |||
2349 | case Instruction::Call: | |||
2350 | // Calls are interesting on their own, additionally: | |||
2351 | // For `llvm.assume` calls we also fill the KnowledgeMap as we find them. | |||
2352 | // For `must-tail` calls we remember the caller and callee. | |||
2353 | if (auto *Assume = dyn_cast<AssumeInst>(&I)) { | |||
2354 | fillMapFromAssume(*Assume, KnowledgeMap); | |||
2355 | } else if (cast<CallInst>(I).isMustTailCall()) { | |||
2356 | FI.ContainsMustTailCall = true; | |||
2357 | if (const Function *Callee = cast<CallInst>(I).getCalledFunction()) | |||
2358 | getFunctionInfo(*Callee).CalledViaMustTail = true; | |||
2359 | } | |||
2360 | LLVM_FALLTHROUGH;
2361 | case Instruction::CallBr: | |||
2362 | case Instruction::Invoke: | |||
2363 | case Instruction::CleanupRet: | |||
2364 | case Instruction::CatchSwitch: | |||
2365 | case Instruction::AtomicRMW: | |||
2366 | case Instruction::AtomicCmpXchg: | |||
2367 | case Instruction::Br: | |||
2368 | case Instruction::Resume: | |||
2369 | case Instruction::Ret: | |||
2370 | case Instruction::Load: | |||
2371 | // The alignment of a pointer is interesting for loads. | |||
2372 | case Instruction::Store: | |||
2373 | // The alignment of a pointer is interesting for stores. | |||
2374 | case Instruction::Alloca: | |||
2375 | case Instruction::AddrSpaceCast: | |||
2376 | IsInterestingOpcode = true; | |||
2377 | } | |||
2378 | if (IsInterestingOpcode) { | |||
2379 | auto *&Insts = FI.OpcodeInstMap[I.getOpcode()]; | |||
2380 | if (!Insts) | |||
2381 | Insts = new (Allocator) InstructionVectorTy(); | |||
2382 | Insts->push_back(&I); | |||
2383 | } | |||
2384 | if (I.mayReadOrWriteMemory()) | |||
2385 | FI.RWInsts.push_back(&I); | |||
2386 | } | |||
2387 | ||||
2388 | if (F.hasFnAttribute(Attribute::AlwaysInline) && | |||
2389 | isInlineViable(F).isSuccess()) | |||
2390 | InlineableFunctions.insert(&F); | |||
2391 | } | |||
2392 | ||||
2393 | AAResults *InformationCache::getAAResultsForFunction(const Function &F) { | |||
2394 | return AG.getAnalysis<AAManager>(F); | |||
2395 | } | |||
2396 | ||||
2397 | InformationCache::FunctionInfo::~FunctionInfo() { | |||
2398 | // The instruction vectors are allocated using a BumpPtrAllocator; we need to
2399 | // destroy them manually.
2400 | for (auto &It : OpcodeInstMap) | |||
2401 | It.getSecond()->~InstructionVectorTy(); | |||
2402 | } | |||
2403 | ||||
2404 | void Attributor::recordDependence(const AbstractAttribute &FromAA, | |||
2405 | const AbstractAttribute &ToAA, | |||
2406 | DepClassTy DepClass) { | |||
2407 | if (DepClass == DepClassTy::NONE) | |||
2408 | return; | |||
2409 | // If we are outside of an update, thus before the actual fixpoint iteration | |||
2410 | // started (= when we create AAs), we do not track dependences because we will | |||
2411 | // put all AAs into the initial worklist anyway. | |||
2412 | if (DependenceStack.empty()) | |||
2413 | return; | |||
2414 | if (FromAA.getState().isAtFixpoint()) | |||
2415 | return; | |||
2416 | DependenceStack.back()->push_back({&FromAA, &ToAA, DepClass}); | |||
2417 | } | |||
2418 | ||||
2419 | void Attributor::rememberDependences() { | |||
2420 | assert(!DependenceStack.empty() && "No dependences to remember!");
2421 | ||||
2422 | for (DepInfo &DI : *DependenceStack.back()) { | |||
2423 | assert((DI.DepClass == DepClassTy::REQUIRED ||
2424 |         DI.DepClass == DepClassTy::OPTIONAL) &&
2425 |        "Expected required or optional dependence (1 bit)!");
2426 | auto &DepAAs = const_cast<AbstractAttribute &>(*DI.FromAA).Deps; | |||
2427 | DepAAs.push_back(AbstractAttribute::DepTy( | |||
2428 | const_cast<AbstractAttribute *>(DI.ToAA), unsigned(DI.DepClass))); | |||
2429 | } | |||
2430 | } | |||
2431 | ||||
2432 | void Attributor::identifyDefaultAbstractAttributes(Function &F) { | |||
2433 | if (!VisitedFunctions.insert(&F).second) | |||
2434 | return; | |||
2435 | if (F.isDeclaration()) | |||
2436 | return; | |||
2437 | ||||
2438 | // In non-module runs we need to look at the call sites of a function to | |||
2439 | // determine if it is part of a must-tail call edge. This will influence what | |||
2440 | // attributes we can derive. | |||
2441 | InformationCache::FunctionInfo &FI = InfoCache.getFunctionInfo(F); | |||
2442 | if (!isModulePass() && !FI.CalledViaMustTail) { | |||
2443 | for (const Use &U : F.uses()) | |||
2444 | if (const auto *CB = dyn_cast<CallBase>(U.getUser())) | |||
2445 | if (CB->isCallee(&U) && CB->isMustTailCall()) | |||
2446 | FI.CalledViaMustTail = true; | |||
2447 | } | |||
2448 | ||||
2449 | IRPosition FPos = IRPosition::function(F); | |||
2450 | ||||
2451 | // Check for dead BasicBlocks in every function. | |||
2452 | // We need dead instruction detection because we do not want to deal with | |||
2453 | // broken IR in which SSA rules do not apply. | |||
2454 | getOrCreateAAFor<AAIsDead>(FPos); | |||
2455 | ||||
2456 | // Every function might be "will-return". | |||
2457 | getOrCreateAAFor<AAWillReturn>(FPos); | |||
2458 | ||||
2459 | // Every function might contain instructions that cause "undefined behavior". | |||
2460 | getOrCreateAAFor<AAUndefinedBehavior>(FPos); | |||
2461 | ||||
2462 | // Every function can be nounwind. | |||
2463 | getOrCreateAAFor<AANoUnwind>(FPos); | |||
2464 | ||||
2465 | // Every function might be marked "nosync" | |||
2466 | getOrCreateAAFor<AANoSync>(FPos); | |||
2467 | ||||
2468 | // Every function might be "no-free". | |||
2469 | getOrCreateAAFor<AANoFree>(FPos); | |||
2470 | ||||
2471 | // Every function might be "no-return". | |||
2472 | getOrCreateAAFor<AANoReturn>(FPos); | |||
2473 | ||||
2474 | // Every function might be "no-recurse". | |||
2475 | getOrCreateAAFor<AANoRecurse>(FPos); | |||
2476 | ||||
2477 | // Every function might be "readnone/readonly/writeonly/...". | |||
2478 | getOrCreateAAFor<AAMemoryBehavior>(FPos); | |||
2479 | ||||
2480 | // Every function can be "readnone/argmemonly/inaccessiblememonly/...". | |||
2481 | getOrCreateAAFor<AAMemoryLocation>(FPos); | |||
2482 | ||||
2483 | // Every function might be applicable for Heap-To-Stack conversion. | |||
2484 | if (EnableHeapToStack) | |||
2485 | getOrCreateAAFor<AAHeapToStack>(FPos); | |||
2486 | ||||
2487 | // Return attributes are only appropriate if the return type is non void. | |||
2488 | Type *ReturnType = F.getReturnType(); | |||
2489 | if (!ReturnType->isVoidTy()) { | |||
2490 | // Argument attribute "returned" --- Create only one per function even | |||
2491 | // though it is an argument attribute. | |||
2492 | getOrCreateAAFor<AAReturnedValues>(FPos); | |||
2493 | ||||
2494 | IRPosition RetPos = IRPosition::returned(F); | |||
2495 | ||||
2496 | // Every returned value might be dead. | |||
2497 | getOrCreateAAFor<AAIsDead>(RetPos); | |||
2498 | ||||
2499 | // Every function might be simplified. | |||
2500 | getOrCreateAAFor<AAValueSimplify>(RetPos); | |||
2501 | ||||
2502 | // Every returned value might be marked noundef. | |||
2503 | getOrCreateAAFor<AANoUndef>(RetPos); | |||
2504 | ||||
2505 | if (ReturnType->isPointerTy()) { | |||
2506 | ||||
2507 | // Every function with pointer return type might be marked align. | |||
2508 | getOrCreateAAFor<AAAlign>(RetPos); | |||
2509 | ||||
2510 | // Every function with pointer return type might be marked nonnull. | |||
2511 | getOrCreateAAFor<AANonNull>(RetPos); | |||
2512 | ||||
2513 | // Every function with pointer return type might be marked noalias. | |||
2514 | getOrCreateAAFor<AANoAlias>(RetPos); | |||
2515 | ||||
2516 | // Every function with pointer return type might be marked | |||
2517 | // dereferenceable. | |||
2518 | getOrCreateAAFor<AADereferenceable>(RetPos); | |||
2519 | } | |||
2520 | } | |||
2521 | ||||
2522 | for (Argument &Arg : F.args()) { | |||
2523 | IRPosition ArgPos = IRPosition::argument(Arg); | |||
2524 | ||||
2525 | // Every argument might be simplified. We have to go through the Attributor | |||
2526 | // interface though as outside AAs can register custom simplification | |||
2527 | // callbacks. | |||
2528 | bool UsedAssumedInformation = false; | |||
2529 | getAssumedSimplified(ArgPos, /* AA */ nullptr, UsedAssumedInformation); | |||
2530 | ||||
2531 | // Every argument might be dead. | |||
2532 | getOrCreateAAFor<AAIsDead>(ArgPos); | |||
2533 | ||||
2534 | // Every argument might be marked noundef. | |||
2535 | getOrCreateAAFor<AANoUndef>(ArgPos); | |||
2536 | ||||
2537 | if (Arg.getType()->isPointerTy()) { | |||
2538 | // Every argument with pointer type might be marked nonnull. | |||
2539 | getOrCreateAAFor<AANonNull>(ArgPos); | |||
2540 | ||||
2541 | // Every argument with pointer type might be marked noalias. | |||
2542 | getOrCreateAAFor<AANoAlias>(ArgPos); | |||
2543 | ||||
2544 | // Every argument with pointer type might be marked dereferenceable. | |||
2545 | getOrCreateAAFor<AADereferenceable>(ArgPos); | |||
2546 | ||||
2547 | // Every argument with pointer type might be marked align. | |||
2548 | getOrCreateAAFor<AAAlign>(ArgPos); | |||
2549 | ||||
2550 | // Every argument with pointer type might be marked nocapture. | |||
2551 | getOrCreateAAFor<AANoCapture>(ArgPos); | |||
2552 | ||||
2553 | // Every argument with pointer type might be marked | |||
2554 | // "readnone/readonly/writeonly/..." | |||
2555 | getOrCreateAAFor<AAMemoryBehavior>(ArgPos); | |||
2556 | ||||
2557 | // Every argument with pointer type might be marked nofree. | |||
2558 | getOrCreateAAFor<AANoFree>(ArgPos); | |||
2559 | ||||
2560 | // Every argument with pointer type might be privatizable (or promotable) | |||
2561 | getOrCreateAAFor<AAPrivatizablePtr>(ArgPos); | |||
2562 | } | |||
2563 | } | |||
2564 | ||||
2565 | auto CallSitePred = [&](Instruction &I) -> bool { | |||
2566 | auto &CB = cast<CallBase>(I); | |||
2567 | IRPosition CBRetPos = IRPosition::callsite_returned(CB); | |||
2568 | ||||
2569 | // Call sites might be dead if they have no side effects and no live | |||
2570 | // users. The return value might be dead if there are no live users. | |||
2571 | getOrCreateAAFor<AAIsDead>(CBRetPos); | |||
2572 | ||||
2573 | Function *Callee = CB.getCalledFunction(); | |||
2574 | // TODO: Even if the callee is not known now we might be able to simplify | |||
2575 | // the call/callee. | |||
2576 | if (!Callee) | |||
2577 | return true; | |||
2578 | ||||
2579 | // Skip declarations except if annotations on their call sites were | |||
2580 | // explicitly requested. | |||
2581 | if (!AnnotateDeclarationCallSites && Callee->isDeclaration() && | |||
2582 | !Callee->hasMetadata(LLVMContext::MD_callback)) | |||
2583 | return true; | |||
2584 | ||||
2585 | if (!Callee->getReturnType()->isVoidTy() && !CB.use_empty()) { | |||
2586 | ||||
2587 | IRPosition CBRetPos = IRPosition::callsite_returned(CB); | |||
2588 | getOrCreateAAFor<AAValueSimplify>(CBRetPos); | |||
2589 | } | |||
2590 | ||||
2591 | for (int I = 0, E = CB.getNumArgOperands(); I < E; ++I) { | |||
2592 | ||||
2593 | IRPosition CBArgPos = IRPosition::callsite_argument(CB, I); | |||
2594 | ||||
2595 | // Every call site argument might be dead. | |||
2596 | getOrCreateAAFor<AAIsDead>(CBArgPos); | |||
2597 | ||||
2598 | // Call site argument might be simplified. We have to go through the | |||
2599 | // Attributor interface though as outside AAs can register custom | |||
2600 | // simplification callbacks. | |||
2601 | bool UsedAssumedInformation = false; | |||
2602 | getAssumedSimplified(CBArgPos, /* AA */ nullptr, UsedAssumedInformation); | |||
2603 | ||||
2604 | // Every call site argument might be marked "noundef". | |||
2605 | getOrCreateAAFor<AANoUndef>(CBArgPos); | |||
2606 | ||||
2607 | if (!CB.getArgOperand(I)->getType()->isPointerTy()) | |||
2608 | continue; | |||
2609 | ||||
2610 | // Call site argument attribute "non-null". | |||
2611 | getOrCreateAAFor<AANonNull>(CBArgPos); | |||
2612 | ||||
2613 | // Call site argument attribute "nocapture". | |||
2614 | getOrCreateAAFor<AANoCapture>(CBArgPos); | |||
2615 | ||||
2616 | // Call site argument attribute "no-alias". | |||
2617 | getOrCreateAAFor<AANoAlias>(CBArgPos); | |||
2618 | ||||
2619 | // Call site argument attribute "dereferenceable". | |||
2620 | getOrCreateAAFor<AADereferenceable>(CBArgPos); | |||
2621 | ||||
2622 | // Call site argument attribute "align". | |||
2623 | getOrCreateAAFor<AAAlign>(CBArgPos); | |||
2624 | ||||
2625 | // Call site argument attribute | |||
2626 | // "readnone/readonly/writeonly/..." | |||
2627 | getOrCreateAAFor<AAMemoryBehavior>(CBArgPos); | |||
2628 | ||||
2629 | // Call site argument attribute "nofree". | |||
2630 | getOrCreateAAFor<AANoFree>(CBArgPos); | |||
2631 | } | |||
2632 | return true; | |||
2633 | }; | |||
2634 | ||||
2635 | auto &OpcodeInstMap = InfoCache.getOpcodeInstMapForFunction(F); | |||
2636 | bool Success; | |||
2637 | bool UsedAssumedInformation = false; | |||
2638 | Success = checkForAllInstructionsImpl( | |||
2639 | nullptr, OpcodeInstMap, CallSitePred, nullptr, nullptr, | |||
2640 | {(unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr, | |||
2641 | (unsigned)Instruction::Call}, | |||
2642 | UsedAssumedInformation); | |||
2643 | (void)Success; | |||
2644 | assert(Success && "Expected the check call to be successful!"); | |||
2645 | ||||
2646 | auto LoadStorePred = [&](Instruction &I) -> bool { | |||
2647 | if (isa<LoadInst>(I)) { | |||
2648 | getOrCreateAAFor<AAAlign>( | |||
2649 | IRPosition::value(*cast<LoadInst>(I).getPointerOperand())); | |||
2650 | if (SimplifyAllLoads) | |||
2651 | getOrCreateAAFor<AAValueSimplify>(IRPosition::value(I)); | |||
2652 | } else | |||
2653 | getOrCreateAAFor<AAAlign>( | |||
2654 | IRPosition::value(*cast<StoreInst>(I).getPointerOperand())); | |||
2655 | return true; | |||
2656 | }; | |||
2657 | Success = checkForAllInstructionsImpl( | |||
2658 | nullptr, OpcodeInstMap, LoadStorePred, nullptr, nullptr, | |||
2659 | {(unsigned)Instruction::Load, (unsigned)Instruction::Store}, | |||
2660 | UsedAssumedInformation); | |||
2661 | (void)Success; | |||
2662 | assert(Success && "Expected the check call to be successful!"); | |||
2663 | } | |||
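// Note: the getOrCreateAAFor<...> calls above only seed deduction. During the
// fixpoint iteration other abstract attributes query those results through
// the Attributor so that dependences get recorded; a hedged sketch of such a
// query from inside some AbstractAttribute::updateImpl(Attributor &A), with
// assumed names not taken from this function:
//
//   const auto &NoUnwindAA = A.getAAFor<AANoUnwind>(
//       *this, IRPosition::function(Fn), DepClassTy::REQUIRED);
//   if (NoUnwindAA.isAssumedNoUnwind()) {
//     // ... conclude something that relies on Fn not unwinding ...
//   }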
2664 | ||||
2665 | /// Helpers to ease debugging through output streams and print calls. | |||
2666 | /// | |||
2667 | ///{ | |||
2668 | raw_ostream &llvm::operator<<(raw_ostream &OS, ChangeStatus S) { | |||
2669 | return OS << (S == ChangeStatus::CHANGED ? "changed" : "unchanged"); | |||
2670 | } | |||
2671 | ||||
2672 | raw_ostream &llvm::operator<<(raw_ostream &OS, IRPosition::Kind AP) { | |||
2673 | switch (AP) { | |||
2674 | case IRPosition::IRP_INVALID: | |||
2675 | return OS << "inv"; | |||
2676 | case IRPosition::IRP_FLOAT: | |||
2677 | return OS << "flt"; | |||
2678 | case IRPosition::IRP_RETURNED: | |||
2679 | return OS << "fn_ret"; | |||
2680 | case IRPosition::IRP_CALL_SITE_RETURNED: | |||
2681 | return OS << "cs_ret"; | |||
2682 | case IRPosition::IRP_FUNCTION: | |||
2683 | return OS << "fn"; | |||
2684 | case IRPosition::IRP_CALL_SITE: | |||
2685 | return OS << "cs"; | |||
2686 | case IRPosition::IRP_ARGUMENT: | |||
2687 | return OS << "arg"; | |||
2688 | case IRPosition::IRP_CALL_SITE_ARGUMENT: | |||
2689 | return OS << "cs_arg"; | |||
2690 | } | |||
2691 | llvm_unreachable("Unknown attribute position!"); | |||
2692 | } | |||
2693 | ||||
2694 | raw_ostream &llvm::operator<<(raw_ostream &OS, const IRPosition &Pos) { | |||
2695 | const Value &AV = Pos.getAssociatedValue(); | |||
2696 | OS << "{" << Pos.getPositionKind() << ":" << AV.getName() << " [" | |||
2697 | << Pos.getAnchorValue().getName() << "@" << Pos.getCallSiteArgNo() << "]"; | |||
2698 | ||||
2699 | if (Pos.hasCallBaseContext()) | |||
2700 | OS << "[cb_context:" << *Pos.getCallBaseContext() << "]"; | |||
2701 | return OS << "}"; | |||
2702 | } | |||
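// Note: with the kind abbreviations from the switch above, a position prints
// as "{<kind>:<value> [<anchor>@<call site arg no>]}", e.g. roughly
// "{arg:p [p@0]}" for the first argument %p of some function; the optional
// "[cb_context:...]" suffix appears only when a call-base context is present.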
2703 | ||||
2704 | raw_ostream &llvm::operator<<(raw_ostream &OS, const IntegerRangeState &S) { | |||
2705 | OS << "range-state(" << S.getBitWidth() << ")<"; | |||
2706 | S.getKnown().print(OS); | |||
2707 | OS << " / "; | |||
2708 | S.getAssumed().print(OS); | |||
2709 | OS << ">"; | |||
2710 | ||||
2711 | return OS << static_cast<const AbstractState &>(S); | |||
2712 | } | |||
2713 | ||||
2714 | raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractState &S) { | |||
2715 | return OS << (!S.isValidState() ? "top" : (S.isAtFixpoint() ? "fix" : "")); | |||
2716 | } | |||
2717 | ||||
2718 | raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractAttribute &AA) { | |||
2719 | AA.print(OS); | |||
2720 | return OS; | |||
2721 | } | |||
2722 | ||||
2723 | raw_ostream &llvm::operator<<(raw_ostream &OS, | |||
2724 | const PotentialConstantIntValuesState &S) { | |||
2725 | OS << "set-state(< {"; | |||
2726 | if (!S.isValidState()) | |||
2727 | OS << "full-set"; | |||
2728 | else { | |||
2729 | for (auto &it : S.getAssumedSet()) | |||
2730 | OS << it << ", "; | |||
2731 | if (S.undefIsContained()) | |||
2732 | OS << "undef "; | |||
2733 | } | |||
2734 | OS << "} >)"; | |||
2735 | ||||
2736 | return OS; | |||
2737 | } | |||
2738 | ||||
2739 | void AbstractAttribute::print(raw_ostream &OS) const { | |||
2740 | OS << "["; | |||
2741 | OS << getName(); | |||
2742 | OS << "] for CtxI "; | |||
2743 | ||||
2744 | if (auto *I = getCtxI()) { | |||
2745 | OS << "'"; | |||
2746 | I->print(OS); | |||
2747 | OS << "'"; | |||
2748 | } else | |||
2749 | OS << "<<null inst>>"; | |||
2750 | ||||
2751 | OS << " at position " << getIRPosition() << " with state " << getAsStr() | |||
2752 | << '\n'; | |||
2753 | } | |||
2754 | ||||
2755 | void AbstractAttribute::printWithDeps(raw_ostream &OS) const { | |||
2756 | print(OS); | |||
2757 | ||||
2758 | for (const auto &DepAA : Deps) { | |||
2759 | auto *AA = DepAA.getPointer(); | |||
2760 | OS << " updates "; | |||
2761 | AA->print(OS); | |||
2762 | } | |||
2763 | ||||
2764 | OS << '\n'; | |||
2765 | } | |||
2766 | ||||
2767 | raw_ostream &llvm::operator<<(raw_ostream &OS, | |||
2768 | const AAPointerInfo::Access &Acc) { | |||
2769 | OS << " [" << Acc.getKind() << "] " << *Acc.getRemoteInst(); | |||
2770 | if (Acc.getLocalInst() != Acc.getRemoteInst()) | |||
2771 | OS << " via " << *Acc.getLocalInst(); | |||
2772 | if (Acc.getContent().hasValue()) | |||
2773 | OS << " [" << *Acc.getContent() << "]"; | |||
2774 | return OS; | |||
2775 | } | |||
2776 | ///} | |||
2777 | ||||
2778 | /// ---------------------------------------------------------------------------- | |||
2779 | /// Pass (Manager) Boilerplate | |||
2780 | /// ---------------------------------------------------------------------------- | |||
2781 | ||||
2782 | static bool runAttributorOnFunctions(InformationCache &InfoCache, | |||
2783 | SetVector<Function *> &Functions, | |||
2784 | AnalysisGetter &AG, | |||
2785 | CallGraphUpdater &CGUpdater, | |||
2786 | bool DeleteFns) { | |||
2787 | if (Functions.empty()) | |||
2788 | return false; | |||
2789 | ||||
2790 | LLVM_DEBUG({ | |||
2791 | dbgs() << "[Attributor] Run on module with " << Functions.size() | |||
2792 | << " functions:\n"; | |||
2793 | for (Function *Fn : Functions) | |||
2794 | dbgs() << " - " << Fn->getName() << "\n"; | |||
2795 | }); | |||
2796 | ||||
2797 | // Create an Attributor and initially empty information cache that is filled | |||
2798 | // while we identify default attribute opportunities. | |||
2799 | Attributor A(Functions, InfoCache, CGUpdater, /* Allowed */ nullptr, | |||
2800 | DeleteFns); | |||
2801 | ||||
2802 | // Create shallow wrappers for all functions that are not IPO amendable | |||
2803 | if (AllowShallowWrappers) | |||
2804 | for (Function *F : Functions) | |||
2805 | if (!A.isFunctionIPOAmendable(*F)) | |||
2806 | Attributor::createShallowWrapper(*F); | |||
2807 | ||||
2808 | // Internalize non-exact functions | |||
2809 | // TODO: for now we eagerly internalize functions without calculating the | |||
2810 | // cost; we need a cost interface to determine whether internalizing | |||
2811 | // a function is "beneficial". | |||
2812 | if (AllowDeepWrapper) { | |||
2813 | unsigned FunSize = Functions.size(); | |||
2814 | for (unsigned u = 0; u < FunSize; u++) { | |||
2815 | Function *F = Functions[u]; | |||
2816 | if (!F->isDeclaration() && !F->isDefinitionExact() && F->getNumUses() && | |||
2817 | !GlobalValue::isInterposableLinkage(F->getLinkage())) { | |||
2818 | Function *NewF = Attributor::internalizeFunction(*F); | |||
2819 | assert(NewF && "Could not internalize function."); | |||
2820 | Functions.insert(NewF); | |||
2821 | ||||
2822 | // Update call graph | |||
2823 | CGUpdater.replaceFunctionWith(*F, *NewF); | |||
2824 | for (const Use &U : NewF->uses()) | |||
2825 | if (CallBase *CB = dyn_cast<CallBase>(U.getUser())) { | |||
2826 | auto *CallerF = CB->getCaller(); | |||
2827 | CGUpdater.reanalyzeFunction(*CallerF); | |||
2828 | } | |||
2829 | } | |||
2830 | } | |||
2831 | } | |||
2832 | ||||
2833 | for (Function *F : Functions) { | |||
2834 | if (F->hasExactDefinition()) | |||
2835 | NumFnWithExactDefinition++; | |||
2836 | else | |||
2837 | NumFnWithoutExactDefinition++; | |||
2838 | ||||
2839 | // We look at internal functions only on-demand, but if any use is not a | |||
2840 | // direct call or is outside the current set of analyzed functions, we have | |||
2841 | // to do it eagerly. | |||
2842 | if (F->hasLocalLinkage()) { | |||
2843 | if (llvm::all_of(F->uses(), [&Functions](const Use &U) { | |||
2844 | const auto *CB = dyn_cast<CallBase>(U.getUser()); | |||
2845 | return CB && CB->isCallee(&U) && | |||
2846 | Functions.count(const_cast<Function *>(CB->getCaller())); | |||
2847 | })) | |||
2848 | continue; | |||
2849 | } | |||
2850 | ||||
2851 | // Populate the Attributor with abstract attribute opportunities in the | |||
2852 | // function and the information cache with IR information. | |||
2853 | A.identifyDefaultAbstractAttributes(*F); | |||
2854 | } | |||
2855 | ||||
2856 | ChangeStatus Changed = A.run(); | |||
2857 | ||||
2858 | LLVM_DEBUG(dbgs() << "[Attributor] Done with " << Functions.size() | |||
2859 | << " functions, result: " << Changed << ".\n"); | |||
2860 | return Changed == ChangeStatus::CHANGED; | |||
2861 | } | |||
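// Note: runAttributorOnFunctions is the shared driver. The four entry points
// below (the new pass manager module and CGSCC passes and the two legacy
// wrappers) differ only in how the function set is collected, whether
// function deletion is permitted (DeleteFns), and how the CallGraphUpdater is
// initialized.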
2862 | ||||
2863 | void AADepGraph::viewGraph() { llvm::ViewGraph(this, "Dependency Graph"); } | |||
2864 | ||||
2865 | void AADepGraph::dumpGraph() { | |||
2866 | static std::atomic<int> CallTimes; | |||
2867 | std::string Prefix; | |||
2868 | ||||
2869 | if (!DepGraphDotFileNamePrefix.empty()) | |||
2870 | Prefix = DepGraphDotFileNamePrefix; | |||
2871 | else | |||
2872 | Prefix = "dep_graph"; | |||
2873 | std::string Filename = | |||
2874 | Prefix + "_" + std::to_string(CallTimes.load()) + ".dot"; | |||
2875 | ||||
2876 | outs() << "Dependency graph dump to " << Filename << ".\n"; | |||
2877 | ||||
2878 | std::error_code EC; | |||
2879 | ||||
2880 | raw_fd_ostream File(Filename, EC, sys::fs::OF_TextWithCRLF); | |||
2881 | if (!EC) | |||
2882 | llvm::WriteGraph(File, this); | |||
2883 | ||||
2884 | CallTimes++; | |||
2885 | } | |||
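// Note: dumpGraph writes one Graphviz file per invocation, named
// "<prefix>_<N>.dot" with the default prefix "dep_graph"; the output can be
// rendered with standard DOT tooling, e.g.
// "dot -Tpdf dep_graph_0.dot -o dep_graph_0.pdf".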
2886 | ||||
2887 | void AADepGraph::print() { | |||
2888 | for (auto DepAA : SyntheticRoot.Deps) | |||
2889 | cast<AbstractAttribute>(DepAA.getPointer())->printWithDeps(outs()); | |||
2890 | } | |||
2891 | ||||
2892 | PreservedAnalyses AttributorPass::run(Module &M, ModuleAnalysisManager &AM) { | |||
2893 | FunctionAnalysisManager &FAM = | |||
2894 | AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager(); | |||
2895 | AnalysisGetter AG(FAM); | |||
2896 | ||||
2897 | SetVector<Function *> Functions; | |||
2898 | for (Function &F : M) | |||
2899 | Functions.insert(&F); | |||
2900 | ||||
2901 | CallGraphUpdater CGUpdater; | |||
2902 | BumpPtrAllocator Allocator; | |||
2903 | InformationCache InfoCache(M, AG, Allocator, /* CGSCC */ nullptr); | |||
2904 | if (runAttributorOnFunctions(InfoCache, Functions, AG, CGUpdater, | |||
2905 | /* DeleteFns */ true)) { | |||
2906 | // FIXME: Think about passes we will preserve and add them here. | |||
2907 | return PreservedAnalyses::none(); | |||
2908 | } | |||
2909 | return PreservedAnalyses::all(); | |||
2910 | } | |||
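// A minimal sketch of invoking this module variant through the new pass
// manager; the analysis-manager boilerplate below is the usual PassBuilder
// setup and is not specific to the Attributor:
//
//   llvm::LoopAnalysisManager LAM;
//   llvm::FunctionAnalysisManager FAM;
//   llvm::CGSCCAnalysisManager CGAM;
//   llvm::ModuleAnalysisManager MAM;
//   llvm::PassBuilder PB;
//   PB.registerModuleAnalyses(MAM);
//   PB.registerCGSCCAnalyses(CGAM);
//   PB.registerFunctionAnalyses(FAM);
//   PB.registerLoopAnalyses(LAM);
//   PB.crossRegisterProxies(LAM, FAM, CGAM, MAM);
//   llvm::ModulePassManager MPM;
//   MPM.addPass(llvm::AttributorPass());
//   MPM.run(M, MAM);   // M: the llvm::Module being optimized
//
// Through opt, the same pass is typically exposed as "-passes=attributor".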
2911 | ||||
2912 | PreservedAnalyses AttributorCGSCCPass::run(LazyCallGraph::SCC &C, | |||
2913 | CGSCCAnalysisManager &AM, | |||
2914 | LazyCallGraph &CG, | |||
2915 | CGSCCUpdateResult &UR) { | |||
2916 | FunctionAnalysisManager &FAM = | |||
2917 | AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager(); | |||
2918 | AnalysisGetter AG(FAM); | |||
2919 | ||||
2920 | SetVector<Function *> Functions; | |||
2921 | for (LazyCallGraph::Node &N : C) | |||
2922 | Functions.insert(&N.getFunction()); | |||
2923 | ||||
2924 | if (Functions.empty()) | |||
2925 | return PreservedAnalyses::all(); | |||
2926 | ||||
2927 | Module &M = *Functions.back()->getParent(); | |||
2928 | CallGraphUpdater CGUpdater; | |||
2929 | CGUpdater.initialize(CG, C, AM, UR); | |||
2930 | BumpPtrAllocator Allocator; | |||
2931 | InformationCache InfoCache(M, AG, Allocator, /* CGSCC */ &Functions); | |||
2932 | if (runAttributorOnFunctions(InfoCache, Functions, AG, CGUpdater, | |||
2933 | /* DeleteFns */ false)) { | |||
2934 | // FIXME: Think about passes we will preserve and add them here. | |||
2935 | PreservedAnalyses PA; | |||
2936 | PA.preserve<FunctionAnalysisManagerCGSCCProxy>(); | |||
2937 | return PA; | |||
2938 | } | |||
2939 | return PreservedAnalyses::all(); | |||
2940 | } | |||
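// Note: the CGSCC variant passes DeleteFns == false, so dead functions are
// only removed by the module variant above; deleting functions from inside an
// SCC walk would presumably require more call graph surgery than the updater
// is asked to perform here.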
2941 | ||||
2942 | namespace llvm { | |||
2943 | ||||
2944 | template <> struct GraphTraits<AADepGraphNode *> { | |||
2945 | using NodeRef = AADepGraphNode *; | |||
2946 | using DepTy = PointerIntPair<AADepGraphNode *, 1>; | |||
2947 | using EdgeRef = PointerIntPair<AADepGraphNode *, 1>; | |||
2948 | ||||
2949 | static NodeRef getEntryNode(AADepGraphNode *DGN) { return DGN; } | |||
2950 | static NodeRef DepGetVal(DepTy &DT) { return DT.getPointer(); } | |||
2951 | ||||
2952 | using ChildIteratorType = | |||
2953 | mapped_iterator<TinyPtrVector<DepTy>::iterator, decltype(&DepGetVal)>; | |||
2954 | using ChildEdgeIteratorType = TinyPtrVector<DepTy>::iterator; | |||
2955 | ||||
2956 | static ChildIteratorType child_begin(NodeRef N) { return N->child_begin(); } | |||
2957 | ||||
2958 | static ChildIteratorType child_end(NodeRef N) { return N->child_end(); } | |||
2959 | }; | |||
2960 | ||||
2961 | template <> | |||
2962 | struct GraphTraits<AADepGraph *> : public GraphTraits<AADepGraphNode *> { | |||
2963 | static NodeRef getEntryNode(AADepGraph *DG) { return DG->GetEntryNode(); } | |||
2964 | ||||
2965 | using nodes_iterator = | |||
2966 | mapped_iterator<TinyPtrVector<DepTy>::iterator, decltype(&DepGetVal)>; | |||
2967 | ||||
2968 | static nodes_iterator nodes_begin(AADepGraph *DG) { return DG->begin(); } | |||
2969 | ||||
2970 | static nodes_iterator nodes_end(AADepGraph *DG) { return DG->end(); } | |||
2971 | }; | |||
2972 | ||||
2973 | template <> struct DOTGraphTraits<AADepGraph *> : public DefaultDOTGraphTraits { | |||
2974 | DOTGraphTraits(bool isSimple = false) : DefaultDOTGraphTraits(isSimple) {} | |||
2975 | ||||
2976 | static std::string getNodeLabel(const AADepGraphNode *Node, | |||
2977 | const AADepGraph *DG) { | |||
2978 | std::string AAString; | |||
2979 | raw_string_ostream O(AAString); | |||
2980 | Node->print(O); | |||
2981 | return AAString; | |||
2982 | } | |||
2983 | }; | |||
2984 | ||||
2985 | } // end namespace llvm | |||
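// Note: the GraphTraits/DOTGraphTraits specializations above are what let the
// generic GraphWriter machinery used by AADepGraph::viewGraph()/dumpGraph()
// traverse and label the dependency graph; an illustrative call (the helper
// name is hypothetical):
//
//   void emitDepGraph(AADepGraph &DG, llvm::raw_ostream &OS) {
//     llvm::WriteGraph(OS, &DG, /*ShortNames=*/false, "Dependency Graph");
//   }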
2986 | ||||
2987 | namespace { | |||
2988 | ||||
2989 | struct AttributorLegacyPass : public ModulePass { | |||
2990 | static char ID; | |||
2991 | ||||
2992 | AttributorLegacyPass() : ModulePass(ID) { | |||
2993 | initializeAttributorLegacyPassPass(*PassRegistry::getPassRegistry()); | |||
2994 | } | |||
2995 | ||||
2996 | bool runOnModule(Module &M) override { | |||
2997 | if (skipModule(M)) | |||
2998 | return false; | |||
2999 | ||||
3000 | AnalysisGetter AG; | |||
3001 | SetVector<Function *> Functions; | |||
3002 | for (Function &F : M) | |||
3003 | Functions.insert(&F); | |||
3004 | ||||
3005 | CallGraphUpdater CGUpdater; | |||
3006 | BumpPtrAllocator Allocator; | |||
3007 | InformationCache InfoCache(M, AG, Allocator, /* CGSCC */ nullptr); | |||
3008 | return runAttributorOnFunctions(InfoCache, Functions, AG, CGUpdater, | |||
3009 | /* DeleteFns*/ true); | |||
3010 | } | |||
3011 | ||||
3012 | void getAnalysisUsage(AnalysisUsage &AU) const override { | |||
3013 | // FIXME: Think about passes we will preserve and add them here. | |||
3014 | AU.addRequired<TargetLibraryInfoWrapperPass>(); | |||
3015 | } | |||
3016 | }; | |||
3017 | ||||
3018 | struct AttributorCGSCCLegacyPass : public CallGraphSCCPass { | |||
3019 | static char ID; | |||
3020 | ||||
3021 | AttributorCGSCCLegacyPass() : CallGraphSCCPass(ID) { | |||
3022 | initializeAttributorCGSCCLegacyPassPass(*PassRegistry::getPassRegistry()); | |||
3023 | } | |||
3024 | ||||
3025 | bool runOnSCC(CallGraphSCC &SCC) override { | |||
3026 | if (skipSCC(SCC)) | |||
3027 | return false; | |||
3028 | ||||
3029 | SetVector<Function *> Functions; | |||
3030 | for (CallGraphNode *CGN : SCC) | |||
3031 | if (Function *Fn = CGN->getFunction()) | |||
3032 | if (!Fn->isDeclaration()) | |||
3033 | Functions.insert(Fn); | |||
3034 | ||||
3035 | if (Functions.empty()) | |||
3036 | return false; | |||
3037 | ||||
3038 | AnalysisGetter AG; | |||
3039 | CallGraph &CG = const_cast<CallGraph &>(SCC.getCallGraph()); | |||
3040 | CallGraphUpdater CGUpdater; | |||
3041 | CGUpdater.initialize(CG, SCC); | |||
3042 | Module &M = *Functions.back()->getParent(); | |||
3043 | BumpPtrAllocator Allocator; | |||
3044 | InformationCache InfoCache(M, AG, Allocator, /* CGSCC */ &Functions); | |||
3045 | return runAttributorOnFunctions(InfoCache, Functions, AG, CGUpdater, | |||
3046 | /* DeleteFns */ false); | |||
3047 | } | |||
3048 | ||||
3049 | void getAnalysisUsage(AnalysisUsage &AU) const override { | |||
3050 | // FIXME: Think about passes we will preserve and add them here. | |||
3051 | AU.addRequired<TargetLibraryInfoWrapperPass>(); | |||
3052 | CallGraphSCCPass::getAnalysisUsage(AU); | |||
3053 | } | |||
3054 | }; | |||
3055 | ||||
3056 | } // end anonymous namespace | |||
3057 | ||||
3058 | Pass *llvm::createAttributorLegacyPass() { return new AttributorLegacyPass(); } | |||
3059 | Pass *llvm::createAttributorCGSCCLegacyPass() { | |||
3060 | return new AttributorCGSCCLegacyPass(); | |||
3061 | } | |||
3062 | ||||
3063 | char AttributorLegacyPass::ID = 0; | |||
3064 | char AttributorCGSCCLegacyPass::ID = 0; | |||
3065 | ||||
3066 | INITIALIZE_PASS_BEGIN(AttributorLegacyPass, "attributor", | |||
3067 | "Deduce and propagate attributes", false, false) | |||
3068 | INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) | |||
3069 | INITIALIZE_PASS_END(AttributorLegacyPass, "attributor", | |||
3070 | "Deduce and propagate attributes", false, false) | |||
3071 | INITIALIZE_PASS_BEGIN(AttributorCGSCCLegacyPass, "attributor-cgscc", | |||
3072 | "Deduce and propagate attributes (CGSCC pass)", false, | |||
3073 | false) | |||
3074 | INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) | |||
3075 | INITIALIZE_PASS_DEPENDENCY(CallGraphWrapperPass) | |||
3076 | INITIALIZE_PASS_END(AttributorCGSCCLegacyPass, "attributor-cgscc", | |||
3077 | "Deduce and propagate attributes (CGSCC pass)", false, | |||
3078 | false) | |||