//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalIFunc.h"
#include "llvm/IR/GlobalObject.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include <cassert>
#include <limits>
#include <memory>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "value-mapper"

// Out of line method to get vtable etc for class.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapAliasOrIFunc,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct AliasOrIFuncTy {
    GlobalValue *GV;
    Constant *Target;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    AliasOrIFuncTy AliasOrIFunc;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  void remapGlobalObjectMetadata(GlobalObject &GO);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);
  void remapDPValue(DPValue &DPV);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD. Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapAliasOrIFunc(GlobalValue &GV, Constant &Target,
                               unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
  std::optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = std::numeric_limits<unsigned>::max();
    TempMDNode Placeholder;
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N. The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode(). Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk. Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping. Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  /// 1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///    save the post-order traversal in the given \a UniquedGraph, tracking
  ///    whether nodes' operands change.
  ///
  /// 2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///    through the \a UniquedGraph until fixed point, following the rule
  ///    that if a node changes, any node that references it must also change.
  ///
  /// 3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///    (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping. If it's not an \a
  /// MDNode, compute and return the mapping. If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return std::nullopt if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns std::nullopt if this returns
  /// std::nullopt.
  std::optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  std::optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN. It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimplifiedNode()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::hasChanged in \c G.Info indicates whether any node needs
  /// to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Visit the operands of a uniqued node in the POT.
  ///
  /// Visit the operands in the range from \c I to \c E, returning the first
  /// uniqued node we find that isn't yet in \c G. \c I is always advanced to
  /// where to continue the loop through the operands.
  ///
  /// This sets \c HasChanged if any of the visited operands change.
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns std::nullopt for nodes in \c G, but not for
  /// any of their operands outside of \c G. \pre \a Data::HasChanged is true
  /// for a node in \c G iff any of its operands have changed. \post \a
  /// getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end anonymous namespace

Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV = Materializer->materialize(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack(),
                           IA->getDialect(), IA->canThrow());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(
                       V->getContext(),
                       MDTuple::get(V->getContext(), std::nullopt));
    }
    if (auto *AL = dyn_cast<DIArgList>(MD)) {
      SmallVector<ValueAsMetadata *, 4> MappedArgs;
      for (auto *VAM : AL->getArgs()) {
        // Map both Local and Constant VAMs here; they will both ultimately
        // be mapped via mapValue. The exceptions are constants when we have no
        // module level changes and locals when they have no existing mapped
        // value and RF_IgnoreMissingLocals is set; these have identity
        // mappings.
        if ((Flags & RF_NoModuleLevelChanges) && isa<ConstantAsMetadata>(VAM)) {
          MappedArgs.push_back(VAM);
        } else if (Value *LV = mapValue(VAM->getValue())) {
          MappedArgs.push_back(
              LV == VAM->getValue() ? VAM : ValueAsMetadata::get(LV));
        } else if ((Flags & RF_IgnoreMissingLocals) && isa<LocalAsMetadata>(VAM)) {
          MappedArgs.push_back(VAM);
        } else {
          // If we cannot map the value, set the argument as undef.
          MappedArgs.push_back(ValueAsMetadata::get(
              UndefValue::get(VAM->getValue()->getType())));
        }
      }
      return MetadataAsValue::get(V->getContext(),
                                  DIArgList::get(V->getContext(), MappedArgs));
    }

    // If this is a module-level metadata and we know that nothing at the module
    // level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable) or
  // is something that is not in the mapping table.
  Constant *C = const_cast<Constant*>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  if (const auto *E = dyn_cast<DSOLocalEquivalent>(C)) {
    auto *Val = mapValue(E->getGlobalValue());
    GlobalValue *GV = dyn_cast<GlobalValue>(Val);
    if (GV)
      return getVM()[E] = DSOLocalEquivalent::get(GV);

    auto *Func = cast<Function>(Val->stripPointerCastsAndAliases());
    Type *NewTy = E->getType();
    if (TypeMapper)
      NewTy = TypeMapper->remapType(NewTy);
    return getVM()[E] = llvm::ConstantExpr::getBitCast(
               DSOLocalEquivalent::get(Func), NewTy);
  }

  if (const auto *NC = dyn_cast<NoCFIValue>(C)) {
    auto *Val = mapValue(NC->getGlobalValue());
    GlobalValue *GV = cast<GlobalValue>(Val);
    return getVM()[NC] = NoCFIValue::get(GV);
  }

  auto mapValueOrNull = [this](Value *V) {
    auto Mapped = mapValue(V);
    assert((Mapped || (Flags & RF_NullMapMissingGlobalValues)) &&
           "Unexpected null mapping for constant operand without "
           "NullMapMissingGlobalValues flag");
    return Mapped;
  };

  // Otherwise, we have some other constant to remap. Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValueOrNull(Op);
    if (!Mapped)
      return nullptr;
    if (Mapped != Op)
      break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant. We've already processed some or
  // all of the operands, set them all up now.
  SmallVector<Constant*, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatch, push it and the other mapped operands.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo) {
      Mapped = mapValueOrNull(C->getOperand(OpNo));
      if (!Mapped)
        return nullptr;
      Ops.push_back(cast<Constant>(Mapped));
    }
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<PoisonValue>(C))
    return getVM()[V] = PoisonValue::get(NewTy);
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  if (isa<ConstantTargetNone>(C))
    return getVM()[V] = Constant::getNullValue(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}

void Mapper::remapDPValue(DPValue &V) {
  // Remap variables and DILocations.
  auto *MappedVar = mapMetadata(V.getVariable());
  auto *MappedDILoc = mapMetadata(V.getDebugLoc());
  V.setVariable(cast<DILocalVariable>(MappedVar));
  V.setDebugLoc(DebugLoc(cast<DILocation>(MappedDILoc)));

  bool IgnoreMissingLocals = Flags & RF_IgnoreMissingLocals;

  if (V.isDbgAssign()) {
    auto *NewAddr = mapValue(V.getAddress());
    if (!IgnoreMissingLocals && !NewAddr)
      V.setKillAddress();
    else if (NewAddr)
      V.setAddress(NewAddr);
  }

  // Find Value operands and remap those.
  SmallVector<Value *, 4> Vals, NewVals;
  for (Value *Val : V.location_ops())
    Vals.push_back(Val);
  for (Value *Val : Vals)
    NewVals.push_back(mapValue(Val));

  // If there are no changes to the Value operands, we're finished.
  if (Vals == NewVals)
    return;

  // Otherwise, do some replacement.
  if (!IgnoreMissingLocals &&
      llvm::any_of(NewVals, [&](Value *V) { return V == nullptr; })) {
    V.setKillLocation();
  } else {
    // Either we have all non-empty NewVals, or we're permitted to ignore
    // missing locals.
    for (unsigned int I = 0; I < Vals.size(); ++I)
      if (NewVals[I])
        V.replaceVariableLocationOp(I, NewVals[I]);
  }
}

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have materialized its initializer. In that case, create a
  // dummy basic block for now, and replace it once we've materialized all
  // the initializers.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

std::optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (std::optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return std::nullopt;
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  Metadata *NewM = nullptr;

  if (M.Flags & RF_ReuseAndMutateDistinctMDs) {
    NewM = M.mapToSelf(&N);
  } else {
    NewM = MDNode::replaceWithDistinct(N.clone());
    LLVM_DEBUG(dbgs() << "\nMap " << N << "\n"
                      << "To " << *NewM << "\n\n");
    M.mapToMetadata(&N, NewM);
  }
  DistinctWorklist.push_back(cast<MDNode>(NewM));

  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

std::optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (std::optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return std::nullopt;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);
    if (Old != New)
      LLVM_DEBUG(dbgs() << "Replacing Op " << Old << " with " << New << " in "
                        << N << "\n");

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

namespace {

/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              ///< Current node.
  MDNode::op_iterator Op; ///< Current operand of \c N.

  /// Keep a flag of whether operands have changed in the worklist to avoid
  /// hitting the map in \a UniquedGraph.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};

} // end anonymous namespace

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
    if (std::optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one. Return it.
  }
  return nullptr;
}

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (llvm::none_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}

void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (std::optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      (void)D;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    if (N && NewN && N != NewN) {
      LLVM_DEBUG(dbgs() << "\nMap " << *N << "\n"
                        << "To " << *NewN << "\n\n");
    }

    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (std::optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

std::optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (std::optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is a module-level metadata. If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Don't memoize ConstantAsMetadata. Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed. These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return std::nullopt;
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (std::optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      remapGlobalObjectMetadata(*E.Data.GVInit.GV);
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      // The mapAppendingVariable() call can change AppendingInits if the
      // initializer for this variable depends on another appending global;
      // because of that, the new inits must be extracted and the vector
      // truncated before the call.
      SmallVector<Constant *, 8> NewInits(
          drop_begin(AppendingInits, PrefixSize));
      AppendingInits.resize(PrefixSize);
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor, ArrayRef(NewInits));
      break;
    }
    case WorklistEntry::MapAliasOrIFunc: {
      GlobalValue *GV = E.Data.AliasOrIFunc.GV;
      Constant *Target = mapConstant(E.Data.AliasOrIFunc.Target);
      if (auto *GA = dyn_cast<GlobalAlias>(GV))
        GA->setAliasee(Target);
      else if (auto *GI = dyn_cast<GlobalIFunc>(GV))
        GI->setResolver(Target);
      else
        llvm_unreachable("Not alias or ifunc");
      break;
    }
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}

void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto *CB = dyn_cast<CallBase>(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CB->getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CB->mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));

    LLVMContext &C = CB->getContext();
    AttributeList Attrs = CB->getAttributes();
    for (unsigned i = 0; i < Attrs.getNumAttrSets(); ++i) {
      for (int AttrIdx = Attribute::FirstTypeAttr;
           AttrIdx <= Attribute::LastTypeAttr; AttrIdx++) {
        Attribute::AttrKind TypedAttr = (Attribute::AttrKind)AttrIdx;
        if (Type *Ty =
                Attrs.getAttributeAtIndex(i, TypedAttr).getValueAsType()) {
          Attrs = Attrs.replaceAttributeTypeAtIndex(C, i, TypedAttr,
                                                    TypeMapper->remapType(Ty));
          break;
        }
      }
    }
    CB->setAttributes(Attrs);
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}

void Mapper::remapGlobalObjectMetadata(GlobalObject &GO) {
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  GO.getAllMetadata(MDs);
  GO.clearMetadata();
  for (const auto &I : MDs)
    GO.addMetadata(I.first, *cast<MDNode>(mapMetadata(I.second)));
}

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  remapGlobalObjectMetadata(F);

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API. See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
    VoidPtrTy = PointerType::getUnqual(GV.getContext());
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = cast<Constant>(mapValue(S->getOperand(0)));
      auto *E2 = cast<Constant>(mapValue(S->getOperand(1)));
      Constant *Null = Constant::getNullValue(VoidPtrTy);
      NewV = ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(
      ConstantArray::get(cast<ArrayType>(GV.getValueType()), Elements));
}

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapAliasOrIFunc(GlobalValue &GV, Constant &Target,
                                     unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert((isa<GlobalAlias>(GV) || isa<GlobalIFunc>(GV)) &&
         "Should be alias or ifunc");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAliasOrIFunc;
  WE.MCID = MCID;
  WE.Data.AliasOrIFunc.GV = &GV;
  WE.Data.AliasOrIFunc.Target = &Target;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {
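
/// RAII helper used by the public ValueMapper entry points: any work that gets
/// scheduled while the wrapped call runs (for example by a ValueMaterializer)
/// is flushed when the helper goes out of scope.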
class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }

  ~FlushingMapper() { M.flush(); }

  Mapper *operator->() const { return &M; }
};

} // end anonymous namespace

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}
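
// Illustrative usage (a minimal sketch with hypothetical client-side names
// `OldV`, `NewV`, and `Clone`): entries seeded into the ValueToValueMapTy are
// consulted before any materializer or identity mapping kicks in.
//
// \code
//   ValueToValueMapTy VMap;
//   VMap[OldV] = NewV;                      // seed an explicit mapping
//   ValueMapper Mapper(VMap, RF_IgnoreMissingLocals);
//   Mapper.remapInstruction(*Clone);        // rewrites operands through VMap
// \endcode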

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}
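
// The returned index (an MCID) identifies the alternate context; the schedule*
// entry points below accept it, so deferred work can be remapped against a
// different value map and materializer than the primary one.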

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}
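
// Illustrative sketch (hypothetical client code; `Mapper` is a ValueMapper and
// `OldLoc` a DILocation from the source module): metadata mapping is
// transitive, so one call remaps the node together with its operand graph.
//
// \code
//   auto *NewScope = cast<DIScope>(Mapper.mapMDNode(*OldLoc->getScope()));
// \endcode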

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapDPValue(Module *M, DPValue &V) {
  FlushingMapper(pImpl)->remapDPValue(V);
}

void ValueMapper::remapDPValueRange(
    Module *M, iterator_range<DPValue::self_iterator> Range) {
  for (DPValue &DPV : Range) {
    remapDPValue(M, DPV);
  }
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::remapGlobalObjectMetadata(GlobalObject &GO) {
  FlushingMapper(pImpl)->remapGlobalObjectMetadata(GO);
}
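
// The schedule* entry points below only queue work in the Mapper's worklist;
// the actual remapping of initializers, aliasees, resolvers, and function
// bodies happens later, when the worklist is flushed.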

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAlias(GlobalAlias &GA, Constant &Aliasee,
                                         unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAliasOrIFunc(GA, Aliasee, MCID);
}

void ValueMapper::scheduleMapGlobalIFunc(GlobalIFunc &GI, Constant &Resolver,
                                         unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAliasOrIFunc(GI, Resolver, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}