Home | Sort by: relevance | last modified time | path
    Searched refs:LI (Results 1 - 25 of 444) sorted by relevancy

1 2 3 4 5 6 7 8 9 10 11 >>

  /src/external/apache2/llvm/dist/llvm/lib/Transforms/Vectorize/
VPlanHCFGBuilder.h 45 LoopInfo *LI;
63 VPlanHCFGBuilder(Loop *Lp, LoopInfo *LI, VPlan &P)
64 : TheLoop(Lp), LI(LI), Plan(P) {}
  /src/external/apache2/llvm/dist/llvm/lib/Target/AMDGPU/
AMDGPULateCodeGenPrepare.cpp 77 bool canWidenScalarExtLoad(LoadInst &LI) const;
78 bool visitLoadInst(LoadInst &LI);
106 bool AMDGPULateCodeGenPrepare::canWidenScalarExtLoad(LoadInst &LI) const {
107 unsigned AS = LI.getPointerAddressSpace();
113 if (!LI.isSimple())
115 auto *Ty = LI.getType();
124 if (LI.getAlign() < DL->getABITypeAlign(Ty))
127 return DA->isUniform(&LI);
130 bool AMDGPULateCodeGenPrepare::visitLoadInst(LoadInst &LI) {
136 if (LI.getAlign() >= 4
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/NVPTX/
NVPTXLowerAggrCopies.cpp 72 if (LoadInst *LI = dyn_cast<LoadInst>(II)) {
73 if (!LI->hasOneUse())
76 if (DL.getTypeStoreSize(LI->getType()) < MaxAggrCopySize)
79 if (StoreInst *SI = dyn_cast<StoreInst>(LI->user_back())) {
80 if (SI->getOperand(0) != LI)
82 AggrLoads.push_back(LI);
105 for (LoadInst *LI : AggrLoads) {
106 auto *SI = cast<StoreInst>(*LI->user_begin());
107 Value *SrcAddr = LI->getOperand(0);
109 unsigned NumLoads = DL.getTypeStoreSize(LI->getType())
    [all...]
NVPTXLowerAlloca.cpp 90 auto LI = dyn_cast<LoadInst>(AllocaUse.getUser());
91 if (LI && LI->getPointerOperand() == allocaInst &&
92 !LI->isVolatile()) {
93 LI->setOperand(LI->getPointerOperandIndex(), NewASCToGeneric);
  /src/external/apache2/llvm/dist/llvm/include/llvm/Analysis/
LoopAnalysisManager.h 56 LoopInfo &LI;
88 explicit Result(LoopAnalysisManager &InnerAM, LoopInfo &LI)
89 : InnerAM(&InnerAM), LI(&LI), MSSAUsed(false) {}
91 : InnerAM(std::move(Arg.InnerAM)), LI(Arg.LI), MSSAUsed(Arg.MSSAUsed) {
99 LI = RHS.LI;
137 LoopInfo *LI;
LazyBranchProbabilityInfo.h 58 LazyBranchProbabilityInfo(const Function *F, const LoopInfo *LI,
60 : Calculated(false), F(F), LI(LI), TLI(TLI) {}
65 assert(F && LI && "call setAnalysis");
66 BPI.calculate(*F, *LI, TLI, nullptr, nullptr);
80 const LoopInfo *LI;
LazyBlockFrequencyInfo.h 38 : Calculated(false), F(nullptr), BPIPass(nullptr), LI(nullptr) {}
42 const LoopInfoT *LI) {
45 this->LI = LI;
51 assert(F && BPIPass && LI && "call setAnalysis");
53 *F, BPIPassTrait<BranchProbabilityInfoPassT>::getBPI(BPIPass), *LI);
74 const LoopInfoT *LI;
CFG.h 63 /// Using DT or LI allows us to answer more quickly. LI reduces the cost of
67 /// on branchy code but not loops, and LI is most useful on code with loops but
72 const DominatorTree *DT = nullptr, const LoopInfo *LI = nullptr);
83 const DominatorTree *DT = nullptr, const LoopInfo *LI = nullptr);
95 const LoopInfo *LI = nullptr);
109 const DominatorTree *DT = nullptr, const LoopInfo *LI = nullptr);
116 /// RPOTraversal) and the loop info analysis (\p LI) of the CFG. This utility
128 /// 3) \p LI must be a valid LoopInfoBase that contains up-to-date loop
132 /// computed in \p LI. When a back-edge is found during the RPO traversal, th
    [all...]
  /src/external/apache2/llvm/dist/llvm/include/llvm/CodeGen/
LiveIntervalCalc.h 31 /// If @p LR is a main range, or if @p LI is null, then all uses must be
33 /// of the live interval @p LI, corresponding to lane mask @p LaneMask,
40 LiveInterval *LI = nullptr);
45 /// createDeadDefs - Create a dead def in LI for every def operand of Reg.
58 /// Calculates liveness for the register specified in live interval @p LI.
61 void calculate(LiveInterval &LI, bool TrackSubRegs);
63 /// For live interval \p LI with correct SubRanges construct matching
66 void constructMainRangeFromSubranges(LiveInterval &LI);
MachineLoopInfo.h 93 LoopInfoBase<MachineBasicBlock, MachineLoop> LI;
106 LoopInfoBase<MachineBasicBlock, MachineLoop>& getBase() { return LI; }
118 inline iterator begin() const { return LI.begin(); }
119 inline iterator end() const { return LI.end(); }
120 bool empty() const { return LI.empty(); }
125 return LI.getLoopFor(BB);
130 return LI.getLoopFor(BB);
135 return LI.getLoopDepth(BB);
140 return LI.isLoopHeader(BB);
147 void releaseMemory() override { LI.releaseMemory();
    [all...]
CalcSpillWeights.h 53 /// Returns true if Reg of live interval LI is used in instruction with many
55 bool isLiveAtStatepointVarArg(LiveInterval &LI);
65 /// (re)compute li's spill weight and allocation hint.
66 void calculateSpillWeightAndHint(LiveInterval &LI);
68 /// Compute future expected spill weight of a split artifact of LI
70 /// \param LI The live interval to be split.
76 /// negative weight for unspillable LI.
77 float futureWeight(LiveInterval &LI, SlotIndex Start, SlotIndex End);
85 /// (Re)compute LI's spill weight and allocation hint, or, for non null
87 /// artifact of LI that will span between start and end slot indexes
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/Utils/
FixIrreducible.cpp 117 static void reconnectChildLoops(LoopInfo &LI, Loop *ParentLoop, Loop *NewLoop,
121 : LI.getTopLevelLoopsVector();
140 LI.changeLoopFor(BB, NewLoop);
144 LI.destroy(Child);
158 static void createNaturalLoopInternal(LoopInfo &LI, DominatorTree &DT,
196 auto NewLoop = LI.AllocateLoop();
200 LI.addTopLevelLoop(NewLoop);
210 NewLoop->addBasicBlockToLoop(G, LI);
216 if (LI.getLoopFor(BB) == ParentLoop) {
219 LI.changeLoopFor(BB, NewLoop)
    [all...]
MatrixUtils.cpp 26 LoopInfo &LI) {
60 L->addBasicBlockToLoop(Header, LI);
61 L->addBasicBlockToLoop(Body, LI);
62 L->addBasicBlockToLoop(Latch, LI);
72 LoopInfo &LI) {
73 Loop *ColLoop = LI.AllocateLoop();
74 Loop *RowLoop = LI.AllocateLoop();
75 Loop *InnerLoop = LI.AllocateLoop();
78 if (Loop *ParentL = LI.getLoopFor(Start))
81 LI.addTopLevelLoop(ColLoop)
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/CodeGen/
LiveIntervalCalc.cpp 52 void LiveIntervalCalc::calculate(LiveInterval &LI, bool TrackSubRegs) {
63 unsigned Reg = LI.reg();
69 if (LI.hasSubRanges() || (SubReg != 0 && TrackSubRegs)) {
74 if (!LI.hasSubRanges() && !LI.empty()) {
76 LI.createSubRangeFrom(*Alloc, ClassMask, LI);
79 LI.refineSubRanges(
90 if (MO.isDef() && !LI.hasSubRanges())
91 createDeadDef(*Indexes, *Alloc, LI, MO)
    [all...]
CalcSpillWeights.cpp 79 // Check if all values in LI are rematerializable
80 static bool isRematerializable(const LiveInterval &LI, const LiveIntervals &LIS,
83 unsigned Reg = LI.reg();
85 for (LiveInterval::const_vni_iterator I = LI.vni_begin(), E = LI.vni_end();
129 bool VirtRegAuxInfo::isLiveAtStatepointVarArg(LiveInterval &LI) {
130 return any_of(VRM.getRegInfo().reg_operands(LI.reg()),
139 void VirtRegAuxInfo::calculateSpillWeightAndHint(LiveInterval &LI) {
140 float Weight = weightCalcHelper(LI);
144 LI.setWeight(Weight)
    [all...]
LiveRangeEdit.cpp 39 LiveInterval &LI = LIS.createEmptyInterval(VReg);
41 LI.markNotSpillable();
49 LI.createSubRange(Alloc, S.LaneMask);
51 return LI;
123 LiveInterval &li = LIS.getInterval(MO.getReg()); local
124 const VNInfo *OVNI = li.getVNInfoAt(OrigIdx);
134 if (OVNI != li.getVNInfoAt(UseIdx))
185 bool LiveRangeEdit::foldAsLoad(LiveInterval *LI,
190 for (MachineOperand &MO : MRI.reg_nodbg_operands(LI->reg())) {
226 if (UseMI->readsWritesVirtualRegister(LI->reg(), &Ops).second
    [all...]
RenameIndependentSubregs.cpp 78 bool renameComponents(LiveInterval &LI) const;
85 LiveInterval &LI) const;
122 bool RenameIndependentSubregs::renameComponents(LiveInterval &LI) const {
124 if (LI.valnos.size() < 2)
129 if (!findComponents(Classes, SubRangeInfos, LI))
133 unsigned Reg = LI.reg();
136 Intervals.push_back(&LI);
157 LiveInterval &LI) const {
161 for (LiveInterval::SubRange &SR : LI.subranges()) {
178 unsigned Reg = LI.reg()
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Analysis/
ObjCARCAnalysisUtils.cpp 39 if (const LoadInst *LI = dyn_cast<LoadInst>(Op))
40 if (AA.pointsToConstantMemory(LI->getPointerOperand()))
MemDerefPrinter.cpp 58 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
59 Value *PO = LI->getPointerOperand();
60 if (isDereferenceablePointer(PO, LI->getType(), DL))
63 PO, LI->getType(), MaybeAlign(LI->getAlignment()), DL))
93 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
94 Value *PO = LI->getPointerOperand();
95 if (isDereferenceablePointer(PO, LI->getType(), DL))
98 PO, LI->getType(), MaybeAlign(LI->getAlignment()), DL)
    [all...]
CFG.cpp 128 static const Loop *getOutermostLoop(const LoopInfo *LI, const BasicBlock *BB) {
129 const Loop *L = LI->getLoopFor(BB);
140 const LoopInfo *LI) {
155 if (LI && ExclusionSet) {
157 if (const Loop *L = getOutermostLoop(LI, BB))
162 const Loop *StopLoop = LI ? getOutermostLoop(LI, StopBB) : nullptr;
178 if (LI) {
179 Outer = getOutermostLoop(LI, BB);
214 const LoopInfo *LI) {
    [all...]
SyncDependenceAnalysis.cpp 138 static void computeLoopPO(const LoopInfo &LI, Loop &Loop, POCB CallBack,
142 static void computeStackPO(BlockStack &Stack, const LoopInfo &LI, Loop *Loop,
148 auto *NestedLoop = LI.getLoopFor(NextBB);
169 computeLoopPO(LI, *NestedLoop, CallBack, Finalized);
196 static void computeTopLevelPO(Function &F, const LoopInfo &LI, POCB CallBack) {
201 computeStackPO(Stack, LI, nullptr, CallBack, Finalized);
204 static void computeLoopPO(const LoopInfo &LI, Loop &Loop, POCB CallBack,
224 computeStackPO(Stack, LI, &Loop, CallBack, Finalized);
235 const LoopInfo &LI)
236 : DT(DT), PDT(PDT), LI(LI)
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/IPO/
LoopExtractor.cpp 84 bool extractLoops(Loop::iterator From, Loop::iterator To, LoopInfo &LI,
86 bool extractLoop(Loop *L, LoopInfo &LI, DominatorTree &DT);
175 LoopInfo &LI = LookupLoopInfo(F);
178 if (LI.empty())
185 if (std::next(LI.begin()) != LI.end())
186 return Changed | extractLoops(LI.begin(), LI.end(), LI, DT);
189 Loop *TLL = *LI.begin()
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/TableGen/
StringMatcher.cpp 113 for (const auto &LI : MatchesByLetter) {
115 OS << Indent << "case '" << LI.first << "':\t // " << LI.second.size()
117 if (LI.second.size() != 1)
120 if (EmitStringMatcherForChar(LI.second, CharNo + 1, IndentCount + 1,
146 for (const auto &LI : MatchesByLength) {
148 << "case " << LI.first << ":\t // " << LI.second.size() << " string"
149 << (LI.second.size() == 1 ? "" : "s") << " to match.\n";
150 if (EmitStringMatcherForChar(LI.second, 0, Indent, IgnoreDuplicates)
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Target/WebAssembly/
WebAssemblyRegColoring.cpp 108 LiveInterval *LI = &Liveness->getInterval(VReg);
109 assert(LI->weight() == 0.0f);
110 LI->setWeight(computeWeight(MRI, MBFI, VReg));
111 LLVM_DEBUG(LI->dump());
112 SortedIntervals.push_back(LI);
137 LiveInterval *LI = SortedIntervals[I];
138 unsigned Old = LI->reg();
148 if (!OtherLI->empty() && OtherLI->overlaps(*LI))
159 Assignments[Color].push_back(LI);
163 LLVM_DEBUG(dbgs() << "Assigning vreg" << Register::virtReg2Index(LI->reg()
    [all...]
  /src/external/apache2/llvm/dist/llvm/lib/Transforms/InstCombine/
InstCombineLoadStoreAlloca.cpp 60 if (auto *LI = dyn_cast<LoadInst>(I)) {
62 if (!LI->isSimple()) return false;
459 LoadInst *InstCombinerImpl::combineLoadToNewType(LoadInst &LI, Type *NewTy,
461 assert((!LI.isAtomic() || isSupportedAtomicType(NewTy)) &&
464 Value *Ptr = LI.getPointerOperand();
465 unsigned AS = LI.getPointerAddressSpace();
473 NewTy, NewPtr, LI.getAlign(), LI.isVolatile(), LI.getName() + Suffix);
474 NewLoad->setAtomic(LI.getOrdering(), LI.getSyncScopeID())
    [all...]

Completed in 29 milliseconds

1 2 3 4 5 6 7 8 9 10 11 >>