reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References

include/llvm/CodeGen/LiveIntervals.h
  273         Indexes->insertMachineInstrInMaps(*I);
include/llvm/CodeGen/MachineBasicBlock.h
  196   MachineInstr       &back()              { return *--end();      }
  198   const MachineInstr &back()        const { return *--end();      }
include/llvm/CodeGen/MachineDominators.h
  139     for (; &*I != A && &*I != B; ++I)
  139     for (; &*I != A && &*I != B; ++I)
  142     return &*I == A;
include/llvm/CodeGen/MachineInstrBuilder.h
   71       : MF(&F), MI(&*I) {}
  530       MachineInstr &MI = *B;
include/llvm/CodeGen/MachineInstrBundleIterator.h
  179   pointer operator->() const { return &operator*(); }
include/llvm/CodeGen/SlotIndexes.h
  433         Mi2IndexMap::const_iterator MapItr = mi2iMap.find(&*I);
  450         Mi2IndexMap::const_iterator MapItr = mi2iMap.find(&*I);
include/llvm/Transforms/Utils/SSAUpdaterImpl.h
  390     for (auto &SomePHI : BB->phis()) {
lib/CodeGen/AggressiveAntiDepBreaker.cpp
  816     MachineInstr &MI = *--I;
lib/CodeGen/AsmPrinter/AsmPrinter.cpp
 1055     for (auto &MI : MBB) {
 3034   for (const auto &MI : Pred->terminators()) {
lib/CodeGen/AsmPrinter/CodeViewDebug.cpp
 1408     for (const auto &MI : MBB) {
 1427     for (const auto &MI : MBB) {
 2859     for (const auto &MI : MBB) {
 2895     for (const auto &NextMI : *MI->getParent()) {
lib/CodeGen/AsmPrinter/DbgEntityHistoryCalculator.cpp
  241     for (const auto &MI : MBB) {
lib/CodeGen/AsmPrinter/DwarfDebug.cpp
 1730     for (const auto &MI : MBB)
lib/CodeGen/AsmPrinter/EHStreamer.cpp
  244     for (const auto &MI : MBB) {
lib/CodeGen/AsmPrinter/WinException.cpp
  457       const MachineInstr &MI = *MBBI;
lib/CodeGen/BranchFolding.cpp
  231     for (const MachineInstr &I : BB)
  301   return HashMachineInstr(*I);
  323     while (!countsAsInstruction(*I1)) {
  325         while (!countsAsInstruction(*I2)) {
  339     while (!countsAsInstruction(*I2)) {
  348     if (!I1->isIdenticalTo(*I2) ||
  427       LiveRegs.stepBackward(*I);
  499     if (!countsAsInstruction(*I))
  859     if (!countsAsInstruction(*MBBI)) {
  864     while ((MBBICommon != MBBIECommon) && !countsAsInstruction(*MBBICommon))
  869     assert(MBBICommon->isIdenticalTo(*MBBI) && "Expected matching MIIs!");
  873       MBBICommon->cloneMergedMemRefs(*MBB->getParent(), {&*MBBICommon, &*MBBI});
  873       MBBICommon->cloneMergedMemRefs(*MBB->getParent(), {&*MBBICommon, &*MBBI});
  903   for (auto &MI : *MBB) {
  914       while (!countsAsInstruction(*Pos)) {
  919       assert(MI.isIdenticalTo(*Pos) && "Expected matching MIIs!");
 1489           if (!MBBIter->isIdenticalTo(*PrevBBIter))
 1491           MachineInstr &DuplicateDbg = *MBBIter;
 1587     MachineInstr &TailCall = *MBB->getFirstNonDebugInstr();
 1876   if (!TII->isUnpredicatedTerminator(*Loc))
 1940   if (!PI->isSafeToMove(nullptr, DontMoveAcrossStore) || TII->isPredicated(*PI))
 2005     if (!TIB->isIdenticalTo(*FIB, MachineInstr::CheckKillDead))
 2008     if (TII->isPredicated(*TIB))
lib/CodeGen/BranchRelaxation.cpp
  170   for (const MachineInstr &MI : MBB)
  187   for (MachineBasicBlock::const_iterator I = MBB->begin(); &*I != &MI; ++I) {
  189     Offset += TII->getInstSizeInBytes(*I);
  499       if (MachineBasicBlock *DestBB = TII->getBranchDestBlock(*Last)) {
  500         if (!isBlockInRange(*Last, *DestBB)) {
  501           fixupUnconditionalBranch(*Last);
  513       MachineInstr &MI = *J;
  523             splitBlockBeforeInstr(*Next, DestBB);
lib/CodeGen/BreakFalseDeps.cpp
  232   for (MachineInstr &I : make_range(MBB->rbegin(), MBB->rend())) {
  256   for (MachineInstr &MI : *MBB) {
lib/CodeGen/CFGuardLongjmp.cpp
   75     for (MachineInstr &MI : MBB) {
lib/CodeGen/CFIInstrInserter.cpp
  166   for (MachineInstr &MI : *MBBInfo.MBB) {
lib/CodeGen/CriticalAntiDepBreaker.cpp
  529     MachineInstr &MI = *--I;
lib/CodeGen/DFAPacketizer.cpp
  260     MachineInstr &MI = *BeginItr;
lib/CodeGen/DeadMachineInstructionElim.cpp
  125       MachineInstr *MI = &*MII++;
lib/CodeGen/DetectDeadLanes.cpp
  535     for (MachineInstr &MI : MBB) {
lib/CodeGen/EarlyIfConversion.cpp
  224       LLVM_DEBUG(dbgs() << "Can't hoist: " << *I);
  232       LLVM_DEBUG(dbgs() << "Won't speculate load: " << *I);
  239       LLVM_DEBUG(dbgs() << "Can't speculate: " << *I);
  244     if (!InstrDependenciesAllowIfConv(&(*I)))
  319       LLVM_DEBUG(dbgs() << "Can't predicate: " << *I);
  325     if (!TII->isPredicable(*I) || TII->isPredicated(*I)) {
  325     if (!TII->isPredicable(*I) || TII->isPredicated(*I)) {
  330     if (!InstrDependenciesAllowIfConv(&(*I)))
  347     TII->PredicateInstruction(*I, Condition);
  372     if (InsertAfter.count(&*I)) {
  373       LLVM_DEBUG(dbgs() << "Can't insert code after " << *I);
  412         dbgs() << " live before " << *I;
  419     LLVM_DEBUG(dbgs() << "Can insert before " << *I);
  509     PHIs.push_back(&*I);
  571     LLVM_DEBUG(dbgs() << "          --> " << *std::prev(FirstTerm));
  600       LLVM_DEBUG(dbgs() << "          --> " << *std::prev(FirstTerm));
  834       HeadTrace.getInstrCycles(*IfConv.Head->getFirstTerminator()).Depth;
  987     for (MachineInstr &I : IfBlock) {
 1001   for (MachineInstr &I : *IfConv.TBB) {
 1007   for (MachineInstr &I : *IfConv.FBB) {
lib/CodeGen/ExecutionDomainFix.cpp
  402   for (MachineInstr &MI : *TraversedMBB.MBB) {
lib/CodeGen/ExpandPostRAPseudos.cpp
  126     LLVM_DEBUG(dbgs() << "subreg: " << *CopyMI);
  172     dbgs() << "replaced by: " << *(--dMI);
  194       MachineInstr &MI = *mi;
lib/CodeGen/FinalizeISel.cpp
   56       MachineInstr &MI = *MBBI++;
lib/CodeGen/GlobalISel/CSEInfo.cpp
  235     for (MachineInstr &MI : MBB) {
lib/CodeGen/GlobalISel/CSEMIRBuilder.cpp
   28   for (; &*I != A && &*I != B; ++I)
   28   for (; &*I != A && &*I != B; ++I)
   30   return &*I == A;
lib/CodeGen/GlobalISel/Combiner.cpp
  136         MachineInstr *CurMI = &*MII;
lib/CodeGen/GlobalISel/CombinerHelper.cpp
  540   for (; &*I != &DefMI && &*I != &UseMI; ++I)
  540   for (; &*I != &DefMI && &*I != &UseMI; ++I)
  541     return &*I == &DefMI;
  760   MachineInstr *BrCond = &*std::prev(BrIt);
  785   MachineInstr *BrCond = &*std::prev(BrIt);
lib/CodeGen/GlobalISel/IRTranslator.cpp
 1578     HasTailCall = TII->isTailCall(*std::prev(MIRBuilder.getInsertPt()));
lib/CodeGen/GlobalISel/InstructionSelect.cpp
  117       MachineInstr &MI = *MII;
  146         for (auto &InsertedMI : make_range(InsertedBegin, AfterIt))
  161       MachineInstr &MI = *MII;
  235     for (const auto &MI : MBB) {
lib/CodeGen/GlobalISel/Legalizer.cpp
  167     for (MachineInstr &MI : *MBB) {
lib/CodeGen/GlobalISel/LegalizerInfo.cpp
  741       for (const MachineInstr &MI : MBB)
lib/CodeGen/GlobalISel/Localizer.cpp
  118     MachineInstr &MI = *RI;
  195     while (II != MBB.end() && !Users.count(&*II))
  198     LLVM_DEBUG(dbgs() << "Intra-block: moving " << *MI << " before " << *&*II
lib/CodeGen/GlobalISel/RegBankSelect.cpp
  343       assert(&MI != &(*MI.getParent()->getFirstTerminator()) &&
  413     assert(&MI == &(*MI.getParent()->getFirstTerminator()) &&
  688       MachineInstr &MI = *MII++;
  752         addInsertPoint(*It, /*Before*/ true);
  754         addInsertPoint(*(--It), /*Before*/ false);
  778       addInsertPoint(*It, /*Before*/ false);
  794         addInsertPoint(*MI.getParent()->begin(), true);
  799       addInsertPoint(*It, /*Before*/ false);
lib/CodeGen/IfConversion.cpp
  308         for (auto &I : make_range(TBBInfo.BB->begin(), TIB)) {
  312         for (auto &I : make_range(FBBInfo.BB->begin(), FIB)) {
  321         for (auto &I : make_range(TIE, TBBInfo.BB->end())) {
  330         for (auto &I : make_range(FIE, FBBInfo.BB->end())) {
  339         for (auto &I : CommBB.terminators()) {
  352         for (auto &I : make_range(TIB, TIE)) {
  358         for (auto &I : make_range(FIB, FIE)) {
  747     if (!TIB->isIdenticalTo(*FIB))
  752     if (TII->DefinesPredicate(*TIB, PredDefs))
  790     if (!RTIE->isIdenticalTo(*RFIE))
  854       assert(E1->isIdenticalTo(*E2) &&
 1066   for (MachineInstr &MI : make_range(Begin, End)) {
 1848       MBB2.getParent()->eraseCallSiteInfo(&*DI2);
 1858     for (const MachineInstr &MI : make_range(MBB1.begin(), DI1)) {
 1897       MBB1.getParent()->eraseCallSiteInfo(&*DI1);
 1941     for (const MachineInstr &FI : make_range(MBB2.begin(), DI2)) {
 1985     bool BB1Predicated = BBI1T != MBB1.end() && TII->isPredicated(*BBI1T);
 1986     bool BB2NonPredicated = BBI2T != MBB2.end() && !TII->isPredicated(*BBI2T);
 2077     if (TI != BBI.BB->end() && TII->isPredicated(*TI))
 2134   for (MachineInstr &I : make_range(BBI.BB->begin(), E)) {
 2177   for (MachineInstr &I : FromMBB) {
 2249   if (FromTI != FromMBB.end() && !TII->isPredicated(*FromTI))
lib/CodeGen/ImplicitNullChecks.cpp
  524   for (auto I = MBB.rbegin(); MBP.ConditionDef != &*I; ++I)
  584   for (auto &MI : *NotNullSucc) {
lib/CodeGen/InlineSpiller.cpp
  415   LIS.InsertMachineInstrInMaps(*MII);
  416   LLVM_DEBUG(dbgs() << "\thoisted: " << SrcVNI->def << '\t' << *MII);
  418   HSpiller.addToMergeableSpills(*MII, StackSlot, Original);
  766     SlotIndex Idx = LIS.getInstructionIndex(*I).getRegSlot();
  777     dbgs() << SlotIndent << Idx << '\t' << *I;
  875   for (MachineInstr &MI : MIS)
  940   if (isFullUndefDef(*MI)) {
  958     HSpiller.addToMergeableSpills(*std::next(MI), StackSlot, Original);
 1179     Idx = LIS.getInstructionIndex(*MI);
lib/CodeGen/LexicalScopes.cpp
   76     for (const auto &MInsn : MBB) {
  312   for (auto &I : *MBB) {
lib/CodeGen/LiveDebugValues.cpp
 1318   for (auto &MI : First_MBB)
 1329     for (auto &MI : MBB) {
 1380         for (auto &MI : *MBB)
lib/CodeGen/LiveDebugVariables.cpp
  699               : LIS->getInstructionIndex(*std::prev(MBBI)).getRegSlot();
  704         if ((MBBI->isDebugValue() && handleDebugValue(*MBBI, Idx)) ||
  705             (MBBI->isDebugLabel() && handleDebugLabel(*MBBI, Idx))) {
 1273     if (!LIS.isNotInMIMap(*I) &&
 1274         SlotIndex::isEarlierEqualInstr(StopIdx, LIS.getInstructionIndex(*I)))
lib/CodeGen/LiveIntervals.cpp
  225     for (const MachineInstr &MI : MBB) {
 1438       SlotIndex Idx = Indexes->getInstructionIndex(*MII);
 1445       for (MIBundleOperands MO(*MII); MO.isValid(); ++MO)
 1500     MachineInstr &MI = *I;
 1582   while (Begin != MBB->begin() && !Indexes->hasIndex(*Begin))
 1584   while (End != MBB->end() && !Indexes->hasIndex(*End))
 1591     endIdx = getInstructionIndex(*End);
 1597     MachineInstr &MI = *I;
lib/CodeGen/LivePhysRegs.cpp
  253   for (const MachineInstr &MI : make_range(MBB.rbegin(), MBB.rend()))
  289   for (MachineInstr &MI : make_range(MBB.rbegin(), MBB.rend())) {
lib/CodeGen/LiveRangeEdit.cpp
  175   (*--MI).getOperand(0).setIsDead(false);
  177   return LIS.getSlotIndexes()->insertMachineInstrInMaps(*MI, Late).getRegSlot();
lib/CodeGen/LiveRangeShrink.cpp
  103   for (MachineInstr &I : make_range(Start, Start->getParent()->end()))
  130       MachineInstr &MI = *Next;
  228         unsigned NewOrder = IOM[&*I];
  239             IOM[&*EndIter] = NewOrder;
lib/CodeGen/LiveVariables.cpp
  576   for (MachineInstr &MI : *MBB) {
  710     for (const auto &BBI : MBB) {
lib/CodeGen/LocalStackSlotAllocation.cpp
  304     for (MachineInstr &MI : BB) {
  348     MachineInstr &MI = *FR.getMachineInstr();
  407               *FrameReferenceInsns[ref + 1].getMachineInstr(), TRI)) {
lib/CodeGen/MIRCanonicalizerPass.cpp
  147     for (auto &CurMI : *MI.getParent()) {
  158   for (auto &MI : *MBB) {
  356   for (auto &MI : *MBB) {
  427     MachineInstr &MI = *MII++;
lib/CodeGen/MIRParser/MIRParser.cpp
  322     for (const MachineInstr &MI : MBB) {
  614     for (const MachineInstr &MI : MBB) {
lib/CodeGen/MIRPrinter.cpp
  554   for (const MachineInstr &MI : MBB) {
lib/CodeGen/MIRVRegNamerUtils.cpp
   56     MachineInstr *MI = &*II;
lib/CodeGen/MachineBasicBlock.cpp
  175                     TII->isBasicBlockPrologue(*I)))
  190                     TII->isBasicBlockPrologue(*I)))
 1218   for (MachineInstr &MI : phis())
 1399         ConstMIOperands(*I).analyzePhysReg(Reg, TRI);
 1438           ConstMIOperands(*I).analyzePhysReg(Reg, TRI);
lib/CodeGen/MachineCSE.cpp
  517     MachineInstr *MI = &*I;
  795     MachineInstr *MI = &*I;
lib/CodeGen/MachineCombiner.cpp
  511     auto &MI = *BlockIter++;
lib/CodeGen/MachineCopyPropagation.cpp
  465     MachineInstr *MI = &*I;
lib/CodeGen/MachineFrameInfo.cpp
  198     for (const MachineInstr &MI : MBB) {
lib/CodeGen/MachineInstr.cpp
 2117       DbgValues.push_back(&*DI);
lib/CodeGen/MachineLICM.cpp
  517     for (MachineInstr &MI : *BB)
  577     for (MachineInstr &MI : *BB) {
  737       MachineInstr *MI = &*MII;
  821   for (const MachineInstr &MI : *BB)
 1342   for (MachineInstr &MI : *BB)
lib/CodeGen/MachineLoopUtils.cpp
   47   for (MachineInstr &MI : *Loop) {
   80     MachineInstr &MI = *I;
lib/CodeGen/MachineOutliner.cpp
  663     MachineInstr &MI = *It;
 1140     MachineInstr *NewMI = MF.CloneMachineInstr(&*I);
lib/CodeGen/MachinePipeliner.cpp
  352   for (MachineInstr &PI : make_range(B.begin(), B.getFirstNonPHI())) {
 1027     FUS.calcCriticalResources(*I);
 1034     FuncUnitOrder.push(&*I);
lib/CodeGen/MachineScheduler.cpp
  443   return MI->isCall() || TII->isSchedulingBoundary(*MI, MBB, *MF);
  479         isSchedBoundary(&*std::prev(RegionEnd), &*MBB, MF, TII)) {
  488       MachineInstr &MI = *std::prev(I);
  563                         << " " << MBB->getName() << "\n  From: " << *I
  565                  if (RegionEnd != MBB->end()) dbgs() << *RegionEnd;
  716   if (&*RegionBegin == MI)
  782       if (&*CurrentTop == MI)
  790       if (&*priorII == MI)
  793         if (&*CurrentTop == MI)
  898     if (&*RegionBegin == DbgValue)
  911     if (SUnit *SU = getSUnit(&(*MI)))
 1137         LiveQueryResult LRQ = LI.Query(LIS->getInstructionIndex(*I));
 1392     if (&*CurrentTop == MI)
 1423     if (&*priorII == MI)
 1426       if (&*CurrentTop == MI) {
 1819   RegionBeginIdx = DAG->getLIS()->getInstructionIndex(*FirstPos);
 1821       *priorNonDebug(DAG->end(), DAG->begin()));
lib/CodeGen/MachineSink.cpp
  372     MachineInstr &MI = *I;  // The instruction to sink.
 1152       for (MachineInstr &UI : make_range(NI, CurBB.end())) {
 1245     MachineInstr *MI = &*I;
lib/CodeGen/MachineTraceMetrics.cpp
  111   for (const auto &MI : *MBB) {
  581   for (const auto &I : *BadMBB)
  831       updateDepth(Start->getParent(), *Start, RegUnits);
  883     for (const auto &UseMI : *MBB) {
 1074       for (const auto &PHI : *Succ) {
 1093       const MachineInstr &MI = *--BI;
lib/CodeGen/MachineVerifier.cpp
 2209   for (const MachineInstr &Phi : MBB) {
 2779     for (const auto &I : *MBB) {
lib/CodeGen/ModuloSchedule.cpp
  147     MachineInstr *NewMI = MF.CloneMachineInstr(&*I);
  150     InstrMap[NewMI] = &*I;
  183   for (auto &I : *BB)
  216         if (Schedule.getStage(&*BBI) == StageNum) {
  220               cloneAndChangeInstr(&*BBI, i, (unsigned)StageNum);
  223           InstrMap[NewMI] = &*BBI;
  290       for (auto &BBI : *BB) {
  389     getPhiRegs(*BBI, BB, InitVal, LoopVal);
  398     int StageScheduled = Schedule.getStage(&*BBI);
  405       rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, 0, &*BBI, Def,
  528               rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, np, &*BBI,
  556         InstrMap[NewPhi] = &*BBI;
  564       rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, np, &*BBI, Def,
  569         rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, np, &*BBI, R,
  588       rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, NumPhis, &*BBI, Def,
  629       int StageScheduled = Schedule.getStage(&*BBI);
  672           InstrMap[NewPhi] = &*BBI;
  677           rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, np, &*BBI, PhiOp1,
  679           rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, np, &*BBI, PhiOp2,
  687             rewriteScheduledInstr(NewBB, InstrMap, CurStageNum, np, &*BBI, Def,
  765     MachineInstr *MI = &*BBI;
  788   for (auto &PHI : KernelBB->phis()) {
  822           for (auto &I : *Epilog)
  833   for (MachineInstr &MI : *BB) {
 1110   for (auto &PHI : BB->phis()) {
 1222       MachineInstr &MI = *I++;
 1311       LIS->RemoveMachineInstrFromMaps(*I);
 1316   for (MachineInstr &MI : *BB) {
 1577     CanonicalMIs[&*I] = &*I;
 1577     CanonicalMIs[&*I] = &*I;
 1578     CanonicalMIs[&*NI] = &*I;
 1578     CanonicalMIs[&*NI] = &*I;
 1579     BlockMIs[{NewBB, &*I}] = &*NI;
 1579     BlockMIs[{NewBB, &*I}] = &*NI;
 1580     BlockMIs[{BB, &*I}] = &*I;
 1580     BlockMIs[{BB, &*I}] = &*I;
 1638     for (MachineInstr &MI : (*EI)->phis()) {
 1678   for (MachineInstr &MI : BB->phis()) {
 1787       for (MachineInstr &P : Fallthrough->phis()) {
 1797       for (MachineInstr &P : Epilog->phis()) {
 1863       IllegalPhis.insert(&*NI);
 1992   for (MachineInstr &MI : *BB) {
lib/CodeGen/OptimizePHIs.cpp
  170     MachineInstr *MI = &*MII++;
lib/CodeGen/PHIElimination.cpp
  247   MachineInstr *MPhi = MBB.remove(&*MBB.begin());
  471       LV->addVirtualRegisterKilled(SrcReg, *KillInst);
  531           SlotIndex LastUseIndex = LIS->getInstructionIndex(*KillInst);
  553     for (const auto &BBI : MBB) {
  601                           << printMBBReference(MBB) << ": " << *BBI);
lib/CodeGen/PHIEliminationUtils.cpp
   52     while (!DefUsesInMBB.count(&*--InsertPoint)) {}
lib/CodeGen/PatchableFunction.cpp
   68   for (; doesNotGeneratecode(*FirstActualI); ++FirstActualI)
lib/CodeGen/PeepholeOptimizer.cpp
 1640       MachineInstr *MI = &*MII;
lib/CodeGen/PostRAHazardRecognizer.cpp
   81     for (MachineInstr &MI : MBB) {
lib/CodeGen/PostRASchedulerList.cpp
  337       MachineInstr &MI = *std::prev(I);
lib/CodeGen/PrologEpilogInserter.cpp
  182   for (auto &MI : MBB) {
  317       if (TII.isFrameInstr(*I)) {
  318         unsigned Size = TII.getFrameSize(*I);
 1188     if (TII.isFrameInstr(*I)) {
 1189       InsideCallSequence = TII.isFrameSetup(*I);
 1190       SPAdj += TII.getSPAdjust(*I);
 1195     MachineInstr &MI = *I;
lib/CodeGen/ReachingDefAnalysis.cpp
  127   for (MachineInstr &MI : *TraversedMBB.MBB) {
lib/CodeGen/RegAllocFast.cpp
 1263   for (MachineInstr &MI : MBB) {
lib/CodeGen/RegAllocGreedy.cpp
 3169       for (MachineInstr &MI : *MBB) {
lib/CodeGen/RegAllocPBQP.cpp
  446       for (const auto &MI : MBB) {
lib/CodeGen/RegUsageInfoPropagate.cpp
  116     for (MachineInstr &MI : MBB) {
lib/CodeGen/RegisterCoalescer.cpp
  390   for (const auto &MI : *MBB) {
 1114       SlotIndex InsPosIdx = LIS->getInstructionIndex(*InsPos).getRegSlot(true);
 1282   MachineInstr &NewMI = *std::prev(MII);
 2882       if (usesLanes(*MI, Other.Reg, Other.SubIdx, TaintedLanes)) {
 2883         LLVM_DEBUG(dbgs() << "\t\ttainted lanes used by: " << *MI);
 2887       if (&*MI == LastMI) {
 3587       bool ApplyTerminalRule = applyTerminalRule(*MII);
 3588       if (isLocalCopy(&(*MII), LIS)) {
 3590           LocalTerminals.push_back(&(*MII));
 3592           LocalWorkList.push_back(&(*MII));
 3595           GlobalTerminals.push_back(&(*MII));
 3597           WorkList.push_back(&(*MII));
 3606     for (MachineInstr &MII : *MBB)
lib/CodeGen/RegisterPressure.cpp
  315   return LIS->getInstructionIndex(*IdxPos).getRegSlot();
  799     SlotIdx = LIS->getInstructionIndex(*CurrPos).getRegSlot();
  865     SlotIdx = LIS->getInstructionIndex(*CurrPos).getRegSlot();
  881   const MachineInstr &MI = *CurrPos;
  885     SlotIndex SlotIdx = LIS->getInstructionIndex(*CurrPos).getRegSlot();
  947   const MachineInstr &MI = *CurrPos;
lib/CodeGen/RegisterScavenging.cpp
  112   MachineInstr &MI = *MBBI;
  159   MachineInstr &MI = *MBBI;
  186   MachineInstr &MI = *MBBI;
  263   const MachineInstr &MI = *MBBI;
  392     const MachineInstr &MI = *I;
  411         Used.accumulate(*std::next(From));
  520     unsigned FIOperandNum = getFrameIndexOperandNum(*II);
  528     FIOperandNum = getFrameIndexOperandNum(*II);
  537   MachineInstr &MI = *I;
  571   Scavenged.Restore = &*std::prev(UseMI);
  609     LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
  611   Scavenged.Restore = &*std::prev(SpillBefore);
  614              << " until " << *SpillBefore);
  693       const MachineInstr &NMI = *N;
  715     const MachineInstr &MI = *I;
lib/CodeGen/ScheduleDAGInstrs.cpp
  201   MachineInstr *ExitMI = RegionEnd != BB->end() ? &*RegionEnd : nullptr;
  565   for (MachineInstr &MI : make_range(RegionBegin, RegionEnd)) {
  795     MachineInstr &MI = *std::prev(MII);
  821       if (RPTracker->getPos() == RegionEnd || &*RPTracker->getPos() != &MI)
  823       assert(&*RPTracker->getPos() == &MI && "RPTracker in sync");
 1110   for (MachineInstr &MI : make_range(MBB.rbegin(), MBB.rend())) {
lib/CodeGen/SelectionDAG/FastISel.cpp
  200       MachineInstr &LocalMI = *RI;
  234   for (MachineInstr &I : *MBB) {
  554       EmitStartPt = E.isValid() ? &*E : nullptr;
  556       LastLocalValue = E.isValid() ? &*E : nullptr;
  558     MachineInstr *Dead = &*I;
  577     LastLocalValue = &*std::prev(FuncInfo.InsertPt);
 2308       FuncInfo.PHINodesToUpdate.push_back(std::make_pair(&*MBBI++, Reg));
lib/CodeGen/SelectionDAG/ScheduleDAGSDNodes.cpp
  863       MI = &*std::next(Before);
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
 9992               std::make_pair(&*MBBI++, Reg + i));
lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
  641     for (const auto &MI : MBB) {
 1386       FastIS->setLastLocalValue(&*std::prev(FuncInfo->InsertPt));
 1394     FastIS->setLastLocalValue(&*std::prev(FuncInfo->InsertPt));
 1685   while (MIIsInTerminatorSequence(*Previous)) {
lib/CodeGen/ShrinkWrap.cpp
  353     for (const MachineInstr &Terminator : MBB.terminators()) {
  512     for (const MachineInstr &MI : MBB) {
lib/CodeGen/SlotIndexes.cpp
   74     for (MachineInstr &MI : MBB) {
  174   while (Begin != MBB->begin() && !hasIndex(*Begin))
  176   while (End != MBB->end() && !hasIndex(*End))
  184     startIdx = getInstructionIndex(*Begin);
  190     endIdx = getInstructionIndex(*End);
  206     MachineInstr *MI = (MBBI != MBB->end() && !pastStart) ? &*MBBI : nullptr;
  231     MachineInstr &MI = *I;
lib/CodeGen/SplitKit.cpp
   94       LIP.first = LIS.getInstructionIndex(*FirstTerm);
  105         LIP.second = LIS.getInstructionIndex(*I);
  883       SlotIndex Kill = LIS.getInstructionIndex(*MBBI).getRegSlot();
  884       LLVM_DEBUG(dbgs() << "  move kill to " << Kill << '\t' << *MBBI);
lib/CodeGen/SplitKit.h
   86         Res = LIS.getInstructionIndex(*MII);
lib/CodeGen/StackColoring.cpp
  653     for (MachineInstr &MI : *MBB) {
  722     for (MachineInstr &MI : *MBB) {
  833     for (const MachineInstr &MI : MBB) {
  963     for (MachineInstr &I : BB) {
 1079     for (MachineInstr &I : BB) {
lib/CodeGen/StackMapLivenessAnalysis.cpp
  135         addLiveOutSetToMI(MF, *I);
  140       LLVM_DEBUG(dbgs() << "   " << LiveRegs << "   " << *I);
  141       LiveRegs.stepBackward(*I);
lib/CodeGen/StackSlotColoring.cpp
  164       MachineInstr &MI = *MII;
  375     for (MachineInstr &MI : MBB)
  434     if (TII->isStackSlotCopy(*I, FirstSS, SecondSS) && FirstSS == SecondSS &&
  438       toErase.push_back(&*I);
  449     if (!(LoadReg = TII->isLoadFromStackSlot(*I, FirstSS, LoadSize)))
  457     if (!(StoreReg = TII->isStoreToStackSlot(*NextMI, SecondSS, StoreSize)))
  468       toErase.push_back(&*ProbableLoadMI);
  471     toErase.push_back(&*NextMI);
lib/CodeGen/TailDuplicator.cpp
  115                  << *MI;
  126                  << ": " << *MI;
  133                  << *MI;
  311   for (const auto &MI : BB) {
  465     for (MachineInstr &MI : *SuccBB) {
  593   for (MachineInstr &MI : TailBB) {
  638     for (auto &I : *SB) {
  845       MachineInstr *MI = &*I;
  904         MachineInstr *MI = &*I++;
  912         MachineInstr *MI = &*I++;
  967       MachineInstr *MI = &*I++;
lib/CodeGen/TargetInstrInfo.cpp
  146       MBB->getParent()->eraseCallSiteInfo(&*MI);
  616   return &*--Pos;
  643       NewMI = &*MBB.insert(MI, NewMI);
lib/CodeGen/TwoAddressInstructionPass.cpp
  291   for (MachineInstr &OtherMI : make_range(std::next(OldPos), KillPos)) {
  735   MachineInstr *NewMI = TII->convertToThreeAddress(MFI, *mi, LV);
  741   LLVM_DEBUG(dbgs() << "2addr: CONVERTING 2-ADDR: " << *mi);
  746     LIS->ReplaceMachineInstrInMaps(*mi, *NewMI);
  864   MachineInstr *MI = &*mi;
  943   for (MachineInstr &OtherMI : make_range(End, KillPos)) {
 1000       LIS->handleMove(*CopyMI);
 1054   MachineInstr *MI = &*mi;
 1116   for (MachineInstr &OtherMI :
 1276   MachineInstr &MI = *mi;
 1574     DistanceMap.insert(std::make_pair(&*PrevMI, Dist));
 1578       LastCopyIdx = LIS->InsertMachineInstrInMaps(*PrevMI).getRegSlot();
 1637       LV->addVirtualRegisterKilled(RegB, *PrevMI);
 1708       if (mi->isDebugInstr() || SunkInstrs.count(&*mi)) {
 1718       DistanceMap.insert(std::make_pair(&*mi, ++Dist));
 1720       processCopy(&*mi);
 1724       if (!collectTiedOperands(&*mi, TiedOperands)) {
 1731       LLVM_DEBUG(dbgs() << '\t' << *mi);
 1757         processTiedPairs(&*mi, TO.second, Dist);
 1758         LLVM_DEBUG(dbgs() << "\t\trewrite to:\t" << *mi);
 1772         LLVM_DEBUG(dbgs() << "\t\tconvert to:\t" << *mi);
 1800   MachineInstr &MI = *MBBI;
lib/CodeGen/XRayInstrumentation.cpp
   94     for (auto &T : MBB.terminators()) {
  127     for (auto &T : MBB.terminators()) {
  195   auto &FirstMI = *FirstMBB.begin();
lib/Target/AArch64/AArch64A53Fix835769.cpp
  162     for (MachineInstr &I : make_range(FMBB->rbegin(), FMBB->rend()))
  207   for (auto &MI : MBB) {
lib/Target/AArch64/AArch64A57FPLoadBalancing.cpp
  343   for (auto &MI : MBB)
  506     Units.stepBackward(*I);
  514     Units.accumulate(*I);
  546   for (MachineInstr &I : *G) {
lib/Target/AArch64/AArch64AdvSIMDScalarPass.cpp
  381     MachineInstr &MI = *I++;
lib/Target/AArch64/AArch64CallLowering.cpp
  438     MIRBuilder.setInstr(*MBB.begin());
lib/Target/AArch64/AArch64CleanupLocalDynamicTLSPass.cpp
   77           I = replaceTLSBaseAddrCall(*I, TLSBaseAddrReg);
   79           I = setRegister(*I, &TLSBaseAddrReg);
lib/Target/AArch64/AArch64CollectLOH.cpp
  525     for (const MachineInstr &MI : make_range(MBB.rbegin(), MBB.rend())) {
lib/Target/AArch64/AArch64CompressJumpTables.cpp
   68   for (const MachineInstr &MI : MBB)
  151     for (MachineInstr &MI : MBB) {
lib/Target/AArch64/AArch64CondBrTuning.cpp
  309       MachineInstr &MI = *I;
lib/Target/AArch64/AArch64ConditionOptimizer.cpp
  175         LLVM_DEBUG(dbgs() << "Immediate of cmp is symbolic, " << *I << '\n');
  178         LLVM_DEBUG(dbgs() << "Immediate of cmp may be out of range, " << *I
  182         LLVM_DEBUG(dbgs() << "Destination of cmp is not dead, " << *I << '\n');
  185       return &*I;
  288   MachineInstr &BrMI = *MBB->getFirstTerminator();
lib/Target/AArch64/AArch64ConditionalCompares.cpp
  216   for (auto &I : *Tail) {
  242   for (auto &I : *Tail) {
  310       return &*I;
  313     LLVM_DEBUG(dbgs() << "Flags not used by terminator: " << *I);
  331         LLVM_DEBUG(dbgs() << "Immediate out of range for ccmp: " << *I);
  341         return &*I;
  343                         << *I);
  350       return &*I;
  355         MIOperands(*I).analyzePhysReg(AArch64::NZCV, TRI);
  361       LLVM_DEBUG(dbgs() << "Can't create ccmp with multiple uses: " << *I);
  367       LLVM_DEBUG(dbgs() << "Not convertible compare: " << *I);
  395   for (auto &I : make_range(MBB->begin(), MBB->getFirstTerminator())) {
  889       Trace.getInstrCycles(*CmpConv.Head->getFirstTerminator()).Depth;
  891       Trace.getInstrCycles(*CmpConv.CmpBB->getFirstTerminator()).Depth;
lib/Target/AArch64/AArch64DeadRegisterDefinitionsPass.cpp
  119   for (MachineInstr &MI : MBB) {
lib/Target/AArch64/AArch64ExpandPseudoInsts.cpp
  111   MachineInstr &MI = *MBBI;
  174   MachineInstr &MI = *MBBI;
  253   MachineInstr &MI = *MBBI;
  346   MachineInstr &MI = *MBBI;
  406   MachineInstr &MI = *MBBI;
lib/Target/AArch64/AArch64FalkorHWPFFix.cpp
  680     for (MachineInstr &MI : *MBB) {
  716     for (auto I = MBB->rbegin(); I != MBB->rend(); LR.stepBackward(*I), ++I) {
  717       MachineInstr &MI = *I;
lib/Target/AArch64/AArch64FrameLowering.cpp
  189     for (MachineInstr &MI : MBB) {
  689     if (AArch64InstrInfo::isSEHInstruction(*SEH))
  778     assert(AArch64InstrInfo::isSEHInstruction(*MBBI) &&
  970       fixupCalleeSaveRestoreStackOffset(*MBBI, AFI->getLocalStackSize(),
 1301         .copyImplicitOps(*MBBI);
 1343     IsFunclet = isFuncletReturnInstr(*MBBI);
 1422     while (AArch64InstrInfo::isSEHInstruction(*Pop))
 1451       fixupCalleeSaveRestoreStackOffset(*LastPopI, AFI->getLocalStackSize(),
lib/Target/AArch64/AArch64InstrInfo.cpp
  218   if (!isUnpredicatedTerminator(*I))
  222   MachineInstr *LastInst = &*I;
  226   if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  240   MachineInstr *SecondLastInst = &*I;
  250       if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  255         SecondLastInst = &*I;
  262   if (SecondLastInst && I != MBB.begin() && isUnpredicatedTerminator(*--I))
 1160     const MachineInstr &Instr = *To;
 3260       return &*--InsertPt;
 3306           return &*--InsertPt;
 3341         MachineInstr &LoadMI = *--InsertPt;
 5374   MachineInstr &MI = *MIT;
 5503   for (MachineInstr &MI : MBB) {
lib/Target/AArch64/AArch64LoadStoreOptimizer.cpp
  720   assert(isPromotableZeroStoreInst(*I) && isPromotableZeroStoreInst(*MergeMI) &&
  720   assert(isPromotableZeroStoreInst(*I) && isPromotableZeroStoreInst(*MergeMI) &&
  734   int OffsetStride = IsScaled ? 1 : getMemScale(*I);
  743       MergeForward ? getLdStBaseOp(*MergeMI) : getLdStBaseOp(*I);
  743       MergeForward ? getLdStBaseOp(*MergeMI) : getLdStBaseOp(*I);
  747   if (getLdStOffsetOp(*I).getImm() ==
  748       getLdStOffsetOp(*MergeMI).getImm() + OffsetStride)
  749     RtMI = &*MergeMI;
  751     RtMI = &*I;
  768             .cloneMergedMemRefs({&*I, &*MergeMI})
  768             .cloneMergedMemRefs({&*I, &*MergeMI})
  769             .setMIFlags(I->mergeFlagsWith(*MergeMI));
  803   int OffsetStride = IsUnscaled ? getMemScale(*I) : 1;
  812       MergeForward ? getLdStBaseOp(*Paired) : getLdStBaseOp(*I);
  812       MergeForward ? getLdStBaseOp(*Paired) : getLdStBaseOp(*I);
  814   int Offset = getLdStOffsetOp(*I).getImm();
  815   int PairedOffset = getLdStOffsetOp(*Paired).getImm();
  821     int MemSize = getMemScale(*Paired);
  825       assert(!(PairedOffset % getMemScale(*Paired)) &&
  836     RtMI = &*Paired;
  837     Rt2MI = &*I;
  844     RtMI = &*I;
  845     Rt2MI = &*Paired;
  875       Register Reg = getLdStRegOp(*I).getReg();
  876       for (MachineInstr &MI : make_range(std::next(I), Paired))
  885             .cloneMergedMemRefs({&*I, &*Paired})
  885             .cloneMergedMemRefs({&*I, &*Paired})
  886             .setMIFlags(I->mergeFlagsWith(*Paired));
  947   int LoadSize = getMemScale(*LoadI);
  948   int StoreSize = getMemScale(*StoreI);
  949   Register LdRt = getLdStRegOp(*LoadI).getReg();
  950   const MachineOperand &StMO = getLdStRegOp(*StoreI);
  951   Register StRt = getLdStRegOp(*StoreI).getReg();
  989     bool IsUnscaled = TII->isUnscaledLdSt(*LoadI);
  990     assert(IsUnscaled == TII->isUnscaledLdSt(*StoreI) &&
  994                                ? getLdStOffsetOp(*LoadI).getImm()
  995                                : getLdStOffsetOp(*LoadI).getImm() * LoadSize;
  997                                ? getLdStOffsetOp(*StoreI).getImm()
  998                                : getLdStOffsetOp(*StoreI).getImm() * StoreSize;
 1108   MachineInstr &LoadMI = *I;
 1124     MachineInstr &MI = *MBBI;
 1218   MachineInstr &FirstMI = *I;
 1238     MachineInstr &MI = *MBBI;
 1396   getPrePostIndexedMemOpInfo(*I, Scale, MinOffset, MaxOffset);
 1397   if (!isPairedLdSt(*I)) {
 1400               .add(getLdStRegOp(*Update))
 1401               .add(getLdStRegOp(*I))
 1402               .add(getLdStBaseOp(*I))
 1405               .setMIFlags(I->mergeFlagsWith(*Update));
 1409               .add(getLdStRegOp(*Update))
 1410               .add(getLdStRegOp(*I, 0))
 1411               .add(getLdStRegOp(*I, 1))
 1412               .add(getLdStBaseOp(*I))
 1415               .setMIFlags(I->mergeFlagsWith(*Update));
 1491   MachineInstr &MemMI = *I;
 1524     MachineInstr &MI = *MBBI;
 1532     if (isMatchingUpdateInsn(*I, MI, BaseReg, UnscaledOffset))
 1551   MachineInstr &MemMI = *I;
 1579     MachineInstr &MI = *MBBI;
 1587     if (isMatchingUpdateInsn(*I, MI, BaseReg, Offset))
 1604   MachineInstr &MI = *MBBI;
 1630   assert(isPromotableZeroStoreInst(*MBBI) && "Expected narrow store.");
 1631   MachineInstr &MI = *MBBI;
 1655   MachineInstr &MI = *MBBI;
 1691   MachineInstr &MI = *MBBI;
 1758     if (isPromotableLoadFromStore(*MBBI) && tryToPromoteLoadFromStore(MBBI))
 1777       if (isPromotableZeroStoreInst(*MBBI) && tryToMergeZeroStInst(MBBI))
 1791     if (TII->isPairableLdStInst(*MBBI) && tryToPairLdStInst(MBBI))
 1805     if (isMergeableLdStUpdate(*MBBI) && tryToMergeLdStUpdate(MBBI))
lib/Target/AArch64/AArch64PBQPRegAlloc.cpp
  336     for (const auto &MI: MBB) {
lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
  168   for (MachineInstr &PredI : make_range(std::next(RIt), PredMBB->rend())) {
  310     if (!knownRegValInBlock(*Itr, MBB, KnownRegs, FirstUse))
  354       LiveRegUnits::accumulateUsedDefed(*PredI, OptBBClobberedRegs,
  376     MachineInstr *MI = &*I;
  460   LLVM_DEBUG(dbgs() << "Clearing kill flags.\n\tFirstUse: " << *FirstUse
  461                     << "\tLastChange: " << *LastChange);
  462   for (MachineInstr &MMI : make_range(FirstUse, PredMBB->end()))
  464   for (MachineInstr &MMI : make_range(MBB->begin(), LastChange))
lib/Target/AArch64/AArch64RegisterInfo.cpp
  451   MachineInstr &MI = *II;
lib/Target/AArch64/AArch64SIMDInstrOpt.cpp
  322     MachineInstr *CurrentMI = &*MII;
  713           MachineInstr &MI = *MII;
lib/Target/AArch64/AArch64SpeculationHardening.cpp
  292     MachineInstr &MI = *I;
  414     for (MachineInstr &MI : MBB) {
  469     MachineInstr &MI = *MBBI;
  550   MachineInstr &MI = *MBBI;
  620     MachineInstr &MI = *MBBI;
lib/Target/AArch64/AArch64StackTaggingPreRA.cpp
  191     for (auto &I : BB) {
lib/Target/AArch64/AArch64StorePairSuppress.cpp
  147     for (auto &MI : MBB) {
lib/Target/AMDGPU/AMDGPUAsmPrinter.cpp
  577     for (const MachineInstr &MI : MBB) {
  697     for (const MachineInstr &MI : MBB) {
lib/Target/AMDGPU/AMDGPUCallLowering.cpp
  691     B.setInstr(*MBB.begin());
lib/Target/AMDGPU/AMDGPUMachineCFGStructurizer.cpp
  756   for (auto &II : *MBB) {
  772     for (auto &II : *(*SI)) {
  798   for (auto &II : *MBB) {
  843     for (auto &II : *Succ) {
 1023     for (auto &II : *MBB) {
 1283     for (auto &TI : MBB->terminators()) {
 1315   for (auto &TI : Exit->terminators()) {
 1658   for (auto &BBI : *MBB) {
 2502   for (auto &II : *Succ) {
 2523   MachineBasicBlock *MBB = (*I).getParent();
lib/Target/AMDGPU/AMDGPURegisterBankInfo.cpp
  685   for (MachineInstr &MI : Range) {
  764   for (MachineInstr &MI : make_range(NewBegin, NewEnd)) {
  828           B.setInstr(*I);
 1709     B.setInstr(*Span.begin());
 1778     B.setInstr(*Span.begin());
lib/Target/AMDGPU/AMDILCFGStructurizer.cpp
  479   MachineInstr *OldMI = &(*I);
  491   MachineInstr *OldMI = &(*I);
  600     MachineInstr *instr = &(*It);
  610   MachineInstr *MI = &*It;
  621     MachineInstr *MI = &*It;
  635     MachineInstr *instr = &(*It);
  664   for (const MachineInstr &It : *MBB)
  691        ContInstr.push_back(&*Pre);
 1478     bool UseContinueLogical = ((&*ContingMBB->rbegin()) == MI);
lib/Target/AMDGPU/GCNDPPCombine.cpp
  567       auto &MI = *I++;
lib/Target/AMDGPU/GCNIterativeScheduler.cpp
   73       OS << LIS->getInstructionIndex(*I);
   74     OS << '\t' << *I;
   80       OS << LIS->getInstructionIndex(*I);
   81     OS << '\t' << *I;
   85     if (LIS) OS << LIS->getInstructionIndex(*End) << '\t';
   86     OS << *End;
   98   const auto LiveIns = getLiveRegsBefore(*Begin, *LIS);
  103   const auto LiveOuts = getLiveRegsAfter(*BottomMI, *LIS);
  261       &*AfterBottomMI != UPTracker.getLastTrackedMI()) {
  262     UPTracker.reset(*BottomMI);
  268     UPTracker.recede(*I);
  270   UPTracker.recede(*Begin);
  287     RPTracker.reset(*R.End);
  288     RPTracker.recede(*R.End);
  291     RPTracker.reset(*std::prev(BBEnd));
  383     if (MI != &*Top) {
lib/Target/AMDGPU/GCNNSAReassign.cpp
  242     for (const MachineInstr &MI : MBB) {
lib/Target/AMDGPU/GCNRegPressure.cpp
  363   GCNRPTracker::reset(*NextMI, LiveRegsCopy, false);
  374   SlotIndex SI = LIS.getInstructionIndex(*NextMI).getBaseIndex();
  403   LastTrackedMI = &*NextMI++;
  438   reset(*Begin, LiveRegsCopy);
lib/Target/AMDGPU/GCNSchedStrategy.cpp
  332   for (auto &I : *this) {
  464     RPTracker.reset(*MBB->begin(), &LiveIn);
  469     auto *NonDbgMI = &*skipDebugInstructionsForward(Rgn.first, Rgn.second);
  472     RPTracker.reset(*I, &LRS);
  497     RPTracker.reset(*OnlySucc->begin(), &RPTracker.getLiveRegs());
  511     auto *MI = &*skipDebugInstructionsForward(I->first, I->second);
  575                         << MBB->getName() << "\n  From: " << *begin()
  577                  if (RegionEnd != MBB->end()) dbgs() << *RegionEnd;
lib/Target/AMDGPU/R600AsmPrinter.cpp
   52     for (const MachineInstr &MI : MBB) {
lib/Target/AMDGPU/R600ClauseMergePass.cpp
  104     while (I != E && !isCFAlu(*I))
  108     MachineInstr &MI = *I++;
  195       MachineInstr &MI = *I++;
  203       if (LatestCFAlu != E && mergeIfPossible(*LatestCFAlu, MI)) {
lib/Target/AMDGPU/R600ControlFlowFinalizer.cpp
  337     bool IsTex = TII->usesTextureCache(*ClauseHead);
  340       if (IsTrivialInst(*I))
  344       if ((IsTex && !TII->usesTextureCache(*I)) ||
  345           (!IsTex && !TII->usesVertexCache(*I)))
  347       if (!isCompatibleWithClause(*I, DstRegs))
  350       ClauseContent.push_back(&*I);
  412     MachineInstr &ClauseHead = *I;
  416       if (IsTrivialInst(*I)) {
  424         MachineInstr &DeleteMI = *I;
  438         getLiteral(*I, Literals);
  439         ClauseContent.push_back(&*I);
  533         if (TII->usesTextureCache(*I) || TII->usesVertexCache(*I)) {
  533         if (TII->usesTextureCache(*I) || TII->usesVertexCache(*I)) {
  545           LastAlu.back() = &*MI;
lib/Target/AMDGPU/R600EmitClauseMarkers.cpp
  211         AluInstCount += OccupiedDwords(*UseI);
  213         if (!SubstituteKCacheBank(*UseI, KCacheBanks, false))
  249       if (IsTrivialInst(*I))
  251       if (!isALU(*I))
  264         if (TII->getFlagOp(*I).getImm() & MO_FLAG_PUSH)
  286       if (!SubstituteKCacheBank(*I, KCacheBanks))
  288       AluInstCount += OccupiedDwords(*I);
  327         if (isALU(*I)) {
lib/Target/AMDGPU/R600ExpandSpecialInstrs.cpp
   93       MachineInstr &MI = *I;
lib/Target/AMDGPU/R600InstrInfo.cpp
  656     MachineInstr &MI = *I;
  701   MachineInstr &LastInst = *I;
  724   MachineInstr &SecondLastInst = *I;
lib/Target/AMDGPU/R600OptimizeVectorRegisters.cpp
  350       MachineInstr &MI = *MII;
lib/Target/AMDGPU/R600Packetizer.cpp
  379         if (TII->isSchedulingBoundary(*std::prev(I), &*MBB, Fn))
  396       Packetizer.PacketizeMIs(&*MBB, &*I, RegionEnd);
lib/Target/AMDGPU/SIAddIMGInit.cpp
   75       MachineInstr &MI = *I;
lib/Target/AMDGPU/SIFixSGPRCopies.cpp
  411   while (I != MBB->end() && TII->isBasicBlockPrologue(*I))
  465           assert(MDT.dominates(&*To, &*From));
  465           assert(MDT.dominates(&*To, &*From));
  470             bool MayClobberFrom = isReachable(Clobber, &*From, MBBTo, MDT);
  471             bool MayClobberTo = isReachable(Clobber, &*To, MBBTo, MDT);
  482                       MDT.dominates(Clobber, &*From) &&
  483                       MDT.dominates(Clobber, &*To)) ||
  561       MachineInstr &BoundaryMI = *getFirstNonPrologue(MBB, TII);
  565       if (!TII->isBasicBlockPrologue(*B))
  602       MachineInstr &MI = *I;
lib/Target/AMDGPU/SIFixVGPRCopies.cpp
   55     for (MachineInstr &MI : MBB) {
lib/Target/AMDGPU/SIFixupVectorISel.cpp
  167     MachineInstr &MI = *I;
lib/Target/AMDGPU/SIFoldOperands.cpp
 1465       MachineInstr &MI = *I;
lib/Target/AMDGPU/SIFrameLowering.cpp
  887         LiveRegs.stepBackward(*MBBI);
lib/Target/AMDGPU/SIISelLowering.cpp
 3654     BuildMI(*BB, &*BB->begin(), MI.getDebugLoc(), TII->get(AMDGPU::S_MOV_B64),
 3662     BuildMI(*BB, &*BB->begin(), MI.getDebugLoc(), TII->get(AMDGPU::S_MOV_B32),
 3676     MachineInstr *FirstMI = &*BB->begin();
 3689         FirstMI = &*++BB->begin();
 3692         BB->insert(FirstMI, &*I);
10718       for (auto &MI : MBB) {
10773     for (const MachineInstr &MI : *MBB) {
lib/Target/AMDGPU/SIInsertSkips.cpp
  119       if (opcodeEmitsNoInsts(*I))
  132       if (TII->hasUnwantedEffectsWhenEXECEmpty(*I))
  136       if (TII->isSMRD(*I) || TII->isVMEM(*I) || TII->isFLAT(*I) ||
  136       if (TII->isSMRD(*I) || TII->isVMEM(*I) || TII->isFLAT(*I) ||
  136       if (TII->isSMRD(*I) || TII->isVMEM(*I) || TII->isFLAT(*I) ||
  374     TII->commuteInstruction(*A);
  466       MachineInstr &MI = *I;
lib/Target/AMDGPU/SIInsertWaitcnts.cpp
 1549       if (!HaveScalarStores && TII->isScalarStore(*I))
 1573         else if (TII->isScalarStore(*I))
lib/Target/AMDGPU/SIInstrInfo.cpp
 2026     RemovedSize += getInstSizeInBytes(*I);
 5722   for (MachineInstr &MI : // Skip the def inst itself.
 6070   MachineInstr *Branch = &(*TI);
 6096   MachineInstr *Branch = &(*TI);
lib/Target/AMDGPU/SILoadStoreOptimizer.cpp
  453     DMask0 = TII.getNamedOperand(*I, AMDGPU::OpName::dmask)->getImm();
  459   Width0 = getOpcodeWidth(*I, TII);
  464     GLC0 = TII.getNamedOperand(*I, AMDGPU::OpName::glc)->getImm();
  466       SLC0 = TII.getNamedOperand(*I, AMDGPU::OpName::slc)->getImm();
  468     DLC0 = TII.getNamedOperand(*I, AMDGPU::OpName::dlc)->getImm();
  513     DMask1 = TII.getNamedOperand(*Paired, AMDGPU::OpName::dmask)->getImm();
  520   Width1 = getOpcodeWidth(*Paired, TII);
  524     GLC1 = TII.getNamedOperand(*Paired, AMDGPU::OpName::glc)->getImm();
  526       SLC1 = TII.getNamedOperand(*Paired, AMDGPU::OpName::slc)->getImm();
  528     DLC1 = TII.getNamedOperand(*Paired, AMDGPU::OpName::dlc)->getImm();
  578   return !(A->mayStore() || B->mayStore()) || !A->mayAlias(AA, *B, true);
  642   const auto *TFEOp = TII.getNamedOperand(*CI.I, AMDGPU::OpName::tfe);
  643   const auto *LWEOp = TII.getNamedOperand(*CI.I, AMDGPU::OpName::lwe);
  775   addDefsUsesToList(*CI.I, RegDefsToMove, PhysRegUsesToMove);
  794           (!memAccessesCanBeReordered(*CI.I, *MBBI, AA) ||
  794           (!memAccessesCanBeReordered(*CI.I, *MBBI, AA) ||
  795            !canMoveInstsAcrossMemOp(*MBBI, CI.InstsToMove, AA))) {
  799         CI.InstsToMove.push_back(&*MBBI);
  800         addDefsUsesToList(*MBBI, RegDefsToMove, PhysRegUsesToMove);
  807       addToListsIfDependent(*MBBI, RegDefsToMove, PhysRegUsesToMove,
  822     if (addToListsIfDependent(*MBBI, RegDefsToMove, PhysRegUsesToMove,
  826     bool Match = CI.hasSameBaseAddress(*MBBI);
  841       if (canBeCombined && canMoveInstsAcrossMemOp(*MBBI, CI.InstsToMove, AA))
  850     if (!memAccessesCanBeReordered(*CI.I, *MBBI, AA) ||
  850     if (!memAccessesCanBeReordered(*CI.I, *MBBI, AA) ||
  851         !canMoveInstsAcrossMemOp(*MBBI, CI.InstsToMove, AA))
  877   const auto *AddrReg = TII->getNamedOperand(*CI.I, AMDGPU::OpName::addr);
  879   const auto *Dest0 = TII->getNamedOperand(*CI.I, AMDGPU::OpName::vdst);
  880   const auto *Dest1 = TII->getNamedOperand(*CI.Paired, AMDGPU::OpName::vdst);
  931           .cloneMergedMemRefs({&*CI.I, &*CI.Paired});
  931           .cloneMergedMemRefs({&*CI.I, &*CI.Paired});
  977       TII->getNamedOperand(*CI.I, AMDGPU::OpName::addr);
  979       TII->getNamedOperand(*CI.I, AMDGPU::OpName::data0);
  981       TII->getNamedOperand(*CI.Paired, AMDGPU::OpName::data0);
 1026           .cloneMergedMemRefs({&*CI.I, &*CI.Paired});
 1026           .cloneMergedMemRefs({&*CI.I, &*CI.Paired});
 1051   for (unsigned I = 1, E = (*CI.I).getNumOperands(); I != E; ++I) {
 1055       MIB.add((*CI.I).getOperand(I));
 1074   const auto *Dest0 = TII->getNamedOperand(*CI.I, AMDGPU::OpName::vdata);
 1075   const auto *Dest1 = TII->getNamedOperand(*CI.Paired, AMDGPU::OpName::vdata);
 1112         .add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::sbase))
 1124   const auto *Dest0 = TII->getNamedOperand(*CI.I, AMDGPU::OpName::sdst);
 1125   const auto *Dest1 = TII->getNamedOperand(*CI.Paired, AMDGPU::OpName::sdst);
 1159     MIB.add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::vaddr));
 1170     MIB.add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::srsrc))
 1171         .add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::soffset))
 1186   const auto *Dest0 = TII->getNamedOperand(*CI.I, AMDGPU::OpName::vdata);
 1187   const auto *Dest1 = TII->getNamedOperand(*CI.Paired, AMDGPU::OpName::vdata);
 1310   const auto *Src0 = TII->getNamedOperand(*CI.I, AMDGPU::OpName::vdata);
 1311   const auto *Src1 = TII->getNamedOperand(*CI.Paired, AMDGPU::OpName::vdata);
 1325     MIB.add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::vaddr));
 1337     MIB.add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::srsrc))
 1338         .add(*TII->getNamedOperand(*CI.I, AMDGPU::OpName::soffset))
 1596     MachineInstr &MINext = *MBBI;
 1667     if (AddrList.front().hasSameBaseAddress(*CI.I) &&
 1749     if (&*CI->I == &MI) {
 1770         removeCombinedInst(MergeList, *CI.Paired);
 1778         removeCombinedInst(MergeList, *CI.Paired);
 1786         removeCombinedInst(MergeList, *CI.Paired);
 1795         removeCombinedInst(MergeList, *CI.Paired);
 1804         removeCombinedInst(MergeList, *CI.Paired);
 1813         removeCombinedInst(MergeList, *CI.Paired);
lib/Target/AMDGPU/SILowerControlFlow.cpp
  170     for(auto &Term : MBB->terminators())
  534       MachineInstr &MI = *I;
lib/Target/AMDGPU/SILowerI1Copies.cpp
  177       for (MachineInstr &MI : MBB->terminators()) {
  505     for (MachineInstr &MI : MBB) {
  553     for (MachineInstr &MI : MBB.phis()) {
  675     for (MachineInstr &MI : MBB) {
  792     instrDefsUsesSCC(*I, DefsSCC, TerminatorsUseSCC);
  804     instrDefsUsesSCC(*InsertionPt, DefSCC, UseSCC);
lib/Target/AMDGPU/SILowerSGPRSpills.cpp
  109         MachineInstr &Inst = *std::prev(I);
  144         MachineInstr &Inst = *std::prev(I);
  272         MachineInstr &MI = *I;
lib/Target/AMDGPU/SIMachineScheduler.cpp
  380         isDefBetween(Reg, LIS->getInstructionIndex(*BeginBlock).getRegSlot(),
  381                      LIS->getInstructionIndex(*EndBlock).getRegSlot(), MRI,
 1365       if (&*CurrentTopFastSched == MI) {
 1404       DAG->getLIS()->handleMove(*POld, /*UpdateFlags=*/true);
lib/Target/AMDGPU/SIMemoryLegalizer.cpp
 1269                                 isAtomicRet(*MI) ? SIMemOp::LOAD :
lib/Target/AMDGPU/SIModeRegister.cpp
  238   for (MachineInstr &MI : MBB) {
lib/Target/AMDGPU/SIOptimizeExecMasking.cpp
  230     if (removeTerminatorBit(TII, *I))
  247     unsigned CopyFromExec = isCopyFromExec(*I, ST);
  293     unsigned CopyToExec = isCopyToExec(*I, ST);
  298     auto CopyToExecInst = &*I;
  306           isLogicalOpOnExec(*PrepareExecInst) == CopyToExec) {
  307         LLVM_DEBUG(dbgs() << "Fold exec copy: " << *PrepareExecInst);
  311         LLVM_DEBUG(dbgs() << "into: " << *PrepareExecInst << '\n');
  333         LLVM_DEBUG(dbgs() << "exec read prevents saveexec: " << *J << '\n');
  355           SaveExecInst = &*J;
  360                      << "Instruction does not read exec copy: " << *J << '\n');
  372         LLVM_DEBUG(dbgs() << "Found second use of save inst candidate: " << *J
  378         assert(SaveExecInst != &*J);
  379         OtherUseInsts.push_back(&*J);
lib/Target/AMDGPU/SIOptimizeExecMaskingPreRA.cpp
  137     if (TII->mayReadEXEC(*MRI, *I))
  208                                    *I, MRI, LIS);
  354                      << "Removing no effect instruction: " << *I << '\n');
  362           LIS->RemoveMachineInstrFromMaps(*I);
  384     if (MBB.succ_size() != 1 || Lead == E || !isEndCF(*Lead, TRI, ST))
  389     if (NextLead == TmpMBB->end() || !isEndCF(*NextLead, TRI, ST) ||
  390         !getOrExecSource(*NextLead, *TII, MRI, ST))
  393     LLVM_DEBUG(dbgs() << "Redundant EXEC = S_OR_B64 found: " << *Lead << '\n');
  395     auto SaveExec = getOrExecSource(*Lead, *TII, MRI, ST);
  396     unsigned SaveExecReg = getOrNonExecReg(*Lead, *TII, ST);
  402     LIS->RemoveMachineInstrFromMaps(*Lead);
lib/Target/AMDGPU/SIPeepholeSDWA.cpp
  853   for (MachineInstr &MI : MBB) {
lib/Target/AMDGPU/SIPreAllocateWWMRegs.cpp
  122     for (MachineInstr &MI : MBB) {
  189     for (MachineInstr &MI : *MBB) {
lib/Target/AMDGPU/SIRegisterInfo.cpp
  590   const MachineOperand *Reg = TII->getNamedOperand(*MI, AMDGPU::OpName::vdata);
  597           .add(*TII->getNamedOperand(*MI, AMDGPU::OpName::srsrc))
  598           .add(*TII->getNamedOperand(*MI, AMDGPU::OpName::soffset))
  605           .cloneMemRefs(*MI);
  607   const MachineOperand *VDataIn = TII->getNamedOperand(*MI,
  647     hasAGPRs(RC) ? TII->getNamedOperand(*MI, AMDGPU::OpName::tmp)->getReg()
 1041       const MachineOperand *VData = TII->getNamedOperand(*MI,
 1043       assert(TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)->getReg() ==
 1049             TII->getNamedOperand(*MI, AMDGPU::OpName::srsrc)->getReg(),
 1051             TII->getNamedOperand(*MI, AMDGPU::OpName::offset)->getImm(),
 1071       const MachineOperand *VData = TII->getNamedOperand(*MI,
 1073       assert(TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)->getReg() ==
 1079             TII->getNamedOperand(*MI, AMDGPU::OpName::srsrc)->getReg(),
 1081             TII->getNamedOperand(*MI, AMDGPU::OpName::offset)->getImm(),
 1090       bool IsMUBUF = TII->isMUBUF(*MI);
 1206         assert(TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)->getReg() ==
 1209         TII->getNamedOperand(*MI, AMDGPU::OpName::soffset)->setReg(FrameReg);
 1213           = TII->getNamedOperand(*MI, AMDGPU::OpName::offset)->getImm();
 1228       if (!TII->isImmOperandLegal(*MI, FIOperandNum, FIOp)) {
lib/Target/AMDGPU/SIShrinkInstructions.cpp
  567       MachineInstr &MI = *I;
  607           (*Next).getOpcode() == AMDGPU::S_NOP) {
  609         MachineInstr &NextMI = *Next;
lib/Target/AMDGPU/SIWholeQuadMode.cpp
  236     for (const MachineInstr &MI : *BII.first) {
  327       MachineInstr &MI = *II;
  479     MachineInstr *LastMI = &*MBB.rbegin();
  586   SlotIndex FirstIdx = First != MBBE ? LIS->getInstructionIndex(*First)
  589       Last != MBBE ? LIS->getInstructionIndex(*Last) : LIS->getMBBEndIdx(&MBB);
  740       MachineInstr &MI = *II;
lib/Target/ARC/ARCBranchFinalize.cpp
  155     for (auto &MI : MBB) {
lib/Target/ARC/ARCExpandPseudos.cpp
   61   MachineInstr &SI = *SII;
lib/Target/ARC/ARCFrameLowering.cpp
  474   MachineInstr &Old = *I;
lib/Target/ARC/ARCInstrInfo.cpp
  181   while (isPredicated(*I) || I->isTerminator() || I->isDebugValue()) {
  213       CantAnalyze = !isPredicated(*I);
  221     if (!isPredicated(*I) && (isUncondBranchOpcode(I->getOpcode()) ||
  233           MachineInstr &InstToDelete = *DI;
lib/Target/ARC/ARCOptAddrMode.cpp
  157       User = &*MBB->rbegin();
  476     MachineInstr *Res = tryToCombine(*MI);
lib/Target/ARC/ARCRegisterInfo.cpp
   43   MachineInstr &MI = *II;
  169   MachineInstr &MI = *II;
lib/Target/ARM/A15SDOptimizer.cpp
  676     for (MachineInstr &MI : MBB) {
lib/Target/ARM/ARMBaseInstrInfo.cpp
  326   while (isPredicated(*I) || I->isTerminator() || I->isDebugValue()) {
  358       CantAnalyze = !isPredicated(*I);
  366     if (!isPredicated(*I) &&
  380           MachineInstr &InstToDelete = *DI;
 2005       MachineInstr *LastMI = &*Pred->rbegin();
 3000     const MachineInstr &Instr = *--I;
 3005       SubAdd = &*I;
 3055     const MachineInstr &Instr = *I;
 3124               std::make_pair(&((*I).getOperand(IO - 1)), NewCC));
 3195       analyzeCompare(*Next, SrcReg, SrcReg2, CmpMask, CmpValue) &&
 3196       isRedundantFlagInstr(&*Next, SrcReg, SrcReg2, CmpValue, &MI, IsThumb1))
 4797       .cloneMemRefs(*MI)
 5383   ARMCC::CondCodes Pred = getInstrPredicate(*CmpMI, PredReg);
 5391   return &*CmpMI;
lib/Target/ARM/ARMBaseRegisterInfo.cpp
  752   MachineInstr &MI = *II;
lib/Target/ARM/ARMBasicBlockInfo.cpp
   54   for (MachineInstr &I : *MBB) {
   84   for (MachineBasicBlock::const_iterator I = MBB->begin(); &*I != MI; ++I) {
   86     Offset += TII->getInstSizeInBytes(*I);
lib/Target/ARM/ARMCallLowering.cpp
  461     MIRBuilder.setInstr(*MBB.begin());
lib/Target/ARM/ARMConstantIslandPass.cpp
  679     for (MachineInstr &I : MBB)
  708     for (MachineInstr &I : MBB) {
  893   for (MachineInstr &LiveMI : make_range(OrigBB->rbegin(), LivenessEnd))
 1370          getITInstrPredicate(*I, PredReg) != ARMCC::AL;
 1371          Offset += TII->getInstSizeInBytes(*I), I = std::next(I)) {
 1373           std::max(BaseInsertOffset, Offset + TII->getInstSizeInBytes(*I) + 1);
 1387        Offset += TII->getInstSizeInBytes(*MI), MI = std::next(MI)) {
 1389     if (CPUIndex < NumCPUsers && CPUsers[CPUIndex].MI == &*MI) {
 1406       LastIT = &*MI;
 1414     ARMCC::CondCodes CC = getITInstrPredicate(*MI, PredReg);
 1438   assert(!isThumb || getITInstrPredicate(*MI, PredReg) == ARMCC::AL);
 1440   NewMBB = splitBlockBeforeInstr(&*MI);
 1572     CPEBB->setAlignment(getCPEAlign(&*CPEBB->begin()));
 2054   for (++I; &*I != JumpMI; ++I) {
 2055     if (isSimpleIndexCalc(*I, EntryReg, BaseReg)) {
 2056       RemovableAdd = &*I;
 2078   for (++I; &*I != JumpMI; ++I) {
 2115          &*MBB->begin() == CPEMI;
 2129   for (++I; &*I != JumpMI; ++I) {
 2131       RemovableAdd = &*I;
 2139   for (++J; &*J != JumpMI; ++J) {
lib/Target/ARM/ARMExpandPseudoInsts.cpp
  470   MachineInstr &MI = *MBBI;
  581   MachineInstr &MI = *MBBI;
  657   MachineInstr &MI = *MBBI;
  748   MachineInstr &MI = *MBBI;
  828   MachineInstr &MI = *MBBI;
  933   MachineInstr &MI = *MBBI;
 1052   MachineInstr &MI = *MBBI;
 1156   MachineInstr &MI = *MBBI;
 1210       MBB.getParent()->moveCallSiteInfo(&MI, &*NewMI);
lib/Target/ARM/ARMFrameLowering.cpp
  488         tryFoldSPUpdateIntoPushPop(STI, MF, &*LastPush, DPRGapSize))
  502       DefCFAOffsetCandidates.addInst(MBBI, sizeOfSPAdjustment(*MBBI));
  568         tryFoldSPUpdateIntoPushPop(STI, MF, &*LastPush, NumBytes))
  596     unsigned PushSize = sizeOfSPAdjustment(*GPRCS1Push);
  803       } while (MBBI != MBB.begin() && isCSRestore(*MBBI, TII, CSRegs));
  804       if (!isCSRestore(*MBBI, TII, CSRegs))
  852                !tryFoldSPUpdateIntoPushPop(STI, MF, &*MBBI, NumBytes))
 1127           MIB.copyImplicitOps(*MI);
 1491     for (auto &MI : MBB)
 1514     for (auto &MI : MBB) {
 2154     MachineInstr &Old = *I;
lib/Target/ARM/ARMHazardRecognizer.cpp
   57           DefMI = &*I;
lib/Target/ARM/ARMISelLowering.cpp
 9847       MachineInstrBuilder MIB(*MF, &*II);
10372     const MachineInstr& mi = *miI;
lib/Target/ARM/ARMInstrInfo.cpp
  134       .cloneMemRefs(*MI)
lib/Target/ARM/ARMLoadStoreOptimizer.cpp
  512         Register InstrSrcReg = getLoadStoreRegOp(*MBBI).getReg();
  519                  !definesCPSR(*MBBI)) {
  541     } else if (definesCPSR(*MBBI) || MBBI->isCall() || MBBI->isBranch()) {
  608     LiveRegs.stepBackward(*LiveRegPos);
  936     for (MachineInstr &MI : FixupRange) {
  955     for (MachineInstr &MI : FixupRange) {
 1225   Offset = isIncrementOrDecrement(*PrevMBBI, Reg, Pred, PredReg);
 1243   Offset = isIncrementOrDecrement(*NextMBBI, Reg, Pred, PredReg);
 1650   MachineInstr *MI = &*MBBI;
 1783     if (isMemoryOp(*MBBI)) {
 1787       Register Base = getLoadStoreBaseOp(*MBBI).getReg();
 1789       ARMCC::CondCodes Pred = getInstrPredicate(*MBBI, PredReg);
 1790       int Offset = getMemoryOpOffset(*MBBI);
 1796         MemOps.push_back(MemOpQueueEntry(*MBBI, Offset, Position));
 1824             MemOps.push_back(MemOpQueueEntry(*MBBI, Offset, Position));
 1840               MemOps.insert(MI, MemOpQueueEntry(*MBBI, Offset, Position));
 1857       MergeBaseCandidates.push_back(&*MBBI);
 1938     MachineInstr &PrevMI = *PrevI;
 1951       PrevMI.copyImplicitOps(*MBB.getParent(), *MBBI);
 1987           .copyImplicitOps(*MBBI);
 2112     if (I->isDebugInstr() || MemOps.count(&*I))
 2302                (MemOps.count(&*InsertPos) || InsertPos->isDebugInstr()))
 2400       MachineInstr &MI = *MBBI;
lib/Target/ARM/ARMLowOverheadLoops.cpp
  121   for(auto &MI : make_range(T(Begin), End)) {
  134   for(auto &MI : make_range(MachineBasicBlock::iterator(Begin), End)) {
  227     for (auto &MI : *MBB) {
  267     for (auto &MI : *MBB) {
  523     for (auto &I : MBB) {
lib/Target/ARM/ARMOptimizeBarriersPass.cpp
   66     for (auto &MI : MBB) {
lib/Target/ARM/MLxExpansionPass.cpp
  314     MachineInstr &MI2 = *MII;
  316     MachineInstr &MI1 = *MII;
  334     MachineInstr *MI = &*MII++;
lib/Target/ARM/MVEVPTBlockPass.cpp
  165   return &*CmpMI;
lib/Target/ARM/Thumb1FrameLowering.cpp
  123     MachineInstr &Old = *I;
  253       tryFoldSPUpdateIntoPushPop(STI, MF, &*std::prev(MBBI), NumBytes)) {
  498       while (MBBI != MBB.begin() && isCSRestore(*MBBI, CSRegs));
  499       if (!isCSRestore(*MBBI, CSRegs))
  540           &MBB.front() != &*MBBI && std::prev(MBBI)->getOpcode() == ARM::tPOP) {
  542         if (!tryFoldSPUpdateIntoPushPop(STI, MF, &*PMBBI, NumBytes))
  545       } else if (!tryFoldSPUpdateIntoPushPop(STI, MF, &*MBBI, NumBytes))
  670       UsedRegs.stepBackward(*--InstUpToMBBI);
  704       UsedRegs.stepBackward(*PrevMBBI);
 1056         MIB.copyImplicitOps(*MI);
lib/Target/ARM/Thumb2ITBlockPass.cpp
  187     ARMCC::CondCodes NCC = getITInstrPredicate(*I, NPredReg);
  200     MachineInstr *MI = &*MBBI;
  239         MachineInstr *NMI = &*MBBI;
lib/Target/ARM/Thumb2InstrInfo.cpp
   70   ARMCC::CondCodes CC = getInstrPredicate(*Tail, PredReg);
  118   return getITInstrPredicate(*MBBI, PredReg) == ARMCC::AL;
lib/Target/ARM/ThumbRegisterInfo.cpp
  363   MachineInstr &MI = *II;
  453   MachineInstr &MI = *II;
lib/Target/AVR/AVRExpandPseudoInsts.cpp
  141   MachineInstr &MI = *MBBI;
  174   MachineInstr &MI = *MBBI;
  221   MachineInstr &MI = *MBBI;
  273   MachineInstr &MI = *MBBI;
  325   MachineInstr &MI = *MBBI;
  388   MachineInstr &MI = *MBBI;
  418   MachineInstr &MI = *MBBI;
  451   MachineInstr &MI = *MBBI;
  486   MachineInstr &MI = *MBBI;
  535   MachineInstr &MI = *MBBI;
  579   MachineInstr &MI = *MBBI;
  628   MachineInstr &MI = *MBBI;
  659   MachineInstr &MI = *MBBI;
  690   MachineInstr &MI = *MBBI;
  745   MachineInstr &MI = *MBBI;
  799   MachineInstr &MI = *MBBI;
  968   MachineInstr &MI = *MBBI;
 1014   MachineInstr &MI = *MBBI;
 1042   MachineInstr &MI = *MBBI;
 1076   MachineInstr &MI = *MBBI;
 1110   MachineInstr &MI = *MBBI;
 1144   MachineInstr &MI = *MBBI;
 1174   MachineInstr &MI = *MBBI;
 1205   MachineInstr &MI = *MBBI;
 1230   MachineInstr &MI = *MBBI;
 1247   MachineInstr &MI = *MBBI;
 1280   MachineInstr &MI = *MBBI;
 1323   MachineInstr &MI = *MBBI;
 1353   MachineInstr &MI = *MBBI;
 1412   MachineInstr &MI = *MBBI;
 1449   MachineInstr &MI = *MBBI;
 1476   MachineInstr &MI = *MBBI;
 1510   MachineInstr &MI = *MBBI;
lib/Target/AVR/AVRFrameLowering.cpp
  314     MachineInstr &MI = *I;
  377   int Amount = TII.getFrameSize(*MI);
  461       for (const MachineInstr &MI : BB) {
lib/Target/AVR/AVRInstrInfo.cpp
  281     if (!isUnpredicatedTerminator(*I)) {
  457     if (BytesRemoved) *BytesRemoved += getInstSizeInBytes(*I);
lib/Target/AVR/AVRRegisterInfo.cpp
   99   MachineInstr &MI = *II;
  133   MachineInstr &MI = *II;
lib/Target/AVR/AVRRelaxMemOperations.cpp
   89   MachineInstr &MI = *MBBI;
  125   MachineInstr &MI = *MBBI;
lib/Target/BPF/BPFInstrInfo.cpp
  179     if (!isUnpredicatedTerminator(*I))
lib/Target/BPF/BPFMIChecking.cpp
  158     for (MachineInstr &MI : MBB) {
lib/Target/BPF/BPFMIPeephole.cpp
  124     for (MachineInstr &MI : MBB) {
  241     for (MachineInstr &MI : MBB) {
  362     for (MachineInstr &MI : MBB) {
lib/Target/BPF/BPFMISimplifyPatchable.cpp
   81     for (MachineInstr &MI : MBB) {
lib/Target/BPF/BPFRegisterInfo.cpp
   62   MachineInstr &MI = *II;
   69     for (auto &I : MBB)
lib/Target/Hexagon/BitTracker.cpp
  928     const MachineInstr &MI = *It;
 1067       const MachineInstr &PI = *It++;
 1081       const MachineInstr &MI = *It++;
 1097       visitBranchesFrom(*It);
lib/Target/Hexagon/HexagonBitSimplify.cpp
  271   for (auto &I : B)
  997     Instrs.push_back(&*I);
 1308     MachineInstr *MI = &*I;
 1463     if (isTfrConst(*I))
 1466     HBS::getInstrDefs(*I, Defs);
 1594     HBS::getInstrDefs(*I, Defs);
 1598         ConstGeneration::isTfrConst(*I))
 1719     Instrs.push_back(&*I);
 2289       if (!MDT.dominates(DefI, &*At))
 2705     MachineInstr *MI = &*I;
 2810       for (auto &I : B)
 3108   for (auto &I : *C.LB) {
 3158     HBS::getInstrDefs(*I, Defs);
 3164     if (!isBitShuffle(&*I, DefR))
 3194     ShufIns.push_back(&*I);
lib/Target/Hexagon/HexagonBlockRanges.cpp
  161   for (auto &In : B) {
  315   for (auto &In : B) {
  522   for (auto &In : M.Block) {
lib/Target/Hexagon/HexagonBranchRelaxation.cpp
  198     for (auto &MI : B) {
lib/Target/Hexagon/HexagonCFGOptimizer.cpp
  127       MachineInstr &MI = *MII;
lib/Target/Hexagon/HexagonConstExtenders.cpp
 1250     for (MachineInstr &MI : MBB)
 1516       if (RefMIs.count(&*It))
lib/Target/Hexagon/HexagonConstPropagation.cpp
  732     const MachineInstr &MI = *It;
  804   for (const MachineInstr &MI : *MB) {
  818     const MachineInstr &MI = *I;
  852     MachineInstr *PN = &*I;
  894       InstrExec.insert(&*It);
  895       visitPHI(*It);
  906     if (It != End && InstrExec.count(&*It))
  912         InstrExec.insert(&*It);
  913         visitNonBranch(*It);
  923       visitBranchesFrom(*It);
  985       MachineInstr &MI = *I;
 1037       if (I->isBranch() && !InstrExec.count(&*I))
lib/Target/Hexagon/HexagonCopyToCombine.cpp
  305       if (isUnsafeToMoveAcross(*I, I2UseReg, I2DestReg, TRI)) {
  313         KillingInstr = &*I;
  343       MachineInstr &MI = *I;
  397   for (MachineInstr &MI : BB) {
  424         while (&*It != &MI) {
  489       MachineInstr &I1 = *MI++;
  540     if (!isCombinableInstType(*I2, TII, ShouldCombineAggressively))
  544     if (ShouldCombineAggressively && PotentiallyNewifiableTFR.count(&*I2))
  560     if ((IsI2LowReg && !areCombinableOperations(TRI, I1, *I2, AllowC64)) ||
  561         (IsI1LowReg && !areCombinableOperations(TRI, *I2, I1, AllowC64)))
  564     if (isSafeToMoveTogether(I1, *I2, I1DestReg, I2DestReg, DoInsertAtI1))
  565       return &*I2;
lib/Target/Hexagon/HexagonEarlyIfConv.cpp
  341   for (auto &I : *B)
  369   for (auto &MI : *B) {
  435     for (auto &MI : B) {
  454   for (const MachineInstr &MI : *B) {
  490   for (auto &MI : *B) {
  769     if (isSafeToSpeculate(&*I))
  772       predicateInstr(ToB, At, &*I, PredR, IfTrue);
  815     MachineInstr *PN = &*I;
  988     MachineInstr *PN = &*I;
lib/Target/Hexagon/HexagonExpandCondsets.cpp
  754     MachineInstr *MI = &*I;
  835     MachineInstr *MI = &*I;
  912     MachineInstr *MI = &*I;
  982     MachineInstr *MI = &*I;
 1076       bool Done = predicate(*I, (Opc == Hexagon::A2_tfrt), UpdRegs);
 1084           removeInstr(*I);
 1268     for (auto &I : B)
lib/Target/Hexagon/HexagonFixupHwLoops.cpp
  125     for (const MachineInstr &MI : MBB)
  139       unsigned InstSize = HII->getSize(*MII);
  144       if (isHardwareLoop(*MII)) {
lib/Target/Hexagon/HexagonFrameLowering.cpp
  284     for (auto &I : MBB) {
  355     for (auto &I : MBB)
  613     for (auto &MI : MBB)
  849     for (MachineInstr &I : B) {
 1326       DeallocCall->copyImplicitOps(MF, *It);
 1345   MachineInstr &MI = *I;
 1571   MachineInstr *MI = &*It;
 1592   MachineInstr *MI = &*It;
 1625   MachineInstr *MI = &*It;
 1656   MachineInstr *MI = &*It;
 1693   MachineInstr *MI = &*It;
 1731   MachineInstr *MI = &*It;
 1744     LPR.stepForward(*R, Clobbers);
 1791   MachineInstr *MI = &*It;
 1831   MachineInstr *MI = &*It;
 1860   MachineInstr *MI = &*It;
 1892       MachineInstr *MI = &*I;
 2063     for (auto &In : B) {
 2297           MachineInstr &MI = *It;
 2401     for (auto &I : B)
 2502     for (const MachineInstr &MI : B) {
lib/Target/Hexagon/HexagonGenInsert.cpp
  605       const MachineInstr *MI = &*I;
  944     MachineInstr *MI = &*I;
 1461     Instrs.push_back(&*I);
lib/Target/Hexagon/HexagonGenMux.cpp
  187     MachineInstr *MI = &*I;
  237     MachineInstr *MI = &*I;
  304     MachineInstr &Def1 = *It1, &Def2 = *It2;
  304     MachineInstr &Def1 = *It1, &Def2 = *It2;
  371     LPR.stepBackward(*I);
lib/Target/Hexagon/HexagonGenPredicate.cpp
  209       MachineInstr *MI = &*I;
  469     for (MachineInstr &MI : MBB) {
lib/Target/Hexagon/HexagonHardwareLoops.cpp
 1018       const MachineInstr *MI = &*MII;
lib/Target/Hexagon/HexagonInstrInfo.cpp
  605       if (Term != MBB.end() && isPredicated(*Term) &&
  764       return std::make_unique<HexagonPipelinerLoopInfo>(LoopInst, &*I);
 1012     Regs.stepBackward(*I);
 2982   for (auto &I : *B)
 3602     for (const MachineInstr &I : B) {
lib/Target/Hexagon/HexagonNewValueJump.cpp
  123   if (QII->isPredicated(*II))
  142   if (QII->isSolo(*II))
  145   if (QII->isFloat(*II))
  245   MachineInstr &MI = *II;
  496       MachineInstr &MI = *--MII;
  673           TransferKills(*feederPos);
  674           TransferKills(*cmpPos);
lib/Target/Hexagon/HexagonPeephole.cpp
  132       MachineInstr &MI = *I;
lib/Target/Hexagon/HexagonRegisterInfo.cpp
  187   MachineInstr &MI = *II;
lib/Target/Hexagon/HexagonSplitConst32AndConst64.cpp
   74       MachineInstr &MI = *I;
lib/Target/Hexagon/HexagonSplitDouble.cpp
  534   for (auto &MI : *HB) {
lib/Target/Hexagon/HexagonStoreWidening.cpp
  215   for (auto &I : MBB)
  498   for (auto &I : *MBB) {
lib/Target/Hexagon/HexagonVExtract.cpp
  110     for (MachineInstr &MI : MBB) {
lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
  251       while (RB != End && HII->isSchedulingBoundary(*RB, &MB, MF))
  256       while (RE != End && !HII->isSchedulingBoundary(*RE, &MB, MF))
  404   if (!HII->isHVXVec(*MII))
  421   MachineInstr &MJ = *MII;
 1168         InsertBeforeBundle = !hasWriteToReadDep(MI, *BundleIt, HRI);
 1349   if (NextMII != I.getParent()->end() && HII->isNewValueJump(*NextMII)) {
 1350     MachineInstr &NextMI = *NextMII;
 1708     MachineInstr &NvjMI = *++MII;
lib/Target/Hexagon/HexagonVectorPrint.cpp
  137     for (auto &MI : MBB) {
lib/Target/Hexagon/RDFGraph.cpp
  876     for (MachineInstr &I : B) {
lib/Target/Hexagon/RDFLiveness.cpp
  880     MachineInstr *MI = &*I;
lib/Target/Hexagon/RDFRegisters.cpp
   78     for (const MachineInstr &In : B)
lib/Target/Lanai/LanaiFrameLowering.cpp
   72       MachineInstr &MI = *MBBI++;
lib/Target/Lanai/LanaiInstrInfo.cpp
  321     const MachineInstr &Instr = *I;
  330     if (isRedundantFlagInstr(&CmpInstr, SrcReg, SrcReg2, CmpValue, &*I)) {
  331       Sub = &*I;
  356       const MachineInstr &Instr = *I;
  385                 std::make_pair(&((*I).getOperand(IO - 1)), NewCC));
  581     if (!isUnpredicatedTerminator(*Instruction))
lib/Target/Lanai/LanaiMemAluCombiner.cpp
  369     bool IsMemOp = isNonVolatileMemoryOp(*MBBIter);
lib/Target/Lanai/LanaiRegisterInfo.cpp
  141   MachineInstr &MI = *II;
lib/Target/MSP430/MSP430BranchSelector.cpp
   98     for (MachineInstr &MI : MBB) {
  121       MBBStartOffset += TII->getInstSizeInBytes(*MI);
  179       MachineInstr &OldBranch = *MI;
  199         InstrSizeDiff += TII->getInstSizeInBytes(*MI);
  205       InstrSizeDiff += TII->getInstSizeInBytes(*MI);
lib/Target/MSP430/MSP430FrameLowering.cpp
  237     MachineInstr &Old = *I;
  273     if (uint64_t CalleeAmt = TII.getFramePoppedByCallee(*I)) {
  274       MachineInstr &Old = *I;
lib/Target/MSP430/MSP430InstrInfo.cpp
  190     if (!isUnpredicatedTerminator(*I))
lib/Target/MSP430/MSP430RegisterInfo.cpp
  108   MachineInstr &MI = *II;
lib/Target/Mips/Mips16RegisterInfo.cpp
   79   MachineInstr &MI = *II;
lib/Target/Mips/MipsBranchExpansion.cpp
  271   MachineBasicBlock *Tgt = getTargetMBB(*FirstBr);
  273   if (Tgt != getTargetMBB(*LastBr))
  743       if (!TII->HasForbiddenSlot(*I))
  755       if (LastInstInFunction || !TII->SafeInForbiddenSlot(*Inst)) {
  761           MIBundleBuilder(&*I).append(
  795         int64_t Offset = computeOffset(&*Br);
  808           MBBInfos[I].Br = &*Br;
lib/Target/Mips/MipsConstantIslandPass.cpp
  658     for (MachineInstr &MI : MBB) {
  794   for (const MachineInstr &MI : *MBB)
  810   for (MachineBasicBlock::iterator I = MBB->begin(); &*I != MI; ++I) {
  812     Offset += TII->getInstSizeInBytes(*I);
 1284        Offset += TII->getInstSizeInBytes(*MI), MI = std::next(MI)) {
 1302   NewMBB = splitBlockBeforeInstr(*--MI);
 1429     CPEBB->setAlignment(getCPEAlign(*CPEBB->begin()));
lib/Target/Mips/MipsDelaySlotFiller.cpp
  317       MIBundleBuilder(I->second).append(MF->CloneMachineInstr(&*Filler));
  320       I->first->insert(I->first->end(), MF->CloneMachineInstr(&*Filler));
  604     if (!hasUnoccupiedSlot(&*I))
  615         if (searchBackward(MBB, *I)) {
  688     if (terminateSearch(*CurrI))
  699     if (delayHasHazard(*CurrI, RegDU, IM))
  717     unsigned Opcode = (*Slot).getOpcode();
  723      if (InMicroMipsMode && TII->getInstSizeInBytes(*CurrI) == 2 &&
  774   RegDU.setCallerSaved(*Slot);
lib/Target/Mips/MipsInstrInfo.cpp
  200   if (I == REnd || !isUnpredicatedTerminator(*I)) {
  207   MachineInstr *LastInst = &*I;
  226     SecondLastInst = &*I;
  249   if (++I != REnd && isUnpredicatedTerminator(*I))
  675   MIB.copyImplicitOps(*I);
  676   MIB.cloneMemRefs(*I);
lib/Target/Mips/MipsOptimizePICCall.cpp
  238     if (!isCallViaRegister(*I, Reg, Entry))
  249         getCallTargetRegOpnd(*I)->setReg(getReg(Entry));
  254       eraseGPOpnd(*I);
lib/Target/Mips/MipsRegisterInfo.cpp
  259   MachineInstr &MI = *II;
lib/Target/Mips/MipsSEISelDAGToDAG.cpp
  160     for (auto &MI: MBB) {
lib/Target/Mips/MipsSERegisterInfo.cpp
  149   MachineInstr &MI = *II;
lib/Target/NVPTX/NVPTXInstrInfo.cpp
  102   if (I == MBB.begin() || !isUnpredicatedTerminator(*--I))
  106   MachineInstr &LastInst = *I;
  109   if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  124   MachineInstr &SecondLastInst = *I;
  127   if (I != MBB.begin() && isUnpredicatedTerminator(*--I))
lib/Target/NVPTX/NVPTXPeephole.cpp
  137       auto &MI = *BlockIter++;
lib/Target/NVPTX/NVPTXPrologEpilogPass.cpp
   59     for (MachineInstr &MI : MBB) {
lib/Target/NVPTX/NVPTXProxyRegErasure.cpp
   70     for (auto &MI : BB) {
  103     for (auto &I : BB) {
lib/Target/NVPTX/NVPTXRegisterInfo.cpp
  117   MachineInstr &MI = *II;
lib/Target/NVPTX/NVPTXReplaceImageHandles.cpp
   63       MachineInstr &MI = *I;
lib/Target/PowerPC/PPCBranchCoalescing.cpp
  244   for (auto &I : Cand.BranchBlock->terminators()) {
  401     MachineInstr &PHIInst = *Iter;
  543                      << "PHI " << *I
  550           LLVM_DEBUG(dbgs() << "PHI " << *I
  564     if (!canMoveToBeginning(*I, *SourceRegion.BranchTargetBlock)) {
  565       LLVM_DEBUG(dbgs() << "Instruction " << *I
  569     if (!canMoveToEnd(*I, *TargetRegion.BranchBlock)) {
  570       LLVM_DEBUG(dbgs() << "Instruction " << *I
  685     MachineInstr &CurrInst = *I;
lib/Target/PowerPC/PPCBranchSelector.cpp
  137     for (MachineInstr &MI : *MBB) {
  315           MBBStartOffset += TII->getInstSizeInBytes(*I);
  330         MachineInstr &OldBranch = *I;
lib/Target/PowerPC/PPCCTRLoops.cpp
  154     if (I != BI && clobbersCTR(*I)) {
  156                         << ") instruction " << *I
  160                         << *BI << "\n");
  177                         << *BI << "\n");
lib/Target/PowerPC/PPCEarlyReturn.cpp
   81                   .copyImplicitOps(*I);
   95                   .copyImplicitOps(*I);
  110                   .copyImplicitOps(*I);
lib/Target/PowerPC/PPCExpandISEL.cpp
  160     for (MachineInstr &MI : MBB)
  395     for (MachineInstr &MI : *MBB)
lib/Target/PowerPC/PPCFrameLowering.cpp
  804         HandleVRSaveUpdate(*MBBI, TII);
lib/Target/PowerPC/PPCISelLowering.cpp
14198         LoopSize += TII->getInstSizeInBytes(*J);
lib/Target/PowerPC/PPCInstrInfo.cpp
  507   if (!isUnpredicatedTerminator(*I))
  519       if (I == MBB.end() || !isUnpredicatedTerminator(*I))
  525   MachineInstr &LastInst = *I;
  528   if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  587   MachineInstr &SecondLastInst = *I;
  590   if (I != MBB.begin() && isUnpredicatedTerminator(*--I))
 1727       if (&*J == &*I) {
 1801     const MachineInstr &Instr = *I;
 1804     if (&*I != &CmpInstr && (Instr.modifiesRegister(PPC::CR0, TRI) ||
 1821       Sub = &*I;
 2333       return &*It;
 2503         clearOperandKillInfo(*It, i);
 2516         assert(&*It == &StartMI && "No new def between StartMI and EndMI.");
 2522     if ((&*It) == &StartMI)
 3525     if (It->modifiesRegister(Reg, &getRegisterInfo()) && (&*It) != &DefMI)
 3527     else if (It->killsRegister(Reg, &getRegisterInfo()) && (&*It) != &DefMI)
 3530     if ((&*It) == &DefMI)
 3533   assert((&*It) == &DefMI && "DefMI is missing");
 4242       return std::make_unique<PPCPipelinerLoopInfo>(LoopInst, &*I, LoopCount);
lib/Target/PowerPC/PPCMIPeephole.cpp
  277         for (MachineInstr &MI : MBB) {
  297     for (MachineInstr &MI : MBB) {
lib/Target/PowerPC/PPCPreEmitPeephole.cpp
  174         for (MachineInstr &MI : MBB) {
  235               CRSetMI = &*It;
  257             InstrsToErase.push_back(&*It);
lib/Target/PowerPC/PPCQPXLoadSplat.cpp
   69       MachineInstr *MI = &*MBBI;
lib/Target/PowerPC/PPCReduceCRLogicals.cpp
  561         return &*Me;
  642   MachineInstr *SplitBefore = &*Def2It;
  645       SplitBefore = &*Def1It;
  712     for (MachineInstr &MI : MBB) {
lib/Target/PowerPC/PPCRegisterInfo.cpp
  495   MachineInstr &MI = *II;
  614   MachineInstr &MI = *II;
  645   MachineInstr &MI = *II;       // ; SPILL_CR <SrcReg>, <offset>
  690   MachineInstr &MI = *II;       // ; <DestReg> = RESTORE_CR <offset>
  733   MachineInstr &MI = *II;       // ; SPILL_CRBIT <SrcReg>, <offset>
  813   MachineInstr &MI = *II;       // ; <DestReg> = RESTORE_CRBIT <offset>
  863   MachineInstr &MI = *II;       // ; SPILL_VRSAVE <SrcReg>, <offset>
  889   MachineInstr &MI = *II;       // ; <DestReg> = RESTORE_VRSAVE <offset>
  989   MachineInstr &MI = *II;
lib/Target/PowerPC/PPCTLSDynamicCall.cpp
   55         MachineInstr &MI = *I;
lib/Target/PowerPC/PPCTOCRegDeps.cpp
  117       for (auto &MI : MBB) {
lib/Target/PowerPC/PPCVSXCopy.cpp
   87       for (MachineInstr &MI : MBB) {
lib/Target/PowerPC/PPCVSXFMAMutate.cpp
   77         MachineInstr &MI = *I;
lib/Target/PowerPC/PPCVSXSwapRemoval.cpp
  245     for (MachineInstr &MI : MBB) {
lib/Target/RISCV/RISCVExpandPseudoInsts.cpp
  362   MachineInstr &MI = *MBBI;
  415   MachineInstr &MI = *MBBI;
  531   MachineInstr &MI = *MBBI;
  629   MachineInstr &MI = *MBBI;
lib/Target/RISCV/RISCVISelLowering.cpp
 1266     else if (isSelectPseudo(*SequenceMBBI)) {
 1273       LastSelectPseudo = &*SequenceMBBI;
lib/Target/RISCV/RISCVInstrInfo.cpp
  243   if (I == MBB.end() || !isUnpredicatedTerminator(*I))
  250   for (auto J = I.getReverse(); J != MBB.rend() && isUnpredicatedTerminator(*J);
  285     parseCondBranch(*I, TBB, Cond);
  292     parseCondBranch(*std::prev(I), TBB, Cond);
  315     *BytesRemoved += getInstSizeInBytes(*I);
  328     *BytesRemoved += getInstSizeInBytes(*I);
lib/Target/RISCV/RISCVMergeBaseOffset.cpp
  266     for (MachineInstr &HiLUI : MBB) {
lib/Target/RISCV/RISCVRegisterInfo.cpp
  107   MachineInstr &MI = *II;
lib/Target/Sparc/LeonPasses.cpp
   49       MachineInstr &MI = *MBBI;
   84       MachineInstr &MI = *MBBI;
  136       MachineInstr &MI = *MBBI;
lib/Target/Sparc/SparcFrameLowering.cpp
  208     MachineInstr &MI = *I;
lib/Target/Sparc/SparcInstrInfo.cpp
  168   if (!isUnpredicatedTerminator(*I))
  172   MachineInstr *LastInst = &*I;
  176   if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  190   MachineInstr *SecondLastInst = &*I;
  200       if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  205         SecondLastInst = &*I;
  212   if (SecondLastInst && I != MBB.begin() && isUnpredicatedTerminator(*--I))
lib/Target/Sparc/SparcRegisterInfo.cpp
  169   MachineInstr &MI = *II;
lib/Target/SystemZ/SystemZAsmPrinter.cpp
  588     ShadowBytes += TII->getInstSizeInBytes(*MII);
lib/Target/SystemZ/SystemZElimCompare.cpp
  227     if (getRegReferences(*MBBI, SrcReg))
  271     if (getRegReferences(*MBBI, SrcReg))
  436     MachineInstr &MI = *MBBI++;
  472     MachineInstr &MI = *MBBI++;
  602     MachineInstr &MI = *--MBBI;
lib/Target/SystemZ/SystemZISelLowering.cpp
 6526     const MachineInstr& mi = *miI;
 6640     if (isSelectPseudo(*NextMIIt)) {
 6645         Selects.push_back(&*NextMIIt);
 6659         DbgValues.push_back(&*NextMIIt);
lib/Target/SystemZ/SystemZInstrInfo.cpp
   72   MachineInstr *EarlierMI = MF.CloneMachineInstr(&*MI);
  370     if (!isUnpredicatedTerminator(*I))
  379     SystemZII::Branch Branch(getBranchInfo(*I));
  459     if (!getBranchInfo(*I).hasMBBTarget())
lib/Target/SystemZ/SystemZLDCleanup.cpp
   97           I = ReplaceTLSCall(&*I, TLSBaseAddrReg);
   99           I = SetRegister(&*I, &TLSBaseAddrReg);
lib/Target/SystemZ/SystemZLongBranch.cpp
  290       Block.Size += TII->getInstSizeInBytes(*MI);
  299         Terminators.push_back(describeTerminator(*MI));
lib/Target/SystemZ/SystemZMachineScheduler.cpp
   70     HazardRec->emitInstruction(&*I);
  112                         (TII->getBranchInfo(*I).isIndirect() ||
  113                          TII->getBranchInfo(*I).getMBBTarget() == MBB));
  114     HazardRec->emitInstruction(&*I, TakenBranch);
lib/Target/SystemZ/SystemZPostRewrite.cpp
  140     TII->commuteInstruction(*MBBI, false, 1, 2);
  162   MachineInstr &MI = *MBBI;
  174     LiveRegs.stepBackward(*I);
  215   MachineInstr &MI = *MBBI;
lib/Target/SystemZ/SystemZShortenInst.cpp
  206     MachineInstr &MI = *MBBI;
lib/Target/WebAssembly/WebAssemblyArgumentMove.cpp
   80   for (MachineInstr &MI : EntryMBB) {
   89   for (MachineInstr &MI : llvm::make_range(InsertPt, EntryMBB.end())) {
lib/Target/WebAssembly/WebAssemblyCFGSort.cpp
  148   for (const MachineInstr &Term : MBB->terminators()) {
lib/Target/WebAssembly/WebAssemblyCFGStackify.cpp
  123   for (MachineInstr &MI : Pred->terminators())
  141     if (BeforeSet.count(&*std::prev(InsertPos))) {
  145         assert(!AfterSet.count(&*std::prev(Pos)));
  165     if (AfterSet.count(&*InsertPos)) {
  169         assert(!BeforeSet.count(&*Pos));
  232           BrOnExn = &*Pred->getFirstTerminator();
  266   for (const auto &MI : *Header) {
  304     if (WebAssembly::isChild(*std::prev(I), MFI))
  305       AfterSet.insert(&*std::prev(I));
  336   for (auto &MI : MBB) {
  395   for (const auto &MI : MBB) {
  416   for (const auto &MI : MBB)
  497   for (const auto &MI : *Header) {
  540       for (auto &MI : reverse(*Header)) {
  568     if (WebAssembly::isChild(*std::prev(I), MFI))
  569       AfterSet.insert(&*std::prev(I));
  584   for (const auto &MI : *Cont) {
  682     for (auto &MI : MBB) {
  714   for (auto &MI : Split) {
  873     for (auto &MI : reverse(MBB)) {
  914     for (auto &MI : reverse(MBB)) {
 1015     for (auto &MI : *EHPad) {
 1174     for (auto &MI : reverse(MBB)) {
 1244     for (MachineInstr &MI : reverse(MBB)) {
 1302       MachineInstr &MI = *I;
lib/Target/WebAssembly/WebAssemblyCallIndirectFixup.cpp
  111     for (MachineInstr &MI : MBB) {
lib/Target/WebAssembly/WebAssemblyExplicitLocals.cpp
  207     MachineInstr &MI = *I++;
  229       MachineInstr &MI = *I++;
  385     for (const MachineInstr &MI : MBB) {
lib/Target/WebAssembly/WebAssemblyFixIrreducibleControlFlow.cpp
  453     for (MachineInstr &Term : Pred->terminators())
lib/Target/WebAssembly/WebAssemblyInstrInfo.cpp
  114   for (MachineInstr &MI : MBB.terminators()) {
lib/Target/WebAssembly/WebAssemblyLateEHPrepare.cpp
  151     MachineInstr *TI = &*Pos;
  171       MachineInstr *Catch = &*CatchPos;
  188     for (auto &MI : MBB) {
  242     for (auto &MI : MBB) {
  260     for (auto &MI : MBB) {
  276     MachineInstr *Catch = &*CatchPos;
lib/Target/WebAssembly/WebAssemblyLowerBrUnless.cpp
   67       MachineInstr *MI = &*MII++;
lib/Target/WebAssembly/WebAssemblyMemIntrinsicResults.cpp
  200     for (auto &MI : MBB)
lib/Target/WebAssembly/WebAssemblyOptimizeLiveIntervals.cpp
   97     MachineInstr *MI = &*MII++;
lib/Target/WebAssembly/WebAssemblyPeephole.cpp
   88   if (&MI != &*End)
  148     for (auto &MI : MBB)
lib/Target/WebAssembly/WebAssemblyPrepareForLiveIntervals.cpp
  116     MachineInstr &MI = *MII++;
lib/Target/WebAssembly/WebAssemblyRegNumbering.cpp
   74   for (MachineInstr &MI : EntryMBB) {
lib/Target/WebAssembly/WebAssemblyRegStackify.cpp
  379     query(*I, AA, InterveningRead, InterveningWrite, InterveningEffects,
  789       MachineInstr *Insert = &*MII;
  892       if (Insert != &*MII) {
  893         imposeStackOrdering(&*MII);
  912     for (MachineInstr &MI : MBB) {
lib/Target/WebAssembly/WebAssemblyRegisterInfo.cpp
   57   MachineInstr &MI = *II;
  121     BuildMI(MBB, *II, II->getDebugLoc(), TII->get(WebAssembly::CONST_I32),
  125     BuildMI(MBB, *II, II->getDebugLoc(), TII->get(WebAssembly::ADD_I32),
lib/Target/WebAssembly/WebAssemblySetP2AlignOperands.cpp
   86     for (auto &MI : MBB) {
lib/Target/X86/X86AvoidStoreForwardingBlocks.cpp
  349     MachineInstr &MI = *PBInst;
  376         PotentialBlockers.push_back(&*PBInst);
  538     for (auto &MI : MBB) {
lib/Target/X86/X86AvoidTrailingCall.cpp
   93     if (isCallInstruction(*LastRealInstr)) {
lib/Target/X86/X86CallFrameOptimization.cpp
  170     for (MachineInstr &MI : BB) {
  258     for (auto &MI : MBB)
  369   unsigned int MaxAdjust = TII->getFrameSize(*FrameSetup) >> Log2SlotSize;
  397       Context.SPCopy = &*J++;
  451     Context.ArgStoreVector[StackDisp] = &*I;
  469   Context.Call = &*I;
  500   TII->setFrameAdjustment(*FrameSetup, Context.ExpectedDist);
lib/Target/X86/X86CallLowering.cpp
  366     MIRBuilder.setInstr(*MBB.begin());
lib/Target/X86/X86CmovConversion.cpp
  288     for (auto &I : *MBB) {
  428       for (MachineInstr &MI : *MBB) {
  709     auto &MI = *MIIt++;
  772     if (&*MIItBegin == &MI)
  820     if (X86::getCondFromCMov(*MIIt) == OppCC)
lib/Target/X86/X86CondBrFolding.cpp
  236     BrMI = &*UncondBrI;
  512       CC = X86::getCondFromBranch(*I);
  525       BrInstr = &*I;
  528     if (analyzeCompare(*I, SrcReg, CmpValue)) {
  531       CmpInstr = &*I;
lib/Target/X86/X86DiscriminateMemOps.cpp
  109     for (auto &MI : MBB) {
  128     for (auto &MI : MBB) {
lib/Target/X86/X86EvexToVex.cpp
  108     for (MachineInstr &MI : MBB)
lib/Target/X86/X86ExpandPseudo.cpp
   78   MachineInstr *JTInst = &*MBBI;
  181   MachineInstr &MI = *MBBI;
  276     MachineInstr &NewMI = *std::prev(MBBI);
  277     NewMI.copyImplicitOps(*MBBI->getParent()->getParent(), *MBBI);
  278     MBB.getParent()->moveCallSiteInfo(&*MBBI, &NewMI);
lib/Target/X86/X86FixupBWInsts.cpp
  430     MachineInstr *MI = &*I;
lib/Target/X86/X86FixupLEAs.cpp
  128   MachineInstr &MI = *MBBI;
  241   MachineInstr &MI = *I;
  289     InstrDistance += TSM.computeInstrLatency(&*CurInst);
  355   MachineInstr &MI = *I;
  445   MachineInstr &MI = *I;
  482   MachineInstr &MI = *I;
  532   MachineInstr &MI = *I;
lib/Target/X86/X86FixupSetCC.cpp
   82         return &*MI;
  103     for (auto &MI : MBB) {
lib/Target/X86/X86FlagsCopyLowering.cpp
  305     for (MachineInstr &MI : *Succ) {
  359     for (MachineInstr &MI : *MBB)
  701     for (MachineInstr &MI : MBB)
lib/Target/X86/X86FloatingPoint.cpp
  418     MachineInstr &MI = *I;
  440       PrevMI = &*std::prev(I);
  834   MachineInstr &MI = *I;
 1011   MachineInstr &MI = *I;
 1098   MachineInstr &MI = *I;
 1114   MachineInstr &MI = *I;
 1174   MachineInstr &MI = *I;
 1285   MachineInstr &MI = *I;
 1358   MBB->remove(&*I++);
 1381   MachineInstr &MI = *I;
 1409   MachineInstr &MI = *I;
 1438   MachineInstr &MI = *Inst;
 1696     MachineInstr &MI = *I;
lib/Target/X86/X86FrameLowering.cpp
  214   for (const MachineInstr &MI : MBB.terminators()) {
  505   for (MachineInstr &MI : PrologMBB) {
  748     for (MachineInstr &MI : *RoundMBB) {
  751     for (MachineInstr &MI : *LoopMBB) {
 1385     const MachineInstr &FrameInstr = *MBBI;
 1608   bool IsFunclet = MBBI == MBB.end() ? false : isFuncletReturnInstr(*MBBI);
 1671     emitCatchRetReturnValue(MBB, FirstCSPop, &*Terminator);
 2173   if (MI != MBB.end() && isFuncletReturnInstr(*MI) && STI.isOSWindows()) {
 2619       for (auto &MI : MBB) {
 2799   uint64_t Amount = TII.getFrameSize(*I);
 2800   uint64_t InternalAmt = (isDestroy || Amount) ? TII.getFrameAdjustment(*I) : 0;
 3118     for (auto &MI : MBB) {
lib/Target/X86/X86ISelLowering.cpp
29598     const MachineInstr& mi = *miI;
29935     while (NextMIIt != ThisMBB->end() && isCMOVPseudo(*NextMIIt) &&
29938       LastCMOV = &*NextMIIt;
29951     return EmitLoweredCascadedSelect(MI, *NextMIIt, ThisMBB);
30951     for (const auto &MI : MBB) {
31131     for (auto &II : reverse(*MBB)) {
31399         BuildMI(*BB, *MBBI, DL, TII->get(X86::LEA32r), computedAddrVReg), AM);
lib/Target/X86/X86InstrInfo.cpp
  667   MachineInstr &NewMI = *std::prev(I);
 2450     X86::CondCode CC = X86::getCondFromBranch(*I);
 2518     if (!isUnpredicatedTerminator(*I))
 2557     X86::CondCode BranchCode = X86::getCondFromBranch(*I);
 2609       CondBranches.push_back(&*I);
 2662     CondBranches.push_back(&*I);
 2703       ConditionDef = &*I;
 2758         X86::getCondFromBranch(*I) == X86::COND_INVALID)
 3629       NewCC = isUseDefConvertible(*J);
 3637         MI = &*Def;
 3663     MachineInstr &Instr = *RI;
 3705     const MachineInstr &Instr = *I;
 3774       OpsToUpdate.push_back(std::make_pair(&*I, ReplacementCC));
 3801       MachineInstr *Instr = &*InsertI;
 7877               I = ReplaceTLSBaseAddrCall(*I, TLSBaseAddrReg);
 7879               I = SetRegister(*I, &TLSBaseAddrReg);
 8046   MachineInstr &MI = *MIT;
lib/Target/X86/X86OptimizeLEAs.cpp
  474   for (auto &MI : MBB) {
  498     MachineInstr &MI = *I++;
  541                   InstrPos[&*std::prev(MachineBasicBlock::iterator(DefMI))]) &&
lib/Target/X86/X86PadShortFunction.cpp
  181   for (MachineInstr &MI : *MBB) {
lib/Target/X86/X86RegisterInfo.cpp
  679   MachineInstr &MI = *II;
  718   MachineInstr &MI = *II;
  723                                                : isFuncletReturnInstr(*MBBI);
lib/Target/X86/X86SpeculativeLoadHardening.cpp
  286   for (MachineInstr &MI : Succ) {
  331     for (auto &MI : MBB) {
  373     for (MachineInstr &MI : MBB) {
  632     for (MachineInstr &MI : llvm::reverse(MBB)) {
 1029         for (MachineInstr &MI : MBB.terminators()) {
 1670       for (MachineInstr &MI : MBB) {
 1771     for (MachineInstr &MI : MBB) {
lib/Target/X86/X86VZeroUpper.cpp
  202   for (MachineInstr &MI : MBB) {
lib/Target/X86/X86WinAllocaExpander.cpp
  154     for (MachineInstr &MI : *MBB) {
lib/Target/XCore/XCoreFrameLowering.cpp
  493     MachineInstr &Old = *I;
lib/Target/XCore/XCoreFrameToArgsOffsetElim.cpp
   57         MachineInstr &OldInst = *MBBI;
lib/Target/XCore/XCoreInstrInfo.cpp
  199   if (!isUnpredicatedTerminator(*I))
  203   MachineInstr *LastInst = &*I;
  206   if (I == MBB.begin() || !isUnpredicatedTerminator(*--I)) {
  226   MachineInstr *SecondLastInst = &*I;
  229   if (SecondLastInst && I != MBB.begin() && isUnpredicatedTerminator(*--I))
lib/Target/XCore/XCoreRegisterInfo.cpp
   64   MachineInstr &MI = *II;
   97   MachineInstr &MI = *II;
  131   MachineInstr &MI = *II;
  165   MachineInstr &MI = *II;
  263   MachineInstr &MI = *II;
unittests/CodeGen/GlobalISel/GISelMITest.h
  130     for (MachineInstr &MI : MBB) {
unittests/MI/LiveIntervalTest.cpp
  108   for (MachineInstr &MI : MBB) {
unittests/Target/AArch64/InstSizes.cpp
   90               EXPECT_EQ(16u, II.getInstSizeInBytes(*I));
   92               EXPECT_EQ(32u, II.getInstSizeInBytes(*I));
  105               EXPECT_EQ(16u, II.getInstSizeInBytes(*I));
  107               EXPECT_EQ(32u, II.getInstSizeInBytes(*I));
  121         EXPECT_EQ(16u, II.getInstSizeInBytes(*I));
usr/include/c++/7.4.0/bits/predefined_ops.h
  283 	{ return bool(_M_pred(*__it)); }
  351 	{ return !bool(_M_pred(*__it)); }
usr/include/c++/7.4.0/bits/stl_algo.h
 3884 	__f(*__first);
usr/include/c++/7.4.0/bits/stl_iterator.h
  172 	return *--__tmp;
usr/include/c++/7.4.0/bits/stl_numeric.h
  154 	__init = __binary_op(__init, *__first);