References
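Nearly all of the references below follow one of two patterns: a switch over MI.getOpcode(), or a direct comparison of the opcode against specific TargetOpcode (or target-specific) enumerators. The following is a minimal illustrative sketch of both patterns; the helper names are hypothetical and not taken from any of the files listed here.

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/TargetOpcodes.h"

    using namespace llvm;

    // Direct comparison against generic opcodes (cf. the StackColoring.cpp
    // and *GenSubtargetInfo.inc entries below).
    static bool isLifetimeMarker(const MachineInstr &MI) {
      return MI.getOpcode() == TargetOpcode::LIFETIME_START ||
             MI.getOpcode() == TargetOpcode::LIFETIME_END;
    }

    // Switch-based dispatch on the opcode (cf. the TableGen-generated
    // *GenInstrInfo.inc entries and the many switch statements below).
    static bool isStackMapLike(const MachineInstr &MI) {
      switch (MI.getOpcode()) {
      case TargetOpcode::STACKMAP:
      case TargetOpcode::PATCHPOINT:
      case TargetOpcode::STATEPOINT:
        return true;
      default:
        return false;
      }
    }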

gen/lib/Target/AArch64/AArch64GenInstrInfo.inc
18004   switch(MI.getOpcode()) {
18065   switch(MI.getOpcode()) {
18091   switch(MI.getOpcode()) {
18148   switch(MI.getOpcode()) {
18199   switch(MI.getOpcode()) {
18225   switch(MI.getOpcode()) {
18281   switch(MI.getOpcode()) {
18312   switch(MI.getOpcode()) {
18332   switch(MI.getOpcode()) {
18349   switch(MI.getOpcode()) {
18369   switch(MI.getOpcode()) {
18401   switch(MI.getOpcode()) {
gen/lib/Target/AArch64/AArch64GenSubtargetInfo.inc
18033               MI->getOpcode() == AArch64::EXTRWrri
18034               || MI->getOpcode() == AArch64::EXTRXrri
18044               MI->getOpcode() == AArch64::EXTRWrri
18045               || MI->getOpcode() == AArch64::EXTRXrri
18444             ( MI->getOpcode() == AArch64::BLR )
18452             ( MI->getOpcode() == AArch64::BLR )
18460             ( MI->getOpcode() == AArch64::BLR )
18514               MI->getOpcode() == AArch64::EXTRWrri
18515               || MI->getOpcode() == AArch64::EXTRXrri
18525               MI->getOpcode() == AArch64::EXTRWrri
18526               || MI->getOpcode() == AArch64::EXTRXrri
18538               MI->getOpcode() == AArch64::EXTRWrri
18539               || MI->getOpcode() == AArch64::EXTRXrri
18549               MI->getOpcode() == AArch64::EXTRWrri
18550               || MI->getOpcode() == AArch64::EXTRXrri
gen/lib/Target/AMDGPU/AMDGPUGenMCPseudoLowering.inc
   12   switch (MI->getOpcode()) {
gen/lib/Target/ARM/ARMGenMCPseudoLowering.inc
   12   switch (MI->getOpcode()) {
gen/lib/Target/Mips/MipsGenMCPseudoLowering.inc
   12   switch (MI->getOpcode()) {
gen/lib/Target/RISCV/RISCVGenMCPseudoLowering.inc
   12   switch (MI->getOpcode()) {
gen/lib/Target/X86/X86GenInstrInfo.inc
49344   switch(MI.getOpcode()) {
gen/lib/Target/X86/X86GenSubtargetInfo.inc
21545               MI->getOpcode() == X86::CMPXCHG8B
21546               || MI->getOpcode() == X86::LCMPXCHG8B
21553               MI->getOpcode() == X86::CMPXCHG16B
21554               || MI->getOpcode() == X86::LCMPXCHG16B
21561               MI->getOpcode() == X86::LCMPXCHG8
21562               || MI->getOpcode() == X86::CMPXCHG8rm
21569               MI->getOpcode() == X86::LCMPXCHG8
21570               || MI->getOpcode() == X86::CMPXCHG8rm
21571               || MI->getOpcode() == X86::CMPXCHG16rm
21572               || MI->getOpcode() == X86::CMPXCHG32rm
21573               || MI->getOpcode() == X86::CMPXCHG64rm
21574               || MI->getOpcode() == X86::LCMPXCHG16
21575               || MI->getOpcode() == X86::LCMPXCHG32
21576               || MI->getOpcode() == X86::LCMPXCHG64
21577               || MI->getOpcode() == X86::LCMPXCHG8B
21578               || MI->getOpcode() == X86::LCMPXCHG16B
21583             MI->getOpcode() == X86::CMPXCHG8B
21584             || MI->getOpcode() == X86::LCMPXCHG8B
21588             MI->getOpcode() == X86::CMPXCHG16B
21589             || MI->getOpcode() == X86::LCMPXCHG16B
21593             MI->getOpcode() == X86::LCMPXCHG8
21594             || MI->getOpcode() == X86::CMPXCHG8rm
21598             MI->getOpcode() == X86::LCMPXCHG8
21599             || MI->getOpcode() == X86::CMPXCHG8rm
21600             || MI->getOpcode() == X86::CMPXCHG16rm
21601             || MI->getOpcode() == X86::CMPXCHG32rm
21602             || MI->getOpcode() == X86::CMPXCHG64rm
21603             || MI->getOpcode() == X86::LCMPXCHG16
21604             || MI->getOpcode() == X86::LCMPXCHG32
21605             || MI->getOpcode() == X86::LCMPXCHG64
21606             || MI->getOpcode() == X86::LCMPXCHG8B
21607             || MI->getOpcode() == X86::LCMPXCHG16B
21610       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
21620               MI->getOpcode() == X86::CMPXCHG8B
21621               || MI->getOpcode() == X86::LCMPXCHG8B
21628               MI->getOpcode() == X86::CMPXCHG16B
21629               || MI->getOpcode() == X86::LCMPXCHG16B
21636               MI->getOpcode() == X86::LCMPXCHG8
21637               || MI->getOpcode() == X86::CMPXCHG8rm
21644               MI->getOpcode() == X86::LCMPXCHG8
21645               || MI->getOpcode() == X86::CMPXCHG8rm
21646               || MI->getOpcode() == X86::CMPXCHG16rm
21647               || MI->getOpcode() == X86::CMPXCHG32rm
21648               || MI->getOpcode() == X86::CMPXCHG64rm
21649               || MI->getOpcode() == X86::LCMPXCHG16
21650               || MI->getOpcode() == X86::LCMPXCHG32
21651               || MI->getOpcode() == X86::LCMPXCHG64
21652               || MI->getOpcode() == X86::LCMPXCHG8B
21653               || MI->getOpcode() == X86::LCMPXCHG16B
21658             MI->getOpcode() == X86::CMPXCHG8B
21659             || MI->getOpcode() == X86::LCMPXCHG8B
21663             MI->getOpcode() == X86::CMPXCHG16B
21664             || MI->getOpcode() == X86::LCMPXCHG16B
21668             MI->getOpcode() == X86::LCMPXCHG8
21669             || MI->getOpcode() == X86::CMPXCHG8rm
21673             MI->getOpcode() == X86::LCMPXCHG8
21674             || MI->getOpcode() == X86::CMPXCHG8rm
21675             || MI->getOpcode() == X86::CMPXCHG16rm
21676             || MI->getOpcode() == X86::CMPXCHG32rm
21677             || MI->getOpcode() == X86::CMPXCHG64rm
21678             || MI->getOpcode() == X86::LCMPXCHG16
21679             || MI->getOpcode() == X86::LCMPXCHG32
21680             || MI->getOpcode() == X86::LCMPXCHG64
21681             || MI->getOpcode() == X86::LCMPXCHG8B
21682             || MI->getOpcode() == X86::LCMPXCHG16B
21685       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
21695               MI->getOpcode() == X86::CMPXCHG8B
21696               || MI->getOpcode() == X86::LCMPXCHG8B
21703               MI->getOpcode() == X86::CMPXCHG16B
21704               || MI->getOpcode() == X86::LCMPXCHG16B
21711               MI->getOpcode() == X86::LCMPXCHG8
21712               || MI->getOpcode() == X86::CMPXCHG8rm
21719               MI->getOpcode() == X86::LCMPXCHG8
21720               || MI->getOpcode() == X86::CMPXCHG8rm
21721               || MI->getOpcode() == X86::CMPXCHG16rm
21722               || MI->getOpcode() == X86::CMPXCHG32rm
21723               || MI->getOpcode() == X86::CMPXCHG64rm
21724               || MI->getOpcode() == X86::LCMPXCHG16
21725               || MI->getOpcode() == X86::LCMPXCHG32
21726               || MI->getOpcode() == X86::LCMPXCHG64
21727               || MI->getOpcode() == X86::LCMPXCHG8B
21728               || MI->getOpcode() == X86::LCMPXCHG16B
21733             MI->getOpcode() == X86::CMPXCHG8B
21734             || MI->getOpcode() == X86::LCMPXCHG8B
21738             MI->getOpcode() == X86::CMPXCHG16B
21739             || MI->getOpcode() == X86::LCMPXCHG16B
21743             MI->getOpcode() == X86::LCMPXCHG8
21744             || MI->getOpcode() == X86::CMPXCHG8rm
21748             MI->getOpcode() == X86::LCMPXCHG8
21749             || MI->getOpcode() == X86::CMPXCHG8rm
21750             || MI->getOpcode() == X86::CMPXCHG16rm
21751             || MI->getOpcode() == X86::CMPXCHG32rm
21752             || MI->getOpcode() == X86::CMPXCHG64rm
21753             || MI->getOpcode() == X86::LCMPXCHG16
21754             || MI->getOpcode() == X86::LCMPXCHG32
21755             || MI->getOpcode() == X86::LCMPXCHG64
21756             || MI->getOpcode() == X86::LCMPXCHG8B
21757             || MI->getOpcode() == X86::LCMPXCHG16B
21760       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
21770               MI->getOpcode() == X86::CMPXCHG8B
21771               || MI->getOpcode() == X86::LCMPXCHG8B
21778               MI->getOpcode() == X86::CMPXCHG16B
21779               || MI->getOpcode() == X86::LCMPXCHG16B
21786               MI->getOpcode() == X86::LCMPXCHG8
21787               || MI->getOpcode() == X86::CMPXCHG8rm
21794               MI->getOpcode() == X86::LCMPXCHG8
21795               || MI->getOpcode() == X86::CMPXCHG8rm
21796               || MI->getOpcode() == X86::CMPXCHG16rm
21797               || MI->getOpcode() == X86::CMPXCHG32rm
21798               || MI->getOpcode() == X86::CMPXCHG64rm
21799               || MI->getOpcode() == X86::LCMPXCHG16
21800               || MI->getOpcode() == X86::LCMPXCHG32
21801               || MI->getOpcode() == X86::LCMPXCHG64
21802               || MI->getOpcode() == X86::LCMPXCHG8B
21803               || MI->getOpcode() == X86::LCMPXCHG16B
21808             MI->getOpcode() == X86::CMPXCHG8B
21809             || MI->getOpcode() == X86::LCMPXCHG8B
21813             MI->getOpcode() == X86::CMPXCHG16B
21814             || MI->getOpcode() == X86::LCMPXCHG16B
21818             MI->getOpcode() == X86::LCMPXCHG8
21819             || MI->getOpcode() == X86::CMPXCHG8rm
21823             MI->getOpcode() == X86::LCMPXCHG8
21824             || MI->getOpcode() == X86::CMPXCHG8rm
21825             || MI->getOpcode() == X86::CMPXCHG16rm
21826             || MI->getOpcode() == X86::CMPXCHG32rm
21827             || MI->getOpcode() == X86::CMPXCHG64rm
21828             || MI->getOpcode() == X86::LCMPXCHG16
21829             || MI->getOpcode() == X86::LCMPXCHG32
21830             || MI->getOpcode() == X86::LCMPXCHG64
21831             || MI->getOpcode() == X86::LCMPXCHG8B
21832             || MI->getOpcode() == X86::LCMPXCHG16B
21835       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
21845               MI->getOpcode() == X86::CMPXCHG8B
21846               || MI->getOpcode() == X86::LCMPXCHG8B
21853               MI->getOpcode() == X86::CMPXCHG16B
21854               || MI->getOpcode() == X86::LCMPXCHG16B
21861               MI->getOpcode() == X86::LCMPXCHG8
21862               || MI->getOpcode() == X86::CMPXCHG8rm
21869               MI->getOpcode() == X86::LCMPXCHG8
21870               || MI->getOpcode() == X86::CMPXCHG8rm
21871               || MI->getOpcode() == X86::CMPXCHG16rm
21872               || MI->getOpcode() == X86::CMPXCHG32rm
21873               || MI->getOpcode() == X86::CMPXCHG64rm
21874               || MI->getOpcode() == X86::LCMPXCHG16
21875               || MI->getOpcode() == X86::LCMPXCHG32
21876               || MI->getOpcode() == X86::LCMPXCHG64
21877               || MI->getOpcode() == X86::LCMPXCHG8B
21878               || MI->getOpcode() == X86::LCMPXCHG16B
21883             MI->getOpcode() == X86::CMPXCHG8B
21884             || MI->getOpcode() == X86::LCMPXCHG8B
21888             MI->getOpcode() == X86::CMPXCHG16B
21889             || MI->getOpcode() == X86::LCMPXCHG16B
21893             MI->getOpcode() == X86::LCMPXCHG8
21894             || MI->getOpcode() == X86::CMPXCHG8rm
21898             MI->getOpcode() == X86::LCMPXCHG8
21899             || MI->getOpcode() == X86::CMPXCHG8rm
21900             || MI->getOpcode() == X86::CMPXCHG16rm
21901             || MI->getOpcode() == X86::CMPXCHG32rm
21902             || MI->getOpcode() == X86::CMPXCHG64rm
21903             || MI->getOpcode() == X86::LCMPXCHG16
21904             || MI->getOpcode() == X86::LCMPXCHG32
21905             || MI->getOpcode() == X86::LCMPXCHG64
21906             || MI->getOpcode() == X86::LCMPXCHG8B
21907             || MI->getOpcode() == X86::LCMPXCHG16B
21910       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
22520               MI->getOpcode() == X86::CMPXCHG8B
22521               || MI->getOpcode() == X86::LCMPXCHG8B
22528               MI->getOpcode() == X86::CMPXCHG16B
22529               || MI->getOpcode() == X86::LCMPXCHG16B
22536               MI->getOpcode() == X86::LCMPXCHG8
22537               || MI->getOpcode() == X86::CMPXCHG8rm
22544               MI->getOpcode() == X86::LCMPXCHG8
22545               || MI->getOpcode() == X86::CMPXCHG8rm
22546               || MI->getOpcode() == X86::CMPXCHG16rm
22547               || MI->getOpcode() == X86::CMPXCHG32rm
22548               || MI->getOpcode() == X86::CMPXCHG64rm
22549               || MI->getOpcode() == X86::LCMPXCHG16
22550               || MI->getOpcode() == X86::LCMPXCHG32
22551               || MI->getOpcode() == X86::LCMPXCHG64
22552               || MI->getOpcode() == X86::LCMPXCHG8B
22553               || MI->getOpcode() == X86::LCMPXCHG16B
22558             MI->getOpcode() == X86::CMPXCHG8B
22559             || MI->getOpcode() == X86::LCMPXCHG8B
22563             MI->getOpcode() == X86::CMPXCHG16B
22564             || MI->getOpcode() == X86::LCMPXCHG16B
22568             MI->getOpcode() == X86::LCMPXCHG8
22569             || MI->getOpcode() == X86::CMPXCHG8rm
22573             MI->getOpcode() == X86::LCMPXCHG8
22574             || MI->getOpcode() == X86::CMPXCHG8rm
22575             || MI->getOpcode() == X86::CMPXCHG16rm
22576             || MI->getOpcode() == X86::CMPXCHG32rm
22577             || MI->getOpcode() == X86::CMPXCHG64rm
22578             || MI->getOpcode() == X86::LCMPXCHG16
22579             || MI->getOpcode() == X86::LCMPXCHG32
22580             || MI->getOpcode() == X86::LCMPXCHG64
22581             || MI->getOpcode() == X86::LCMPXCHG8B
22582             || MI->getOpcode() == X86::LCMPXCHG16B
22585       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
22595               MI->getOpcode() == X86::CMPXCHG8B
22596               || MI->getOpcode() == X86::LCMPXCHG8B
22603               MI->getOpcode() == X86::CMPXCHG16B
22604               || MI->getOpcode() == X86::LCMPXCHG16B
22611               MI->getOpcode() == X86::LCMPXCHG8
22612               || MI->getOpcode() == X86::CMPXCHG8rm
22619               MI->getOpcode() == X86::LCMPXCHG8
22620               || MI->getOpcode() == X86::CMPXCHG8rm
22621               || MI->getOpcode() == X86::CMPXCHG16rm
22622               || MI->getOpcode() == X86::CMPXCHG32rm
22623               || MI->getOpcode() == X86::CMPXCHG64rm
22624               || MI->getOpcode() == X86::LCMPXCHG16
22625               || MI->getOpcode() == X86::LCMPXCHG32
22626               || MI->getOpcode() == X86::LCMPXCHG64
22627               || MI->getOpcode() == X86::LCMPXCHG8B
22628               || MI->getOpcode() == X86::LCMPXCHG16B
22633             MI->getOpcode() == X86::CMPXCHG8B
22634             || MI->getOpcode() == X86::LCMPXCHG8B
22638             MI->getOpcode() == X86::CMPXCHG16B
22639             || MI->getOpcode() == X86::LCMPXCHG16B
22643             MI->getOpcode() == X86::LCMPXCHG8
22644             || MI->getOpcode() == X86::CMPXCHG8rm
22648             MI->getOpcode() == X86::LCMPXCHG8
22649             || MI->getOpcode() == X86::CMPXCHG8rm
22650             || MI->getOpcode() == X86::CMPXCHG16rm
22651             || MI->getOpcode() == X86::CMPXCHG32rm
22652             || MI->getOpcode() == X86::CMPXCHG64rm
22653             || MI->getOpcode() == X86::LCMPXCHG16
22654             || MI->getOpcode() == X86::LCMPXCHG32
22655             || MI->getOpcode() == X86::LCMPXCHG64
22656             || MI->getOpcode() == X86::LCMPXCHG8B
22657             || MI->getOpcode() == X86::LCMPXCHG16B
22660       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
22670               MI->getOpcode() == X86::CMPXCHG8B
22671               || MI->getOpcode() == X86::LCMPXCHG8B
22678               MI->getOpcode() == X86::CMPXCHG16B
22679               || MI->getOpcode() == X86::LCMPXCHG16B
22686               MI->getOpcode() == X86::LCMPXCHG8
22687               || MI->getOpcode() == X86::CMPXCHG8rm
22694               MI->getOpcode() == X86::LCMPXCHG8
22695               || MI->getOpcode() == X86::CMPXCHG8rm
22696               || MI->getOpcode() == X86::CMPXCHG16rm
22697               || MI->getOpcode() == X86::CMPXCHG32rm
22698               || MI->getOpcode() == X86::CMPXCHG64rm
22699               || MI->getOpcode() == X86::LCMPXCHG16
22700               || MI->getOpcode() == X86::LCMPXCHG32
22701               || MI->getOpcode() == X86::LCMPXCHG64
22702               || MI->getOpcode() == X86::LCMPXCHG8B
22703               || MI->getOpcode() == X86::LCMPXCHG16B
22708             MI->getOpcode() == X86::CMPXCHG8B
22709             || MI->getOpcode() == X86::LCMPXCHG8B
22713             MI->getOpcode() == X86::CMPXCHG16B
22714             || MI->getOpcode() == X86::LCMPXCHG16B
22718             MI->getOpcode() == X86::LCMPXCHG8
22719             || MI->getOpcode() == X86::CMPXCHG8rm
22723             MI->getOpcode() == X86::LCMPXCHG8
22724             || MI->getOpcode() == X86::CMPXCHG8rm
22725             || MI->getOpcode() == X86::CMPXCHG16rm
22726             || MI->getOpcode() == X86::CMPXCHG32rm
22727             || MI->getOpcode() == X86::CMPXCHG64rm
22728             || MI->getOpcode() == X86::LCMPXCHG16
22729             || MI->getOpcode() == X86::LCMPXCHG32
22730             || MI->getOpcode() == X86::LCMPXCHG64
22731             || MI->getOpcode() == X86::LCMPXCHG8B
22732             || MI->getOpcode() == X86::LCMPXCHG16B
22735       if (( MI->getOpcode() == X86::CMPXCHG8rr ))
22919   switch(MI->getOpcode()) {
23010   switch(MI->getOpcode()) {
23052   switch(MI->getOpcode()) {
include/llvm/CodeGen/GlobalISel/InstructionSelectorImpl.h
  143       unsigned Opcode = State.MIs[InsnID]->getOpcode();
  163       const int64_t Opcode = State.MIs[InsnID]->getOpcode();
  248       assert(State.MIs[InsnID]->getOpcode() == TargetOpcode::G_CONSTANT &&
  272       assert(State.MIs[InsnID]->getOpcode() == TargetOpcode::G_CONSTANT &&
  294       assert(State.MIs[InsnID]->getOpcode() == TargetOpcode::G_FCONSTANT &&
  908       assert(State.MIs[OldInsnID]->getOpcode() == TargetOpcode::G_CONSTANT && "Expected G_CONSTANT");
  927       assert(State.MIs[OldInsnID]->getOpcode() == TargetOpcode::G_FCONSTANT && "Expected G_FCONSTANT");
include/llvm/CodeGen/GlobalISel/LegalizationArtifactCombiner.h
   50     assert(MI.getOpcode() == TargetOpcode::G_ANYEXT);
   72       Builder.buildInstr(ExtMI->getOpcode(), {DstReg}, {ExtSrc});
   80     if (SrcMI->getOpcode() == TargetOpcode::G_CONSTANT) {
   95     assert(MI.getOpcode() == TargetOpcode::G_ZEXT);
  121     if (SrcMI->getOpcode() == TargetOpcode::G_CONSTANT) {
  136     assert(MI.getOpcode() == TargetOpcode::G_SEXT);
  162     assert(MI.getOpcode() == TargetOpcode::G_TRUNC);
  171     if (SrcMI->getOpcode() == TargetOpcode::G_CONSTANT) {
  189     unsigned Opcode = MI.getOpcode();
  251     assert(MI.getOpcode() == TargetOpcode::G_UNMERGE_VALUES);
  265     unsigned SrcOp = SrcDef->getOpcode();
  271     if (!MergeI || !canFoldMergeOpcode(MergeI->getOpcode(),
  377     assert(MI.getOpcode() == TargetOpcode::G_EXTRACT);
  393     if (!MergeI || !isMergeLikeOpcode(MergeI->getOpcode()))
  435     switch (MI.getOpcode()) {
  463     switch (MI.getOpcode()) {
  503           assert((TmpDef->getOpcode() == TargetOpcode::COPY ||
  504                   isArtifactCast(TmpDef->getOpcode())) &&
include/llvm/CodeGen/GlobalISel/MIPatternMatch.h
  182       if (TmpMI->getOpcode() == Opcode && TmpMI->getNumOperands() == 3) {
  250       if (TmpMI->getOpcode() == Opcode && TmpMI->getNumOperands() == 2) {
include/llvm/CodeGen/MachineInstr.h
 1039   bool isEHLabel() const { return getOpcode() == TargetOpcode::EH_LABEL; }
 1040   bool isGCLabel() const { return getOpcode() == TargetOpcode::GC_LABEL; }
 1042     return getOpcode() == TargetOpcode::ANNOTATION_LABEL;
 1051     return getOpcode() == TargetOpcode::CFI_INSTRUCTION;
 1057   bool isDebugValue() const { return getOpcode() == TargetOpcode::DBG_VALUE; }
 1058   bool isDebugLabel() const { return getOpcode() == TargetOpcode::DBG_LABEL; }
 1080     return getOpcode() == TargetOpcode::PHI ||
 1081            getOpcode() == TargetOpcode::G_PHI;
 1083   bool isKill() const { return getOpcode() == TargetOpcode::KILL; }
 1084   bool isImplicitDef() const { return getOpcode()==TargetOpcode::IMPLICIT_DEF; }
 1086     return getOpcode() == TargetOpcode::INLINEASM ||
 1087            getOpcode() == TargetOpcode::INLINEASM_BR;
 1100     return getOpcode() == TargetOpcode::INSERT_SUBREG;
 1104     return getOpcode() == TargetOpcode::SUBREG_TO_REG;
 1108     return getOpcode() == TargetOpcode::REG_SEQUENCE;
 1112     return getOpcode() == TargetOpcode::BUNDLE;
 1116     return getOpcode() == TargetOpcode::COPY;
 1124     return getOpcode() == TargetOpcode::EXTRACT_SUBREG;
 1142     switch (getOpcode()) {
 1162     switch (getOpcode()) {
include/llvm/CodeGen/TargetInstrInfo.h
  100     return MI.getOpcode() == TargetOpcode::IMPLICIT_DEF ||
  171     return I.getOpcode() == getCallFrameSetupOpcode() ||
  172            I.getOpcode() == getCallFrameDestroyOpcode();
  177     return I.getOpcode() == getCallFrameSetupOpcode();
lib/CodeGen/Analysis.cpp
  756     if (MBBI == MBB.end() || MBBI->getOpcode() != TII->getCatchReturnOpcode())
lib/CodeGen/AsmPrinter/AsmPrinter.cpp
 1079       switch (MI.getOpcode()) {
lib/CodeGen/BranchFolding.cpp
  257   unsigned Hash = MI.getOpcode();
lib/CodeGen/BranchRelaxation.cpp
  285   if (TII->isBranchOffsetInRange(MI.getOpcode(), DestOffset - BrOffset))
  447   assert(!TII->isBranchOffsetInRange(MI.getOpcode(), DestOffset - SrcOffset));
lib/CodeGen/DeadMachineInstructionElim.cpp
   66   if (MI->getOpcode() == TargetOpcode::LOCAL_ESCAPE)
lib/CodeGen/DetectDeadLanes.cpp
  141   switch (MI.getOpcode()) {
  166   switch (MI.getOpcode()) {
  236   switch (MI.getOpcode()) {
  285   if (MI.getOpcode() == TargetOpcode::PATCHPOINT)
  314   switch (MI.getOpcode()) {
lib/CodeGen/ExpandPostRAPseudos.cpp
  209       switch (MI.getOpcode()) {
lib/CodeGen/GlobalISel/CSEInfo.cpp
  135   assert(shouldCSE(MI->getOpcode()) && "Trying to CSE an unsupported Node");
  170   if (shouldCSE(MI->getOpcode())) {
  177   assert(shouldCSE(MI->getOpcode()) && "Invalid instruction for CSE");
  236       if (!shouldCSE(MI.getOpcode()))
  270   addNodeIDOpcode(MI->getOpcode());
lib/CodeGen/GlobalISel/CSEMIRBuilder.cpp
   42     CSEInfo->countOpcodeHit(MI->getOpcode());
  107   assert(canPerformCSEForOpc(MIB->getOpcode()) &&
lib/CodeGen/GlobalISel/CombinerHelper.cpp
   73   if (MI.getOpcode() != TargetOpcode::COPY)
  104   assert(MI.getOpcode() == TargetOpcode::G_CONCAT_VECTORS &&
  119     switch (Def->getOpcode()) {
  187   assert(MI.getOpcode() == TargetOpcode::G_SHUFFLE_VECTOR &&
  366   if (MI.getOpcode() != TargetOpcode::G_LOAD &&
  367       MI.getOpcode() != TargetOpcode::G_SEXTLOAD &&
  368       MI.getOpcode() != TargetOpcode::G_ZEXTLOAD)
  396   unsigned PreferredOpcode = MI.getOpcode() == TargetOpcode::G_LOAD
  398                                  : MI.getOpcode() == TargetOpcode::G_SEXTLOAD
  403     if (UseMI.getOpcode() == TargetOpcode::G_SEXT ||
  404         UseMI.getOpcode() == TargetOpcode::G_ZEXT ||
  405         UseMI.getOpcode() == TargetOpcode::G_ANYEXT) {
  408                                      UseMI.getOpcode(), &UseMI);
  468     if (UseMI->getOpcode() == Preferred.ExtendOpcode ||
  469         UseMI->getOpcode() == TargetOpcode::G_ANYEXT) {
  561   unsigned Opcode = MI.getOpcode();
  568   if (BaseDef && BaseDef->getOpcode() == TargetOpcode::G_FRAME_INDEX)
  574     if (Use.getOpcode() != TargetOpcode::G_GEP)
  628   unsigned Opcode = MI.getOpcode();
  650   if (BaseDef->getOpcode() == TargetOpcode::G_FRAME_INDEX) {
  655   if (MI.getOpcode() == TargetOpcode::G_STORE) {
  684   unsigned Opcode = MI.getOpcode();
  736   if (MI.getOpcode() != TargetOpcode::G_BR)
  761   if (BrCond->getOpcode() != TargetOpcode::G_BRCOND)
  769   if (!CmpMI || CmpMI->getOpcode() != TargetOpcode::G_ICMP ||
 1250   assert(MI.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS);
lib/CodeGen/GlobalISel/GISelKnownBits.cpp
   44   if (MI.getOpcode() == TargetOpcode::G_FRAME_INDEX) {
   94   unsigned Opcode = MI.getOpcode();
lib/CodeGen/GlobalISel/InstructionSelect.cpp
  168       if (MI.getOpcode() != TargetOpcode::COPY)
lib/CodeGen/GlobalISel/InstructionSelector.cpp
   63   if (RootI->getOpcode() != TargetOpcode::G_GEP)
   68   if (RHSI->getOpcode() != TargetOpcode::G_CONSTANT)
lib/CodeGen/GlobalISel/Legalizer.cpp
   67   switch (MI.getOpcode()) {
  101     if (isPreISelGenericOpcode(MI.getOpcode())) {
  170       if (!isPreISelGenericOpcode(MI.getOpcode()))
  221       assert(isPreISelGenericOpcode(MI.getOpcode()) && "Expecting generic opcode");
  260       assert(isPreISelGenericOpcode(MI.getOpcode()) && "Expecting generic opcode");
lib/CodeGen/GlobalISel/LegalizerHelper.cpp
   86   if (MI.getOpcode() == TargetOpcode::G_INTRINSIC ||
   87       MI.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS)
  382   auto Libcall = getRTLibDesc(MI.getOpcode(), Size);
  394   assert(MI.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS);
  484   RTLIB::Libcall Libcall = getConvRTLibDesc(MI.getOpcode(), ToType, FromType);
  497   switch (MI.getOpcode()) {
  605   switch (MI.getOpcode()) {
  813     bool ZExt = MI.getOpcode() == TargetOpcode::G_ZEXTLOAD;
  944     int OpIdx = MI.getOpcode() == TargetOpcode::G_EXTRACT_VECTOR_ELT ? 2 : 3;
 1410   switch (MI.getOpcode()) {
 1429     unsigned Opcode = MI.getOpcode() == TargetOpcode::G_UADDO
 1464     if (MI.getOpcode() == TargetOpcode::G_CTTZ) {
 1475     auto MIBNewOp = MIRBuilder.buildInstr(MI.getOpcode(), {WideTy}, {MIBSrc});
 1477     if (MI.getOpcode() == TargetOpcode::G_CTLZ ||
 1478         MI.getOpcode() == TargetOpcode::G_CTLZ_ZERO_UNDEF) {
 1581       unsigned CvtOp = MI.getOpcode() == TargetOpcode::G_ASHR ?
 1913   switch(MI.getOpcode()) {
 1919     MIRBuilder.buildInstr(MI.getOpcode() == G_SREM ? G_SDIV : G_UDIV)
 1945     unsigned Opcode = MI.getOpcode() == TargetOpcode::G_SMULO
 2048       if (MI.getOpcode() == TargetOpcode::G_LOAD) {
 2107       switch (MI.getOpcode()) {
 2316   const unsigned Opc = MI.getOpcode();
 2462         NewInsts.push_back(MIRBuilder.buildInstrNoInsert(MI.getOpcode())
 2470         NewInsts.push_back(MIRBuilder.buildInstrNoInsert(MI.getOpcode())
 2534     MachineInstr *NewInst = MIRBuilder.buildInstr(MI.getOpcode())
 2600     if (MI.getOpcode() == TargetOpcode::G_ICMP)
 2871   bool IsLoad = MI.getOpcode() == TargetOpcode::G_LOAD;
 2949   switch (MI.getOpcode()) {
 3060   if (MI.getOpcode() == TargetOpcode::G_SHL) {
 3078   } else if (MI.getOpcode() == TargetOpcode::G_LSHR) {
 3181   switch (MI.getOpcode()) {
 3205     auto HiS = MIRBuilder.buildInstr(MI.getOpcode(), {HalfTy}, {InH, Amt});
 3213     if (MI.getOpcode() == TargetOpcode::G_LSHR) {
 3219     auto LoL = MIRBuilder.buildInstr(MI.getOpcode(), {HalfTy},
 3263   unsigned Opc = MI.getOpcode();
 3430   bool IsMulHigh = MI.getOpcode() == TargetOpcode::G_UMULH;
 3609     auto Inst = MIRBuilder.buildInstr(MI.getOpcode(), {NarrowTy},
 3616       MI.getOpcode(),
 3676   unsigned Opc = MI.getOpcode();
 3967   const CmpInst::Predicate Pred = minMaxToCompare(MI.getOpcode());
 4027   unsigned NewOp = MI.getOpcode() == TargetOpcode::G_FMINNUM ?
 4274   const bool IsAdd = MI.getOpcode() == TargetOpcode::G_SADDO;
lib/CodeGen/GlobalISel/LegalizerInfo.cpp
  397   if (MI.getOpcode() == TargetOpcode::G_UNMERGE_VALUES && TypeIdx == 1)
  504   return getAction({MI.getOpcode(), Types, MemDescrs});
  742         if (isPreISelGenericOpcode(MI.getOpcode()) &&
lib/CodeGen/GlobalISel/Localizer.cpp
   70   switch (MI.getOpcode()) {
lib/CodeGen/GlobalISel/RegBankSelect.cpp
  692       if (isTargetSpecificOpcode(MI.getOpcode()) && !MI.isPreISelOpcode())
lib/CodeGen/GlobalISel/RegisterBankInfo.cpp
  152          MI.getOpcode() == TargetOpcode::REG_SEQUENCE;
lib/CodeGen/GlobalISel/Utils.cpp
  115   assert(!isPreISelGenericOpcode(I.getOpcode()) &&
  244   while ((MI = MRI.getVRegDef(VReg)) && !IsConstantOpcode(MI->getOpcode()) &&
  246     switch (MI->getOpcode()) {
  251           MI->getOpcode(),
  267   if (!MI || !IsConstantOpcode(MI->getOpcode()))
  298   if (TargetOpcode::G_FCONSTANT != MI->getOpcode())
  309   while (DefMI->getOpcode() == TargetOpcode::COPY) {
  322   return DefMI && DefMI->getOpcode() == Opcode ? DefMI : nullptr;
  401     switch (DefMI->getOpcode()) {
lib/CodeGen/ImplicitNullChecks.cpp
  640                  .addImm(MI->getOpcode());
lib/CodeGen/InlineSpiller.cpp
  539   return (MI.getOpcode() != TargetOpcode::STATEPOINT);
  804                       MI->getOpcode() == TargetOpcode::STATEPOINT ||
  805                       MI->getOpcode() == TargetOpcode::PATCHPOINT ||
  806                       MI->getOpcode() == TargetOpcode::STACKMAP;
lib/CodeGen/LiveIntervals.cpp
 1459   assert((!MI.isBundled() || MI.getOpcode() == TargetOpcode::BUNDLE) &&
lib/CodeGen/LocalStackSlotAllocation.cpp
  307       if (MI.isDebugInstr() || MI.getOpcode() == TargetOpcode::STATEPOINT ||
  308           MI.getOpcode() == TargetOpcode::STACKMAP ||
  309           MI.getOpcode() == TargetOpcode::PATCHPOINT)
lib/CodeGen/MIRPrinter.cpp
  758   OS << TII->getName(MI.getOpcode());
lib/CodeGen/MachineCSE.cpp
  422   if (MI->getOpcode() == TargetOpcode::LOAD_STACK_GUARD)
lib/CodeGen/MachineCombiner.cpp
  361     unsigned Opc = InstrPtr->getOpcode();
lib/CodeGen/MachineFrameInfo.cpp
  199       unsigned Opcode = MI.getOpcode();
lib/CodeGen/MachineInstr.cpp
  588   if (Other.getOpcode() != getOpcode() ||
  588   if (Other.getOpcode() != getOpcode() ||
 1546     OS << TII->getName(getOpcode());
 1981   HashComponents.push_back(MI->getOpcode());
lib/CodeGen/MachineLICM.cpp
 1297     TII->getOpcodeAfterMemoryUnfold(MI->getOpcode(),
 1343     CSEMap[MI.getOpcode()].push_back(&MI);
 1421   unsigned Opcode = MI->getOpcode();
 1467   unsigned Opcode = MI->getOpcode();
lib/CodeGen/MachinePipeliner.cpp
 1039     if (TII->isZeroCost(MI->getOpcode()))
 2308         if (ST.getInstrInfo()->isZeroCost((*I)->getInstr()->getOpcode()))
 2315     if (ST.getInstrInfo()->isZeroCost(SU->getInstr()->getOpcode()) ||
lib/CodeGen/MachineVerifier.cpp
  973   switch (MI->getOpcode()) {
  983     if (MI->getOpcode() == TargetOpcode::G_CONSTANT) {
 1023       if (MI->getOpcode() == TargetOpcode::G_ZEXTLOAD ||
 1024           MI->getOpcode() == TargetOpcode::G_SEXTLOAD) {
 1027       } else if (MI->getOpcode() == TargetOpcode::G_LOAD) {
 1030       } else if (MI->getOpcode() == TargetOpcode::G_STORE) {
 1081     if (MI->getOpcode() == TargetOpcode::G_INTTOPTR) {
 1086     } else if (MI->getOpcode() == TargetOpcode::G_PTRTOINT) {
 1092       assert(MI->getOpcode() == TargetOpcode::G_ADDRSPACE_CAST);
 1145     switch (MI->getOpcode()) {
 1355     bool NoSideEffects = MI->getOpcode() == TargetOpcode::G_INTRINSIC;
 1538   switch (MI->getOpcode()) {
 2780       if (I.getOpcode() == FrameSetupOpcode) {
 2787       if (I.getOpcode() == FrameDestroyOpcode) {
lib/CodeGen/MacroFusion.cpp
   71       dbgs() << DAG.TII->getName(FirstSU.getInstr()->getOpcode()) << " - "
   72              << DAG.TII->getName(SecondSU.getInstr()->getOpcode()) << '\n';);
lib/CodeGen/ModuloSchedule.cpp
 1876     assert(OI->getOpcode() == NI->getOpcode() && "Opcodes don't match?!");
 1876     assert(OI->getOpcode() == NI->getOpcode() && "Opcodes don't match?!");
lib/CodeGen/PatchableFunction.cpp
   43   switch (MI.getOpcode()) {
   75                  .addImm(FirstActualI->getOpcode());
lib/CodeGen/PeepholeOptimizer.cpp
  530     if (UseMI->getOpcode() == TargetOpcode::SUBREG_TO_REG)
 1090   switch (MI.getOpcode()) {
lib/CodeGen/PrologEpilogInserter.cpp
 1249       if (MI.getOpcode() == TargetOpcode::STATEPOINT) {
lib/CodeGen/SelectionDAG/FastISel.cpp
  540          FuncInfo.InsertPt->getOpcode() == TargetOpcode::EH_LABEL)
lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
  642       const MCInstrDesc &MCID = TII->get(MI.getOpcode());
lib/CodeGen/ShrinkWrap.cpp
  270   if (MI.getOpcode() == FrameSetupOpcode ||
  271       MI.getOpcode() == FrameDestroyOpcode) {
lib/CodeGen/StackColoring.cpp
  568   assert((MI.getOpcode() == TargetOpcode::LIFETIME_START ||
  569           MI.getOpcode() == TargetOpcode::LIFETIME_END) &&
  585   if (MI.getOpcode() == TargetOpcode::LIFETIME_START ||
  586       MI.getOpcode() == TargetOpcode::LIFETIME_END) {
  593     if (MI.getOpcode() == TargetOpcode::LIFETIME_END) {
  654       if (MI.getOpcode() == TargetOpcode::LIFETIME_START ||
  655           MI.getOpcode() == TargetOpcode::LIFETIME_END) {
  660         if (MI.getOpcode() == TargetOpcode::LIFETIME_START) {
  670           LLVM_DEBUG(dbgs() << (MI.getOpcode() == TargetOpcode::LIFETIME_START
  965       if (I.getOpcode() == TargetOpcode::LIFETIME_START ||
  966           I.getOpcode() == TargetOpcode::LIFETIME_END)
 1080       if (I.getOpcode() == TargetOpcode::LIFETIME_START ||
 1081           I.getOpcode() == TargetOpcode::LIFETIME_END || I.isDebugInstr())
lib/CodeGen/StackMapLivenessAnalysis.cpp
  134       if (I->getOpcode() == TargetOpcode::PATCHPOINT) {
lib/CodeGen/StackMaps.cpp
  364   assert(MI.getOpcode() == TargetOpcode::STACKMAP && "expected stackmap");
  373   assert(MI.getOpcode() == TargetOpcode::PATCHPOINT && "expected patchpoint");
  394   assert(MI.getOpcode() == TargetOpcode::STATEPOINT && "expected statepoint");
lib/CodeGen/TargetInstrInfo.cpp
  472   switch (MI.getOpcode()) {
  501       MF.CreateMachineInstr(TII.get(MI.getOpcode()), MI.getDebugLoc(), true);
  571   if (MI.getOpcode() == TargetOpcode::STACKMAP ||
  572       MI.getOpcode() == TargetOpcode::PATCHPOINT ||
  573       MI.getOpcode() == TargetOpcode::STATEPOINT) {
  636   if ((MI.getOpcode() == TargetOpcode::STACKMAP ||
  637        MI.getOpcode() == TargetOpcode::PATCHPOINT ||
  638        MI.getOpcode() == TargetOpcode::STATEPOINT) &&
  692   unsigned AssocOpcode = Inst.getOpcode();
  696   Commuted = MI1->getOpcode() != AssocOpcode && MI2->getOpcode() == AssocOpcode;
  696   Commuted = MI1->getOpcode() != AssocOpcode && MI2->getOpcode() == AssocOpcode;
  704   return MI1->getOpcode() == AssocOpcode &&
  831   unsigned Opcode = Root.getOpcode();
  976   if ((!StackGrowsDown && MI.getOpcode() == FrameSetupOpcode) ||
  977       (StackGrowsDown && MI.getOpcode() == FrameDestroyOpcode))
 1091   if (isHighLatencyDef(DefMI.getOpcode()))
lib/CodeGen/TargetLoweringBase.cpp
 1057       assert(MI->getOpcode() == TargetOpcode::STATEPOINT && "sanity");
 1082     if (MI->getOpcode() != TargetOpcode::STATEPOINT) {
 1102   assert(MI.getOpcode() == TargetOpcode::PATCHABLE_EVENT_CALL &&
 1117   assert(MI.getOpcode() == TargetOpcode::PATCHABLE_TYPED_EVENT_CALL &&
lib/CodeGen/TwoAddressInstructionPass.cpp
 1352       TII->getOpcodeAfterMemoryUnfold(MI.getOpcode(),
lib/CodeGen/XRayInstrumentation.cpp
   97           (op.HandleAllReturns || T.getOpcode() == TII->getReturnOpcode())) {
  109                        .addImm(T.getOpcode());
  130           (op.HandleAllReturns || T.getOpcode() == TII->getReturnOpcode())) {
lib/Target/AArch64/AArch64A53Fix835769.cpp
   41   switch (MI->getOpcode()) {
   58   switch (MI->getOpcode()) {
lib/Target/AArch64/AArch64A57FPLoadBalancing.cpp
   69   switch (MI->getOpcode()) {
   82   switch (MI->getOpcode()) {
lib/Target/AArch64/AArch64AdvSIMDScalarPass.cpp
  132   if (MI->getOpcode() == AArch64::FMOVDXr ||
  133       MI->getOpcode() == AArch64::FMOVXDr)
  137   if (MI->getOpcode() == AArch64::UMOVvi64 && MI->getOperand(2).getImm() == 0) {
  143   if (MI->getOpcode() == AArch64::COPY) {
  185   unsigned Opc = MI.getOpcode();
  253     else if (Use->getOpcode() == AArch64::INSERT_SUBREG ||
  254              Use->getOpcode() == AArch64::INSvi64gpr)
  291   unsigned OldOpc = MI.getOpcode();
lib/Target/AArch64/AArch64AsmPrinter.cpp
  242       MI.getOpcode() == AArch64::HWASAN_CHECK_MEMACCESS_SHORTGRANULES;
  764   bool IsByteEntry = MI.getOpcode() == AArch64::JumpTableDest8;
  806         MII->getOpcode() == AArch64::DBG_VALUE ||
  807         MII->getOpcode() == TargetOpcode::PATCHPOINT ||
  808         MII->getOpcode() == TargetOpcode::STACKMAP)
  880     switch (MI.getOpcode()) {
  922   switch (MI->getOpcode()) {
lib/Target/AArch64/AArch64BranchTargets.cpp
  122   if (MBBI != MBB.end() && (MBBI->getOpcode() == AArch64::PACIASP ||
  123                             MBBI->getOpcode() == AArch64::PACIBSP))
lib/Target/AArch64/AArch64CallLowering.cpp
  641     if (!RegDef || RegDef->getOpcode() != TargetOpcode::COPY) {
lib/Target/AArch64/AArch64CleanupLocalDynamicTLSPass.cpp
   69       switch (I->getOpcode()) {
lib/Target/AArch64/AArch64CollectLOH.cpp
  177   switch (MI.getOpcode()) {
  199   switch (MI.getOpcode()) {
  223   switch (MI.getOpcode()) {
  244   switch (MI.getOpcode()) {
  313   } else if (MI.getOpcode() == AArch64::ADDXri) {
  317   } else if ((MI.getOpcode() == AArch64::LDRXui ||
  318               MI.getOpcode() == AArch64::LDRWui) &&
  350   if (MI.getOpcode() == AArch64::ADDXri && canAddBePartOfLOH(MI)) {
  363     assert((MI.getOpcode() == AArch64::LDRXui ||
  364             MI.getOpcode() == AArch64::LDRWui) &&
  526       unsigned Opcode = MI.getOpcode();
lib/Target/AArch64/AArch64CompressJumpTables.cpp
   86   if (MI.getOpcode() != AArch64::JumpTableDest32)
lib/Target/AArch64/AArch64CondBrTuning.cpp
  100   unsigned NewOpc = TII->convertToFlagSettingOpc(MI.getOpcode(), Is64Bit);
  116   switch (MI.getOpcode()) {
  149   unsigned MIOpc = MI.getOpcode();
  151   switch (DefMI.getOpcode()) {
  310       switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64ConditionOptimizer.cpp
  151   if (I->getOpcode() != AArch64::Bcc)
  166     switch (I->getOpcode()) {
  243   unsigned Opc = CmpMI->getOpcode();
  321   if (std::get<0>(Info) == ToImm && std::get<1>(Info) == To->getOpcode()) {
lib/Target/AArch64/AArch64ConditionalCompares.cpp
  304     switch (I->getOpcode()) {
  321     switch (I->getOpcode()) {
  654   switch (CmpMI->getOpcode()) {
  706     bool isNZ = CmpMI->getOpcode() == AArch64::CBNZW ||
  707                 CmpMI->getOpcode() == AArch64::CBNZX;
  743   switch (CmpMI->getOpcode()) {
lib/Target/AArch64/AArch64DeadRegisterDefinitionsPass.cpp
  136     if (atomicBarrierDroppedOnZero(MI.getOpcode()) || atomicReadDroppedOnZero(MI.getOpcode())) {
  136     if (atomicBarrierDroppedOnZero(MI.getOpcode()) || atomicReadDroppedOnZero(MI.getOpcode())) {
lib/Target/AArch64/AArch64ExpandPseudoInsts.cpp
  353   bool ZeroData = MI.getOpcode() == AArch64::STZGloop;
  407   unsigned Opcode = MI.getOpcode();
  437     switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64FalkorHWPFFix.cpp
  242   switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64FastISel.cpp
 4477   switch (LI->getOpcode()) {
 4497   switch (LI->getOpcode()) {
 4543   if (LoadMI->getOpcode() == TargetOpcode::COPY &&
 4567     assert((MI->getOpcode() == TargetOpcode::COPY &&
lib/Target/AArch64/AArch64FrameLowering.cpp
  191           MI.getOpcode() == AArch64::ADDXri ||
  192           MI.getOpcode() == AArch64::ADDSXri)
  279   unsigned Opc = I->getOpcode();
  486   unsigned Opc = MBBI->getOpcode();
  607   switch (MBBI->getOpcode()) {
  631   while (MBBI->getOpcode() == AArch64::STRXpost ||
  632          MBBI->getOpcode() == AArch64::LDRXpre ||
  633          MBBI->getOpcode() == AArch64::CFI_INSTRUCTION) {
  634     if (MBBI->getOpcode() != AArch64::CFI_INSTRUCTION)
  640   switch (MBBI->getOpcode()) {
  732   unsigned Opc = MI.getOpcode();
 1298       MBBI->getOpcode() == AArch64::RET_ReallyLR) {
 1312   switch (MI.getOpcode()) {
 1339     unsigned RetOpcode = MBBI->getOpcode();
 1546       if (Prev->getOpcode() != AArch64::LDRXpre ||
lib/Target/AArch64/AArch64ISelLowering.cpp
 1397   switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64InstrInfo.cpp
   81     auto Op = MI.getOpcode();
  129   switch (LastInst->getOpcode()) {
  142     Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
  151     Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
  188   switch (MI.getOpcode()) {
  225   unsigned LastOpc = LastInst->getOpcode();
  241   unsigned SecondLastOpc = SecondLastInst->getOpcode();
  249       LastOpc = LastInst->getOpcode();
  256         SecondLastOpc = SecondLastInst->getOpcode();
  342   if (!isUncondBranchOpcode(I->getOpcode()) &&
  343       !isCondBranchOpcode(I->getOpcode()))
  357   if (!isCondBranchOpcode(I->getOpcode())) {
  440   switch (DefMI->getOpcode()) {
  688   const unsigned Opcode = MI.getOpcode();
  764   switch (MI.getOpcode()) {
  886   unsigned Opc = MI.getOpcode();
  914   switch (MI.getOpcode()) {
  968   switch (MI.getOpcode()) {
  995   switch (MI.getOpcode()) {
 1040                    MI.getOpcode() == AArch64::ANDSWri ? 32 : 64) != 0;
 1095   switch (MI.getOpcode()) {
 1097     return MI.getOpcode();
 1195     unsigned Opc = CmpInstr.getOpcode();
 1227   switch (Instr.getOpcode()) {
 1239     return Instr.getOpcode();
 1305   switch (Instr.getOpcode()) {
 1401   const unsigned CmpOpcode = CmpInstr->getOpcode();
 1412   if (sForm(*MI) != MI->getOpcode())
 1469   if (MI.getOpcode() != TargetOpcode::LOAD_STACK_GUARD &&
 1470       MI.getOpcode() != AArch64::CATCHRET)
 1478   if (MI.getOpcode() == AArch64::CATCHRET) {
 1577   switch (MI.getOpcode()) {
 1601   switch (MI.getOpcode()) {
 1631   switch (MI.getOpcode()) {
 1653   switch (MI.getOpcode()) {
 1676   switch (MI.getOpcode()) {
 1806   switch (MI.getOpcode()) {
 1966     switch (MI.getOpcode()) {
 2013   if (!getMemOpInfo(LdSt.getOpcode(), Scale, Width, Dummy1, Dummy2))
 2349   unsigned FirstOpc = FirstLdSt.getOpcode();
 2350   unsigned SecondOpc = SecondLdSt.getOpcode();
 3382   switch (MI.getOpcode()) {
 3405   if (!AArch64InstrInfo::getMemOpInfo(MI.getOpcode(), Scale, Width, MinOff,
 3410   bool IsMulVL = isSVEScaledImmInstruction(MI.getOpcode());
 3415       MI.getOperand(AArch64InstrInfo::getLoadStoreImmIdx(MI.getOpcode()));
 3422       AArch64InstrInfo::getUnscaledLdSt(MI.getOpcode());
 3461   unsigned Opcode = MI.getOpcode();
 3560   switch (Inst.getOpcode()) {
 3603   if (!MI || MI->getParent() != &MBB || (unsigned)MI->getOpcode() != CombineOpc)
 3641   switch (Inst.getOpcode()) {
 3666   unsigned Opc = Root.getOpcode();
 3754   switch (Root.getOpcode()) {
 4749   switch (MI.getOpcode()) {
 4802   switch (DefMI->getOpcode()) {
 4815     bool Is32Bit = (DefMI->getOpcode() == AArch64::ANDWri);
 5100   unsigned LastInstrOpcode = RepeatedSequenceLocs[0].back()->getOpcode();
 5149       getMemOpInfo(MI.getOpcode(), Scale, DummyWidth, MinOffset, MaxOffset);
 5418   if (MI.getOpcode() == AArch64::ADRP)
 5455     if (MI.getOpcode() == AArch64::BLR || MI.getOpcode() == AArch64::BL)
 5455     if (MI.getOpcode() == AArch64::BLR || MI.getOpcode() == AArch64::BL)
 5493   if (MI.getOpcode() == AArch64::HINT) {
 5520     getMemOpInfo(MI.getOpcode(), Scale, Width, Dummy1, Dummy2);
 5539     if (Call->getOpcode() == AArch64::BL) {
 5542       assert(Call->getOpcode() == AArch64::BLR);
 5711   if (MI.getOpcode() == AArch64::ORRWrs &&
 5719   if (MI.getOpcode() == AArch64::ORRXrs &&
lib/Target/AArch64/AArch64InstrInfo.h
   85     return isUnscaledLdSt(MI.getOpcode());
lib/Target/AArch64/AArch64InstructionSelector.cpp
  747     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  958   if (CCMI->getOpcode() == TargetOpcode::G_TRUNC)
  960   if (CCMI->getOpcode() != TargetOpcode::G_ICMP)
 1011   assert(I.getOpcode() == TargetOpcode::G_SHL);
 1041   assert(I.getOpcode() == TargetOpcode::G_ASHR);
 1159   switch (I.getOpcode()) {
 1180     if (AmtMI->getOpcode() != TargetOpcode::G_CONSTANT) {
 1203   assert(I.getOpcode() == TargetOpcode::G_SHL && "unexpected op");
 1235   assert(I.getOpcode() == TargetOpcode::G_STORE && "Expected G_STORE");
 1283   switch (I.getOpcode()) {
 1324   unsigned Opcode = I.getOpcode();
 1719     bool IsZExtLoad = I.getOpcode() == TargetOpcode::G_ZEXTLOAD;
 1758         selectLoadStoreUIOp(I.getOpcode(), RB.getID(), MemSizeInBits);
 1759     if (NewOpc == I.getOpcode())
 1768     if (PtrMI->getOpcode() == TargetOpcode::G_GEP) {
 1784     if (!Offset && PtrMI->getOpcode() == TargetOpcode::G_FRAME_INDEX)
 1792         if (I.getOpcode() == AArch64::STRWui)
 1794         else if (I.getOpcode() == AArch64::STRXui)
 1841     unsigned NewOpc = I.getOpcode() == TargetOpcode::G_SMULH ? AArch64::SMULHrr
 1874     const unsigned NewOpc = selectBinaryOp(I.getOpcode(), RB.getID(), OpSize);
 1875     if (NewOpc == I.getOpcode())
 2059     unsigned Opcode = I.getOpcode();
 2334   assert(I.getOpcode() == TargetOpcode::G_BRJT && "Expected G_BRJT");
 2354   assert(I.getOpcode() == TargetOpcode::G_JUMP_TABLE && "Expected jump table");
 2711   assert(I.getOpcode() == TargetOpcode::G_MERGE_VALUES && "unexpected opcode");
 2866   assert(I.getOpcode() == TargetOpcode::G_EXTRACT_VECTOR_ELT &&
 2936   assert(I.getOpcode() == TargetOpcode::G_UNMERGE_VALUES &&
 3051   assert(I.getOpcode() == TargetOpcode::G_CONCAT_VECTORS &&
 3333   assert(I.getOpcode() == TargetOpcode::G_FCONSTANT &&
 3409     unsigned Opc = CondDef->getOpcode();
 3425   unsigned CondOpc = CondDef->getOpcode();
 3493     if (!DefMI || DefMI->getOpcode() != TargetOpcode::G_SUB)
 3501     if (DefMI->getOpcode() != TargetOpcode::G_SUB)
 3554       LHSDef->getOpcode() == TargetOpcode::G_AND) {
 3775   assert(I.getOpcode() == TargetOpcode::G_INSERT_VECTOR_ELT);
 3851   assert(I.getOpcode() == TargetOpcode::G_BUILD_VECTOR);
 4210   unsigned OffsetOpc = OffsetInst->getOpcode();
 4287   if (!Gep || Gep->getOpcode() != TargetOpcode::G_GEP)
 4358   if (!RHS || RHS->getOpcode() != TargetOpcode::G_CONSTANT)
 4395   if (RootDef->getOpcode() == TargetOpcode::G_FRAME_INDEX) {
 4411         if (LHSDef->getOpcode() == TargetOpcode::G_FRAME_INDEX)
 4440   switch (MI.getOpcode()) {
 4498   unsigned Opc = MI.getOpcode();
 4588   if (RootDef->getOpcode() == TargetOpcode::G_SHL) {
 4638   assert(MI.getOpcode() == TargetOpcode::G_CONSTANT && "Expected G_CONSTANT");
 4646   assert(I.getOpcode() == TargetOpcode::G_CONSTANT && "Expected G_CONSTANT");
 4654   assert(I.getOpcode() == TargetOpcode::G_CONSTANT && "Expected G_CONSTANT");
 4678   switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64LegalizerInfo.cpp
  623   switch (MI.getOpcode()) {
  662   assert(MI.getOpcode() == TargetOpcode::G_ASHR ||
  663          MI.getOpcode() == TargetOpcode::G_LSHR ||
  664          MI.getOpcode() == TargetOpcode::G_SHL);
  670   if (CstMI->getOpcode() != TargetOpcode::G_CONSTANT)
  686   assert(MI.getOpcode() == TargetOpcode::G_STORE ||
  687          MI.getOpcode() == TargetOpcode::G_LOAD);
  710   if (MI.getOpcode() == TargetOpcode::G_STORE) {
lib/Target/AArch64/AArch64LoadStoreOptimizer.cpp
  207   switch (MI.getOpcode()) {
  220   switch (MI.getOpcode()) {
  383   unsigned LdOpc = LoadInst.getOpcode();
  384   unsigned StOpc = StoreInst.getOpcode();
  564   switch (MI.getOpcode()) {
  636   unsigned Opc = MI.getOpcode();
  643   switch (MI.getOpcode()) {
  661   unsigned Opc = MI.getOpcode();
  732   unsigned Opc = I->getOpcode();
  801       SExtIdx == -1 ? I->getOpcode() : getMatchingNonSExtOpcode(I->getOpcode());
  801       SExtIdx == -1 ? I->getOpcode() : getMatchingNonSExtOpcode(I->getOpcode());
  816   bool PairedIsUnscaled = TII->isUnscaledLdSt(Paired->getOpcode());
  849   if (TII->isUnscaledLdSt(RtMI->getOpcode())) {
 1175   unsigned OpcA = FirstMI.getOpcode();
 1176   unsigned OpcB = MI.getOpcode();
 1376   assert((Update->getOpcode() == AArch64::ADDXri ||
 1377           Update->getOpcode() == AArch64::SUBXri) &&
 1389   if (Update->getOpcode() == AArch64::SUBXri)
 1392   unsigned NewOpc = IsPreIdx ? getPreIndexedOpcode(I->getOpcode())
 1393                              : getPostIndexedOpcode(I->getOpcode());
 1444   switch (MI.getOpcode()) {
 1464     if (MI.getOpcode() == AArch64::SUBXri)
 1509   if (!isTagStore(MemMI) && MemMI.getOpcode() != AArch64::STGPi) {
 1708   if (TII->isUnscaledLdSt(MI.getOpcode()))
lib/Target/AArch64/AArch64MCInstLower.cpp
  297   OutMI.setOpcode(MI->getOpcode());
lib/Target/AArch64/AArch64MacroFusion.cpp
   25   if (SecondMI.getOpcode() != AArch64::Bcc)
   32   switch (FirstMI->getOpcode()) {
   66   if (SecondMI.getOpcode() != AArch64::CBZW &&
   67       SecondMI.getOpcode() != AArch64::CBZX &&
   68       SecondMI.getOpcode() != AArch64::CBNZW &&
   69       SecondMI.getOpcode() != AArch64::CBNZX)
   76   switch (FirstMI->getOpcode()) {
  117   switch (SecondMI.getOpcode()) {
  121     return FirstMI == nullptr || FirstMI->getOpcode() == AArch64::AESErr;
  125     return FirstMI == nullptr || FirstMI->getOpcode() == AArch64::AESDrr;
  134   if (SecondMI.getOpcode() != AArch64::EORv16i8)
  141   switch (FirstMI->getOpcode()) {
  160   if ((FirstMI == nullptr || FirstMI->getOpcode() == AArch64::ADRP) &&
  161       SecondMI.getOpcode() == AArch64::ADDXri)
  165   if ((FirstMI == nullptr || FirstMI->getOpcode() == AArch64::MOVZWi) &&
  166       (SecondMI.getOpcode() == AArch64::MOVKWi &&
  171   if((FirstMI == nullptr || FirstMI->getOpcode() == AArch64::MOVZXi) &&
  172      (SecondMI.getOpcode() == AArch64::MOVKXi &&
  178        (FirstMI->getOpcode() == AArch64::MOVKXi &&
  180       (SecondMI.getOpcode() == AArch64::MOVKXi &&
  190   switch (SecondMI.getOpcode()) {
  218    switch (FirstMI->getOpcode()) {
  233   if (SecondMI.getOpcode() == AArch64::CSELWr) {
  239       switch (FirstMI->getOpcode()) {
  251   if (SecondMI.getOpcode() == AArch64::CSELXr) {
  257       switch (FirstMI->getOpcode()) {
  278   switch (SecondMI.getOpcode()) {
  318     switch (FirstMI->getOpcode()) {
  354     switch (FirstMI->getOpcode()) {
lib/Target/AArch64/AArch64PBQPRegAlloc.cpp
  353       switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64PreLegalizerCombiner.cpp
   64   switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64RedundantCopyElimination.cpp
  127   unsigned Opc = CondBr.getOpcode();
  171     switch (PredI.getOpcode()) {
lib/Target/AArch64/AArch64RegisterBankInfo.cpp
  275   switch (MI.getOpcode()) {
  371   switch (OpdMapper.getMI().getOpcode()) {
  420   const unsigned Opc = MI.getOpcode();
  463   unsigned Op = MI.getOpcode();
  482   switch (MI.getOpcode()) {
  496   switch (MI.getOpcode()) {
  510   const unsigned Opc = MI.getOpcode();
  824     unsigned DefOpc = DefMI->getOpcode();
lib/Target/AArch64/AArch64RegisterInfo.cpp
  465   if (MI.isDebugValue() || MI.getOpcode() == TargetOpcode::STACKMAP ||
  466       MI.getOpcode() == TargetOpcode::PATCHPOINT) {
  477   if (MI.getOpcode() == TargetOpcode::LOCAL_ESCAPE) {
  485   if (MI.getOpcode() == AArch64::TAGPstack) {
lib/Target/AArch64/AArch64SIMDInstrOpt.cpp
  324     if (CurrentMI->getOpcode() == DupOpcode &&
  354   switch (MI.getOpcode()) {
  420   if (!shouldReplaceInst(MI.getParent()->getParent(), &TII->get(MI.getOpcode()),
  516     if (MI.getOpcode() == I.OrigOpc) {
  540   if (!shouldReplaceInst(MI.getParent()->getParent(), &TII->get(MI.getOpcode()),
  550   switch (MI.getOpcode()) {
  634   if (DefiningMI->getOpcode() != AArch64::REG_SEQUENCE)
  667   switch (MI.getOpcode()) {
lib/Target/AArch64/AArch64SpeculationHardening.cpp
  551   unsigned Opcode = MI.getOpcode();
lib/Target/AArch64/AArch64StackTaggingPreRA.cpp
  151     if (isUncheckedLoadOrStoreOpcode(UseI->getOpcode())) {
  153       unsigned OpIdx = TII->getLoadStoreImmIdx(UseI->getOpcode()) - 1;
  192       if (I.getOpcode() == AArch64::TAGPstack) {
lib/Target/AArch64/AArch64StorePairSuppress.cpp
  110   switch (MI.getOpcode()) {
lib/Target/AMDGPU/AMDGPUAsmPrinter.cpp
  189   return (MBB->back().getOpcode() != AMDGPU::S_SETPC_B64);
lib/Target/AMDGPU/AMDGPUInstructionSelector.cpp
  272     unsigned InstOpc = getLogicalBitOpcode(I.getOpcode(),
  291     unsigned InstOpc = getLogicalBitOpcode(I.getOpcode(), Size > 32);
  307   const bool Sub = I.getOpcode() == TargetOpcode::G_SUB;
  403   const bool IsAdd = I.getOpcode() == AMDGPU::G_UADDO;
  807   if (Def->getOpcode() == AMDGPU::G_CONSTANT) {
  819   if (Def->getOpcode() == AMDGPU::G_ADD) {
 1218   bool Signed = I.getOpcode() == AMDGPU::G_SEXT;
 1274   if (I.getOpcode() == AMDGPU::G_ANYEXT)
 1376   bool Signed = I.getOpcode() == AMDGPU::G_SITOFP;
 1471   return MI.getOpcode() == TargetOpcode::G_CONSTANT;
 1481   if (PtrMI->getOpcode() != TargetOpcode::G_GEP)
 1683   switch (I.getOpcode()) {
 1785   if (MI && MI->getOpcode() == AMDGPU::G_FNEG) {
 1791   if (MI && MI->getOpcode() == AMDGPU::G_FABS) {
 1964   if (!OpDef || OpDef->getOpcode() != AMDGPU::G_GEP)
 2055           if (LHSDef->getOpcode() == AMDGPU::G_FRAME_INDEX)
 2062     } else if (RootDef->getOpcode() == AMDGPU::G_FRAME_INDEX) {
 2162   } else if (RootDef->getOpcode() == AMDGPU::G_SUB) {
 2180   assert(MI.getOpcode() == TargetOpcode::G_CONSTANT && "Expected G_CONSTANT");
lib/Target/AMDGPU/AMDGPULegalizerInfo.cpp
 1094   switch (MI.getOpcode()) {
 1472   const bool IsIEEEOp = MI.getOpcode() == AMDGPU::G_FMINNUM_IEEE ||
 1473                         MI.getOpcode() == AMDGPU::G_FMAXNUM_IEEE;
 1569   Intrinsic::ID TrigIntrin = MI.getOpcode() == AMDGPU::G_FSIN ?
 1773     UseMI.getOpcode() == AMDGPU::G_BRCOND ? &UseMI : nullptr;
lib/Target/AMDGPU/AMDGPUMCInstLower.cpp
  111       skipDebugInstructionsForward(SrcBB.begin(), SrcBB.end())->getOpcode() ==
  177   unsigned Opcode = MI->getOpcode();
  204                 "a target-specific version: " + Twine(MI->getOpcode()));
  282     if (MI->getOpcode() == AMDGPU::SI_MASK_BRANCH) {
  297     if (MI->getOpcode() == AMDGPU::SI_RETURN_TO_EPILOG) {
  303     if (MI->getOpcode() == AMDGPU::WAVE_BARRIER) {
  309     if (MI->getOpcode() == AMDGPU::SI_MASKED_UNREACHABLE) {
  376   OutMI.setOpcode(MI->getOpcode());
lib/Target/AMDGPU/AMDGPUMacroFusion.cpp
   34   switch (SecondMI.getOpcode()) {
lib/Target/AMDGPU/AMDGPURegisterBankInfo.cpp
  346   switch (MI.getOpcode()) {
 1329   unsigned Opc = MI.getOpcode();
 1579     if (MI.getOpcode() == AMDGPU::G_SEXT)
 2142   if (MI.getOpcode() == TargetOpcode::G_PHI) {
 2190   switch (MI.getOpcode()) {
 2445     if (MI.getOpcode() == AMDGPU::G_ANYEXT) {
lib/Target/AMDGPU/AMDGPUSubtarget.cpp
  862            MAI.getOpcode() == AMDGPU::V_ACCVGPR_WRITE_B32 ||
  863            MAI.getOpcode() == AMDGPU::V_ACCVGPR_READ_B32)
lib/Target/AMDGPU/AMDILCFGStructurizer.cpp
  434     if (I->getOpcode() == R600::PRED_X) {
  574   switch (MI->getOpcode()) {
  585   switch (MI->getOpcode()) {
  625       else if (!TII->isMov(MI->getOpcode()))
  636     if (instr->getOpcode() == R600::RETURN)
  689      if (Pre->getOpcode() == R600::CONTINUE
  690          && It->getOpcode() == R600::ENDLOOP)
 1391   int OldOpcode = BranchMI->getOpcode();
 1475     int OldOpcode = MI->getOpcode();
lib/Target/AMDGPU/GCNDPPCombine.cpp
  139   switch(Def->getOpcode()) {
  158   assert(MovMI.getOpcode() == AMDGPU::V_MOV_B32_dpp);
  160   auto OrigOp = OrigMI.getOpcode();
  325     if (!isIdentityValue(OrigMI.getOpcode(), OldOpndValue)) {
  351   assert(MovMI.getOpcode() == AMDGPU::V_MOV_B32_dpp);
  457     auto OrigOp = OrigMI.getOpcode();
  568       if (MI.getOpcode() == AMDGPU::V_MOV_B32_dpp && combineDPPMov(MI)) {
  571       } else if (MI.getOpcode() == AMDGPU::V_MOV_B64_DPP_PSEUDO) {
lib/Target/AMDGPU/GCNHazardRecognizer.cpp
   95   if (TII.isAlwaysGDS(MI.getOpcode()))
   98   switch (MI.getOpcode()) {
  109     if (TII.isDS(MI.getOpcode())) {
  110       int GDS = AMDGPU::getNamedOperandIdx(MI.getOpcode(),
  120   unsigned Opcode = MI.getOpcode();
  161   if (isDivFMas(MI->getOpcode()) && checkDivFMasHazards(MI) > 0)
  164   if (isRWLane(MI->getOpcode()) && checkRWLaneHazards(MI) > 0)
  167   if (isSGetReg(MI->getOpcode()) && checkGetRegHazards(MI) > 0)
  170   if (isSSetReg(MI->getOpcode()) && checkSetRegHazards(MI) > 0)
  173   if (isRFE(MI->getOpcode()) && checkRFEHazards(MI) > 0)
  177       (TII.isVINTRP(*MI) || isSMovRel(MI->getOpcode())) &&
  271   if (isDivFMas(MI->getOpcode()))
  274   if (isRWLane(MI->getOpcode()))
  280   if (isSGetReg(MI->getOpcode()))
  283   if (isSSetReg(MI->getOpcode()))
  286   if (isRFE(MI->getOpcode()))
  290                                            isSMovRel(MI->getOpcode())))
  458     return isSSetReg(MI->getOpcode()) && IsHazard(MI);
  674   unsigned Opcode = MI.getOpcode();
  838       return MI->getOpcode() == AMDGPU::S_MOV_FED_B32;
  879     unsigned Opc = MI->getOpcode();
  932                   (MI->getOpcode() == AMDGPU::S_WAITCNT &&
  953   switch (MI->getOpcode()) {
  988         switch (MI->getOpcode()) {
 1058     if (MI->getOpcode() == AMDGPU::S_WAITCNT_DEPCTR &&
 1092                  (I->getOpcode() == AMDGPU::S_WAITCNT_VSCNT &&
 1114       return I->getOpcode() == AMDGPU::S_WAITCNT_VSCNT &&
 1153     const AMDGPU::MIMGInfo *Info = AMDGPU::getMIMGInfo(I->getOpcode());
 1164   if (MI->getOpcode() != AMDGPU::S_DENORM_MODE)
 1177     switch (MI->getOpcode()) {
 1201   unsigned Opc = MI->getOpcode();
 1235            MI->getOpcode() != AMDGPU::V_ACCVGPR_WRITE_B32 &&
 1236            MI->getOpcode() != AMDGPU::V_ACCVGPR_READ_B32;
 1306       if (MI->getOpcode() != AMDGPU::V_ACCVGPR_WRITE_B32)
 1373     return MI->getOpcode() == AMDGPU::V_ACCVGPR_READ_B32;
 1394       if (MI->getOpcode() != AMDGPU::V_ACCVGPR_READ_B32)
lib/Target/AMDGPU/GCNNSAReassign.cpp
  165   const AMDGPU::MIMGInfo *Info = AMDGPU::getMIMGInfo(MI.getOpcode());
  170     AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::vaddr0);
  270     const AMDGPU::MIMGInfo *Info = AMDGPU::getMIMGInfo(MI->getOpcode());
  272       AMDGPU::getNamedOperandIdx(MI->getOpcode(), AMDGPU::OpName::vaddr0);
lib/Target/AMDGPU/GCNRegBankReassign.cpp
  412     if (Def->getOpcode() == TargetOpcode::IMPLICIT_DEF)
lib/Target/AMDGPU/R600AsmPrinter.cpp
   53       if (MI.getOpcode() == R600::KILLGT)
lib/Target/AMDGPU/R600ClauseMergePass.cpp
   35   switch (MI.getOpcode()) {
   87       .getOperand(TII->getOperandIdx(MI.getOpcode(), R600::OpName::COUNT))
   94       .getOperand(TII->getOperandIdx(MI.getOpcode(), R600::OpName::Enabled))
  127   if (RootCFAlu.getOpcode() == R600::CF_ALU_PUSH_BEFORE)
  178   RootCFAlu.setDesc(TII->get(LatrCFAlu.getOpcode()));
  197           TII->mustBeLastInClause(MI.getOpcode()))
lib/Target/AMDGPU/R600ControlFlowFinalizer.cpp
  241     switch (MI.getOpcode()) {
  379           TII->getOperandIdx(MI.getOpcode(), R600::OpName::literal));
  420       if (!I->isBundle() && !TII->isALUInstr(I->getOpcode()))
  542         if (MI->getOpcode() != R600::ENDIF)
  544         if (MI->getOpcode() == R600::CF_ALU)
  548             CFStack.requiresWorkAroundForInst(MI->getOpcode());
  549         switch (MI->getOpcode()) {
  676           if (TII->isExport(MI->getOpcode())) {
lib/Target/AMDGPU/R600EmitClauseMarkers.cpp
   53     switch (MI.getOpcode()) {
   67     if (TII->isLDSRetInstr(MI.getOpcode()))
   70     if (TII->isVector(MI) || TII->isCubeOp(MI.getOpcode()) ||
   71         TII->isReductionOp(MI.getOpcode()))
   86     if (TII->isALUInstr(MI.getOpcode()))
   88     if (TII->isVector(MI) || TII->isCubeOp(MI.getOpcode()))
   90     switch (MI.getOpcode()) {
  104     switch (MI.getOpcode()) {
  134     if (!TII->isALUInstr(MI.getOpcode()) && MI.getOpcode() != R600::DOT_4)
  140         (TII->isALUInstr(MI.getOpcode()) || MI.getOpcode() == R600::DOT_4) &&
  255       if (I->getOpcode() == R600::PRED_X) {
  276       if (TII->mustBeLastInClause(I->getOpcode())) {
  324       if (I != MBB.end() && I->getOpcode() == R600::CF_ALU)
lib/Target/AMDGPU/R600ExpandSpecialInstrs.cpp
   97       if (TII->isLDSRetInstr(MI.getOpcode())) {
   98         int DstIdx = TII->getOperandIdx(MI.getOpcode(), R600::OpName::dst);
  104         int LDSPredSelIdx = TII->getOperandIdx(MI.getOpcode(),
  106         int MovPredSelIdx = TII->getOperandIdx(Mov->getOpcode(),
  113       switch (MI.getOpcode()) {
  155           unsigned Opcode = BMI->getOpcode();
  175       bool IsReduction = TII->isReductionOp(MI.getOpcode());
  177       bool IsCube = TII->isCubeOp(MI.getOpcode());
  251         unsigned Opcode = MI.getOpcode();
lib/Target/AMDGPU/R600ISelLowering.cpp
  286   return std::next(I)->getOpcode() == R600::RETURN;
  297   switch (MI.getOpcode()) {
  301     if (TII->isLDSRetInstr(MI.getOpcode())) {
  302       int DstIdx = TII->getOperandIdx(MI.getOpcode(), R600::OpName::dst);
  308           MI.getOpcode() == R600::LDS_CMPST_RET)
  312                       TII->get(R600::getLDSNoRetOp(MI.getOpcode())));
  379     BuildMI(*BB, I, BB->findDebugLoc(I), TII->get(MI.getOpcode()))
  386     BuildMI(*BB, I, BB->findDebugLoc(I), TII->get(MI.getOpcode()))
  434       if (NextExportInst->getOpcode() == R600::EG_ExportSwz ||
  435           NextExportInst->getOpcode() == R600::R600_ExportSwz) {
  447     unsigned CfInst = (MI.getOpcode() == R600::EG_ExportSwz) ? 84 : 40;
  448     BuildMI(*BB, I, BB->findDebugLoc(I), TII->get(MI.getOpcode()))
lib/Target/AMDGPU/R600InstrInfo.cpp
   58   return get(MI.getOpcode()).TSFlags & R600_InstFlag::VECTOR;
  160   if (isALUInstr(MI.getOpcode()))
  162   if (isVector(MI) || isCubeOp(MI.getOpcode()))
  164   switch (MI.getOpcode()) {
  184   return isTransOnly(MI.getOpcode());
  192   return isVectorOnly(MI.getOpcode());
  206          usesVertexCache(MI.getOpcode());
  216           usesVertexCache(MI.getOpcode())) ||
  217           usesTextureCache(MI.getOpcode());
  239   if (!isALUInstr(MI.getOpcode())) {
  281   if (MI.getOpcode() == R600::DOT_4) {
  295           MI.getOperand(getOperandIdx(MI.getOpcode(), OpTable[j][0]));
  299             MI.getOperand(getOperandIdx(MI.getOpcode(), OpTable[j][1]));
  315     int SrcIdx = getOperandIdx(MI.getOpcode(), OpTable[j][0]);
  322           MI.getOperand(getOperandIdx(MI.getOpcode(), OpTable[j][1]));
  328           MI.getOperand(getOperandIdx(MI.getOpcode(), R600::OpName::literal));
  548     unsigned Op = getOperandIdx(IG[i]->getOpcode(),
  614     if (!isALUInstr(MI.getOpcode()))
  657     if (isPredicateSetter(MI.getOpcode()))
  688   if (isBranch(I->getOpcode()))
  690   if (!isJump(I->getOpcode())) {
  695   while (I != MBB.begin() && std::prev(I)->getOpcode() == R600::JUMP) {
  704   unsigned LastOpc = LastInst.getOpcode();
  705   if (I == MBB.begin() || !isJump((--I)->getOpcode())) {
  711       while (!isPredicateSetter(predSet->getOpcode())) {
  725   unsigned SecondLastOpc = SecondLastInst.getOpcode();
  730     while (!isPredicateSetter(predSet->getOpcode())) {
  749     if (It->getOpcode() == R600::CF_ALU ||
  750         It->getOpcode() == R600::CF_ALU_PUSH_BEFORE)
  781       assert (CfAlu->getOpcode() == R600::CF_ALU);
  797     assert (CfAlu->getOpcode() == R600::CF_ALU);
  816   switch (I->getOpcode()) {
  826     assert (CfAlu->getOpcode() == R600::CF_ALU_PUSH_BEFORE);
  840   switch (I->getOpcode()) {
  851     assert (CfAlu->getOpcode() == R600::CF_ALU_PUSH_BEFORE);
  883   if (MI.getOpcode() == R600::KILLGT) {
  885   } else if (MI.getOpcode() == R600::CF_ALU) {
  968   return isPredicateSetter(MI.getOpcode());
  975   if (MI.getOpcode() == R600::CF_ALU) {
  980   if (MI.getOpcode() == R600::DOT_4) {
 1024   switch (MI.getOpcode()) {
 1028         R600::getNamedOperandIdx(MI.getOpcode(), R600::OpName::addr);
 1033         R600::getNamedOperandIdx(MI.getOpcode(), R600::OpName::chan);
 1036           R600::getNamedOperandIdx(MI.getOpcode(), R600::OpName::dst);
 1050           R600::getNamedOperandIdx(MI.getOpcode(), R600::OpName::val);
 1319   assert (MI->getOpcode() == R600::DOT_4 && "Not Implemented");
 1327       getOperandIdx(MI->getOpcode(), getSlotedOps(R600::OpName::src0, Slot)));
 1329       getOperandIdx(MI->getOpcode(), getSlotedOps(R600::OpName::src1, Slot)));
 1349   MachineOperand &MO = MI->getOperand(getOperandIdx(MI->getOpcode(),
 1356         getOperandIdx(MI->getOpcode(), getSlotedOps(Operands[i], Slot)));
 1381   return getOperandIdx(MI.getOpcode(), Op);
 1402   unsigned TargetFlags = get(MI.getOpcode()).TSFlags;
 1467   unsigned TargetFlags = get(MI.getOpcode()).TSFlags;
 1488   unsigned TargetFlags = get(MI.getOpcode()).TSFlags;
lib/Target/AMDGPU/R600InstrInfo.h
  318     return get(MI.getOpcode()).TSFlags & R600InstrFlags::REGISTER_STORE;
  322     return get(MI.getOpcode()).TSFlags & R600InstrFlags::REGISTER_LOAD;
lib/Target/AMDGPU/R600MachineScheduler.cpp
  183   if (MI->getOpcode() != R600::COPY)
  225   switch (MI->getOpcode()) {
  247      TII->isCubeOp(MI->getOpcode()) ||
  248      TII->isReductionOp(MI->getOpcode()) ||
  249      MI->getOpcode() == R600::GROUP_BARRIER) {
  253   if (TII->isLDSInstr(MI->getOpcode())) {
  294   int Opcode = SU->getInstr()->getOpcode();
  356   int DstIndex = TII->getOperandIdx(MI->getOpcode(), R600::OpName::dst);
lib/Target/AMDGPU/R600OptimizeVectorRegisters.cpp
   76     assert(MI->getOpcode() == R600::REG_SEQUENCE);
  153   if (TII->get(MI.getOpcode()).TSFlags & R600_InstFlag::TEX_INST)
  155   switch (MI.getOpcode()) {
  267   if (TII->get(MI.getOpcode()).TSFlags & R600_InstFlag::TEX_INST)
  351       if (MI.getOpcode() != R600::REG_SEQUENCE) {
  352         if (TII->get(MI.getOpcode()).TSFlags & R600_InstFlag::TEX_INST) {
lib/Target/AMDGPU/R600Packetizer.cpp
   72     if (!TII->isALUInstr(I->getOpcode()) && !I->isBundle())
   86       int OperandIdx = TII->getOperandIdx(BI->getOpcode(), R600::OpName::write);
   89       int DstIdx = TII->getOperandIdx(BI->getOpcode(), R600::OpName::dst);
   98       if (BI->getOpcode() == R600::DOT4_r600 ||
   99           BI->getOpcode() == R600::DOT4_eg) {
  136       int OperandIdx = TII->getOperandIdx(MI.getOpcode(), Ops[i]);
  171     if (!TII->isALUInstr(MI.getOpcode()))
  173     if (MI.getOpcode() == R600::GROUP_BARRIER)
  177     return TII->isLDSInstr(MI.getOpcode());
  187     int OpI = TII->getOperandIdx(MII->getOpcode(), R600::OpName::pred_sel),
  188         OpJ = TII->getOperandIdx(MIJ->getOpcode(), R600::OpName::pred_sel);
  222     unsigned LastOp = TII->getOperandIdx(MI->getOpcode(), R600::OpName::last);
  302         unsigned Op = TII->getOperandIdx(MI->getOpcode(),
  307           TII->getOperandIdx(MI.getOpcode(), R600::OpName::bank_swizzle);
  356       if (MI->isKill() || MI->getOpcode() == R600::IMPLICIT_DEF ||
  357           (MI->getOpcode() == R600::CF_ALU && !MI->getOperand(8).getImm())) {
lib/Target/AMDGPU/SIAddIMGInit.cpp
   77       auto Opcode = MI.getOpcode();
   99               AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::vdata);
lib/Target/AMDGPU/SIFixSGPRCopies.cpp
  217         UseMI->getOpcode() <= TargetOpcode::GENERIC_OP_END ||
  327   if (Copy->getOpcode() != AMDGPU::COPY)
  342   switch (MoveImm->getOpcode()) {
  604       switch (MI.getOpcode()) {
  690         if (ST.getConstantBusLimit(MI.getOpcode()) != 1)
  700             AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::src0);
  702             AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::src1);
lib/Target/AMDGPU/SIFixVGPRCopies.cpp
   56       switch (MI.getOpcode()) {
lib/Target/AMDGPU/SIFixupVectorISel.cpp
   97     switch (DefInst->getOpcode()) {
  168     int NewOpcd = AMDGPU::getGlobalSaddrOp(MI.getOpcode());
lib/Target/AMDGPU/SIFoldOperands.cpp
  142   unsigned Opc = UseMI.getOpcode();
  178     OpNo == AMDGPU::getNamedOperandIdx(UseMI.getOpcode(), AMDGPU::OpName::vaddr);
  200       unsigned Opcode = MI->getOpcode();
  335     unsigned Opc = MI->getOpcode();
  417         unsigned MaybeCommutedOpc = MI->getOpcode();
  772     unsigned UseOpc = UseMI->getOpcode();
  969   unsigned Opc = MI->getOpcode();
  988   if (MI->getOpcode() == AMDGPU::V_LSHL_OR_B32) {
 1046   if (MI->getOpcode() == AMDGPU::V_AND_B32_e64 ||
 1047       MI->getOpcode() == AMDGPU::V_AND_B32_e32 ||
 1048       MI->getOpcode() == AMDGPU::S_AND_B32) {
 1064   if (MI->getOpcode() == AMDGPU::V_XOR_B32_e64 ||
 1065       MI->getOpcode() == AMDGPU::V_XOR_B32_e32 ||
 1066       MI->getOpcode() == AMDGPU::S_XOR_B32) {
 1081   unsigned Opc = MI->getOpcode();
 1241   unsigned Op = MI.getOpcode();
 1356   unsigned Op = MI.getOpcode();
lib/Target/AMDGPU/SIFormMemoryClauses.cpp
  114   if (AMDGPU::getAtomicNoRetOp(MI.getOpcode()) != -1 ||
  115       AMDGPU::getAtomicRetOp(MI.getOpcode()) != -1)
lib/Target/AMDGPU/SIFrameLowering.cpp
 1108   unsigned Opc = I->getOpcode();
lib/Target/AMDGPU/SIISelLowering.cpp
 3039     MI.setDesc(TII->getKillTerminatorFromPseudo(MI.getOpcode()));
 3053   MI.setDesc(TII->getKillTerminatorFromPseudo(MI.getOpcode()));
 3597   switch (MI.getOpcode()) {
 3627     bool IsAdd = (MI.getOpcode() == AMDGPU::S_ADD_U64_PSEUDO);
 3684       if (I->getOpcode() != TargetOpcode::COPY ||
 3830     unsigned Opc = MI.getOpcode();
10392   if (TII->isVOP3(MI.getOpcode())) {
10400       unsigned Opc = MI.getOpcode();
10429   int NoRetAtomicOp = AMDGPU::getAtomicNoRetOp(MI.getOpcode());
10791       if (I != Exit->end() && I->getOpcode() == AMDGPU::S_INST_PREFETCH)
lib/Target/AMDGPU/SIInsertSkips.cpp
  100   switch (MI.getOpcode()) {
  128       if (I->getOpcode() == AMDGPU::S_CBRANCH_VCCNZ ||
  129           I->getOpcode() == AMDGPU::S_CBRANCH_VCCZ)
  137           I->getOpcode() == AMDGPU::S_WAITCNT)
  188   switch (MI.getOpcode()) {
  362       if (!A->definesRegister(CondReg, TRI) || A->getOpcode() != And)
  411   bool IsVCCZ = MI.getOpcode() == AMDGPU::S_CBRANCH_VCCZ;
  468       switch (MI.getOpcode()) {
lib/Target/AMDGPU/SIInsertWaitcnts.cpp
  540           AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::addr);
  548         if (AMDGPU::getNamedOperandIdx(Inst.getOpcode(),
  552               AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data0),
  555         if (AMDGPU::getNamedOperandIdx(Inst.getOpcode(),
  558                       AMDGPU::getNamedOperandIdx(Inst.getOpcode(),
  562       } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1 &&
  563                  Inst.getOpcode() != AMDGPU::DS_GWS_INIT &&
  564                  Inst.getOpcode() != AMDGPU::DS_GWS_SEMA_V &&
  565                  Inst.getOpcode() != AMDGPU::DS_GWS_SEMA_BR &&
  566                  Inst.getOpcode() != AMDGPU::DS_GWS_SEMA_P &&
  567                  Inst.getOpcode() != AMDGPU::DS_GWS_BARRIER &&
  568                  Inst.getOpcode() != AMDGPU::DS_APPEND &&
  569                  Inst.getOpcode() != AMDGPU::DS_CONSUME &&
  570                  Inst.getOpcode() != AMDGPU::DS_ORDERED_COUNT) {
  582             AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
  584       } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1) {
  587             AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
  593       } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1) {
  596             AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
  606       } else if (AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1) {
  609             AMDGPU::getNamedOperandIdx(Inst.getOpcode(), AMDGPU::OpName::data),
  805   unsigned Opc = MI.getOpcode();
  848   if (MI.getOpcode() == AMDGPU::BUFFER_WBINVL1 ||
  849       MI.getOpcode() == AMDGPU::BUFFER_WBINVL1_SC ||
  850       MI.getOpcode() == AMDGPU::BUFFER_WBINVL1_VOL ||
  851       MI.getOpcode() == AMDGPU::BUFFER_GL0_INV ||
  852       MI.getOpcode() == AMDGPU::BUFFER_GL1_INV) {
  859   if (MI.getOpcode() == AMDGPU::SI_RETURN_TO_EPILOG ||
  860       MI.getOpcode() == AMDGPU::S_SETPC_B64_return ||
  865   else if ((MI.getOpcode() == AMDGPU::S_SENDMSG ||
  866             MI.getOpcode() == AMDGPU::S_SENDMSGHALT) &&
  948           AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::src0);
  958             AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::dst);
 1046   if (MI.getOpcode() == AMDGPU::S_BARRIER &&
 1075         } else if (II->getOpcode() == AMDGPU::S_WAITCNT) {
 1079           assert(II->getOpcode() == AMDGPU::S_WAITCNT_VSCNT);
 1112       if (II->getOpcode() == AMDGPU::S_WAITCNT) {
 1127         assert(II->getOpcode() == AMDGPU::S_WAITCNT_VSCNT);
 1204     if (TII->isAlwaysGDS(Inst.getOpcode()) ||
 1218                AMDGPU::getAtomicRetOp(Inst.getOpcode()) == -1)
 1235              Inst.getOpcode() != AMDGPU::BUFFER_WBINVL1 &&
 1236              Inst.getOpcode() != AMDGPU::BUFFER_WBINVL1_SC &&
 1237              Inst.getOpcode() != AMDGPU::BUFFER_WBINVL1_VOL &&
 1238              Inst.getOpcode() != AMDGPU::BUFFER_GL0_INV &&
 1239              Inst.getOpcode() != AMDGPU::BUFFER_GL1_INV) {
 1243               AMDGPU::getAtomicRetOp(Inst.getOpcode()) == -1) ||
 1251         (Inst.mayStore() || AMDGPU::getAtomicNoRetOp(Inst.getOpcode()) != -1)) {
 1265     switch (Inst.getOpcode()) {
 1380     if (Inst.getOpcode() == AMDGPU::S_WAITCNT ||
 1381         (Inst.getOpcode() == AMDGPU::S_WAITCNT_VSCNT &&
 1552       if (I->getOpcode() == AMDGPU::S_ENDPGM ||
 1553           I->getOpcode() == AMDGPU::SI_RETURN_TO_EPILOG)
 1571         if (I->getOpcode() == AMDGPU::S_DCACHE_WB)
 1577         if ((I->getOpcode() == AMDGPU::S_ENDPGM ||
 1578              I->getOpcode() == AMDGPU::SI_RETURN_TO_EPILOG) &&
lib/Target/AMDGPU/SIInstrInfo.cpp
  133   switch (MI.getOpcode()) {
  263   unsigned Opc = LdSt.getOpcode();
  619         if (Def->getOpcode() != AMDGPU::V_ACCVGPR_WRITE_B32)
 1363   switch (MI.getOpcode()) {
 1374   switch (MI.getOpcode()) {
 1565   assert (MI.getOpcode() == AMDGPU::V_MOV_B64_DPP_PSEUDO);
 1670   unsigned Opc = MI.getOpcode();
 1758   if (MI.getOpcode() == AMDGPU::S_SETPC_B64) {
 1907   if (I->getOpcode() == AMDGPU::S_BRANCH) {
 1915   if (I->getOpcode() == AMDGPU::SI_NON_UNIFORM_BRCOND_PSEUDO) {
 1919     BranchPredicate Pred = getBranchPredicate(I->getOpcode());
 1935   if (I->getOpcode() == AMDGPU::S_BRANCH) {
 1956          I->getOpcode() != AMDGPU::SI_MASK_BRANCH) {
 1957     switch (I->getOpcode()) {
 1983   if (I->getOpcode() != AMDGPU::SI_MASK_BRANCH)
 2021     if (I->getOpcode() == AMDGPU::SI_MASK_BRANCH) {
 2253   switch (MI.getOpcode()) {
 2293   unsigned Opc = MI.getOpcode();
 2311   switch (DefMI.getOpcode()) {
 2331   unsigned Opc = UseMI.getOpcode();
 2599   if (Def && Def->getOpcode() == AMDGPU::V_MOV_B32_e32 &&
 2608   unsigned Opc = MI.getOpcode();
 2629     int Src0Idx = AMDGPU::getNamedOperandIdx(MI.getOpcode(),
 2713   switch (MI.getOpcode()) {
 2733          MI.getOpcode() == AMDGPU::S_SETREG_IMM32_B32 ||
 2734          MI.getOpcode() == AMDGPU::S_SETREG_B32 ||
 2735          MI.getOpcode() == AMDGPU::S_DENORM_MODE ||
 2750   unsigned Opcode = MI.getOpcode();
 2804   if (!isTargetSpecificOpcode(MI.getOpcode()))
 2942         OpNo ==(unsigned)AMDGPU::getNamedOperandIdx(MI.getOpcode(),
 2998     switch (MI.getOpcode()) {
 3036   if (!hasVALU32BitEncoding(MI.getOpcode()))
 3156     switch (MI.getOpcode()) {
 3172       SIInstrInfo::isGenericOpcode(MI.getOpcode()) ||
 3192   uint16_t Opcode = MI.getOpcode();
 3193   if (SIInstrInfo::isGenericOpcode(MI.getOpcode()))
 3391   if (isMIMG(MI.getOpcode()) && !MI.mayStore()) {
 3398           isGather4(MI.getOpcode()) ? 4 : countPopulation(DMaskImm);
 3412           AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::vdata);
 3728   switch (MI.getOpcode()) {
 3810   const MCInstrDesc &Desc = get(MI.getOpcode());
 3831   unsigned RCID = get(MI.getOpcode()).OpInfo[OpIdx].RegClass;
 3961   int ConstantBusLimit = ST.getConstantBusLimit(MI.getOpcode());
 4014   unsigned Opc = MI.getOpcode();
 4126   unsigned Opc = MI.getOpcode();
 4539   if (MI.getOpcode() == AMDGPU::PHI) {
 4595   if (MI.getOpcode() == AMDGPU::REG_SEQUENCE) {
 4622   if (MI.getOpcode() == AMDGPU::INSERT_SUBREG) {
 4636   if (MI.getOpcode() == AMDGPU::SI_INIT_M0) {
 4667       AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::srsrc);
 4671     unsigned RsrcRC = get(MI.getOpcode()).OpInfo[RsrcIdx].RegClass;
 4695     if (VAddr && AMDGPU::getIfAddr64Inst(MI.getOpcode()) != -1) {
 4748       unsigned Addr64Opcode = AMDGPU::getAddr64Inst(MI.getOpcode());
 4826     unsigned Opcode = Inst.getOpcode();
 5114     unsigned Opc = Inst.getOpcode();
 5343   bool IsAdd = (Inst.getOpcode() == AMDGPU::S_ADD_U64_PSEUDO);
 5570   assert(Inst.getOpcode() == AMDGPU::S_BFE_I64 && BitWidth <= 32 &&
 5626     switch (UseMI.getOpcode()) {
 5661   switch (Inst.getOpcode()) {
 5738   switch (Inst.getOpcode()) {
 5754       switch (Inst.getOpcode()) {
 5854   int Idx = AMDGPU::getNamedOperandIdx(MI.getOpcode(), OperandName);
 5908   unsigned Opc = MI.getOpcode();
 5914   unsigned Opc = MI.getOpcode();
 5981   unsigned Opc = MI.getOpcode();
 6062   return Branch.getOpcode() == AMDGPU::SI_NON_UNIFORM_BRCOND_PSEUDO;
 6074   if (Branch->getOpcode() == AMDGPU::SI_NON_UNIFORM_BRCOND_PSEUDO) {
 6100   if (Branch->getOpcode() == AMDGPU::SI_NON_UNIFORM_BRCOND_PSEUDO) {
 6184   return !MI.isTerminator() && MI.getOpcode() != AMDGPU::COPY &&
 6261   int Idx = AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::sbase);
 6395   switch (MI.getOpcode()) {
 6426     switch (MI->getOpcode()) {
 6544       (InsPt->getOpcode() == AMDGPU::SI_IF ||
 6545        InsPt->getOpcode() == AMDGPU::SI_ELSE ||
 6546        InsPt->getOpcode() == AMDGPU::SI_IF_BREAK) &&
lib/Target/AMDGPU/SIInstrInfo.h
  250     return commuteOpcode(MI.getOpcode());
  917     int Idx = AMDGPU::getNamedOperandIdx(MI.getOpcode(), OpName);
lib/Target/AMDGPU/SILoadStoreOptimizer.cpp
  276   const unsigned Opc = MI.getOpcode();
  427   unsigned Opc = MI->getOpcode();
  473   const unsigned Regs = getRegs(I->getOpcode(), TII);
  500     AddrIdx[i] = AMDGPU::getNamedOperandIdx(I->getOpcode(), AddrOpName[i]);
  510   assert(InstClass == getInstClass(Paired->getOpcode(), TII));
  516         AMDGPU::getNamedOperandIdx(I->getOpcode(), AMDGPU::OpName::offset);
  654     int Idx = AMDGPU::getNamedOperandIdx(CI.I->getOpcode(), op);
  655     if (AMDGPU::getNamedOperandIdx(CI.Paired->getOpcode(), op) != Idx)
  757   const unsigned Opc = CI.I->getOpcode();
  767       AMDGPU::getNamedOperandIdx(CI.I->getOpcode(), AMDGPU::OpName::swz);
  779     if ((getInstClass(MBBI->getOpcode(), *TII) != InstClass) ||
  780         (getInstSubclass(MBBI->getOpcode(), *TII) != InstSubclass)) {
 1048       AMDGPU::getNamedOperandIdx(CI.I->getOpcode(), AMDGPU::OpName::dmask);
 1210     return AMDGPU::getMUBUFOpcode(AMDGPU::getMUBUFBaseOpcode(CI.I->getOpcode()),
 1225     return AMDGPU::getMaskedMIMGOp(CI.I->getOpcode(), Width);
 1445   if (!Def || Def->getOpcode() != AMDGPU::S_MOV_B32 ||
 1468   if (!Def || Def->getOpcode() != AMDGPU::REG_SEQUENCE
 1480   if (!BaseLoDef || BaseLoDef->getOpcode() != AMDGPU::V_ADD_I32_e64 ||
 1481       !BaseHiDef || BaseHiDef->getOpcode() != AMDGPU::V_ADDC_U32_e64)
 1524   if (AMDGPU::getGlobalSaddrOp(MI.getOpcode()) < 0)
 1599     if (MINext.getOpcode() != MI.getOpcode() ||
 1694     const InstClassEnum InstClass = getInstClass(MI.getOpcode(), *TII);
lib/Target/AMDGPU/SILowerControlFlow.cpp
  154       U->getOpcode() != AMDGPU::SI_END_CF)
  171       if (TII->isKillTerminator(Term.getOpcode()))
  190   if (J != MBB->end() && J->getOpcode() == FalseTermOpc &&
  451       !(Def->isFullCopy() || (Def->getOpcode() == MI.getOpcode())))
  536       switch (MI.getOpcode()) {
lib/Target/AMDGPU/SILowerI1Copies.cpp
  178         if (MI.getOpcode() == AMDGPU::SI_NON_UNIFORM_BRCOND_PSEUDO ||
  179             MI.getOpcode() == AMDGPU::SI_IF ||
  180             MI.getOpcode() == AMDGPU::SI_ELSE ||
  181             MI.getOpcode() == AMDGPU::SI_LOOP) {
  506       if (MI.getOpcode() != AMDGPU::COPY)
  580       if (IncomingDef->getOpcode() == AMDGPU::COPY) {
  584       } else if (IncomingDef->getOpcode() == AMDGPU::IMPLICIT_DEF) {
  676       if (MI.getOpcode() != AMDGPU::IMPLICIT_DEF &&
  677           MI.getOpcode() != AMDGPU::COPY)
  693       if (MI.getOpcode() == AMDGPU::IMPLICIT_DEF)
  741     if (MI->getOpcode() != AMDGPU::COPY)
  751   if (MI->getOpcode() != MovOp)
lib/Target/AMDGPU/SILowerSGPRSpills.cpp
  278           unsigned FIOp = AMDGPU::getNamedOperandIdx(MI.getOpcode(),
lib/Target/AMDGPU/SIMachineScheduler.cpp
 1873     } else if (SU->getInstr()->getOpcode() == AMDGPU::COPY) {
lib/Target/AMDGPU/SIMemoryLegalizer.cpp
  105   int BitIdx = AMDGPU::getNamedOperandIdx(MI->getOpcode(), BitName);
  401     return AMDGPU::getAtomicNoRetOp(MI.getOpcode()) != -1;
  607   if (MI->getOpcode() != AMDGPU::ATOMIC_FENCE)
 1209   assert(MI->getOpcode() == AMDGPU::ATOMIC_FENCE);
lib/Target/AMDGPU/SIModeRegister.cpp
  173     switch (MI.getOpcode()) {
  240     if ((MI.getOpcode() == AMDGPU::S_SETREG_B32) ||
  241         (MI.getOpcode() == AMDGPU::S_SETREG_IMM32_B32)) {
  265       if (MI.getOpcode() == AMDGPU::S_SETREG_IMM32_B32) {
lib/Target/AMDGPU/SIOptimizeExecMasking.cpp
   60   switch (MI.getOpcode()) {
   78   switch (MI.getOpcode()) {
  100   switch (MI.getOpcode()) {
  181   switch (MI.getOpcode()) {
  350         unsigned SaveExecOp = getSaveExecOp(J->getOpcode());
  408     BuildMI(MBB, InsPt, DL, TII->get(getSaveExecOp(SaveExecInst->getOpcode())),
lib/Target/AMDGPU/SIOptimizeExecMaskingPreRA.cpp
   88     return MI.getOpcode() == AMDGPU::S_OR_B32 &&
   92   return MI.getOpcode() == AMDGPU::S_OR_B64 &&
  201                            unsigned Opc = MI.getOpcode();
  209   if (!And || And->getOpcode() != AndOpc ||
  225   if (!Cmp || !(Cmp->getOpcode() == AMDGPU::V_CMP_NE_U32_e32 ||
  226                 Cmp->getOpcode() == AMDGPU::V_CMP_NE_U32_e64) ||
  239   if (!Sel || Sel->getOpcode() != AMDGPU::V_CNDMASK_B32_e64)
  328       if (Term.getOpcode() != AMDGPU::S_ENDPGM || Term.getNumOperands() != 1)
  337           if (I->isUnconditionalBranch() || I->getOpcode() == AMDGPU::S_ENDPGM)
lib/Target/AMDGPU/SIPeepholeSDWA.cpp
  407           auto DstIdx = AMDGPU::getNamedOperandIdx(MI.getOpcode(),
  421     if ((MI.getOpcode() == AMDGPU::V_FMAC_F16_sdwa ||
  422          MI.getOpcode() == AMDGPU::V_FMAC_F32_sdwa ||
  423          MI.getOpcode() == AMDGPU::V_MAC_F16_sdwa ||
  424          MI.getOpcode() == AMDGPU::V_MAC_F32_sdwa) &&
  465   if ((MI.getOpcode() == AMDGPU::V_FMAC_F16_sdwa ||
  466        MI.getOpcode() == AMDGPU::V_FMAC_F32_sdwa ||
  467        MI.getOpcode() == AMDGPU::V_MAC_F16_sdwa ||
  468        MI.getOpcode() == AMDGPU::V_MAC_F32_sdwa) &&
  515   MI.tieOperands(AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::vdst),
  551   unsigned Opcode = MI.getOpcode();
  882   int Opc = MI.getOpcode();
  901   int SuccOpc = AMDGPU::getVOPe32(MISucc.getOpcode());
  940   unsigned Opc = MI.getOpcode();
  996   unsigned Opcode = MI.getOpcode();
 1175   const MCInstrDesc &Desc = TII->get(MI.getOpcode());
 1228            (PotentialMI->getOpcode() == AMDGPU::V_ADD_I32_e64 ||
 1229             PotentialMI->getOpcode() == AMDGPU::V_SUB_I32_e64))
lib/Target/AMDGPU/SIPreAllocateWWMRegs.cpp
  190       if (MI.getOpcode() == AMDGPU::V_SET_INACTIVE_B32 ||
  191           MI.getOpcode() == AMDGPU::V_SET_INACTIVE_B64)
  194       if (MI.getOpcode() == AMDGPU::ENTER_WWM) {
  200       if (MI.getOpcode() == AMDGPU::EXIT_WWM) {
lib/Target/AMDGPU/SIRegisterInfo.cpp
  316   int OffIdx = AMDGPU::getNamedOperandIdx(MI->getOpcode(),
  326   assert(Idx == AMDGPU::getNamedOperandIdx(MI->getOpcode(),
  584   unsigned Opc = MI->getOpcode();
  959   switch (MI->getOpcode()) {
 1000   switch (MI->getOpcode()) {
 1054       MFI->addToSpilledVGPRs(getNumSubRegsForSpillOp(MI->getOpcode()));
 1105         bool IsCopy = MI->getOpcode() == AMDGPU::V_MOV_B32_e32;
 1130             const bool IsVOP2 = MIB->getOpcode() == AMDGPU::V_ADD_U32_e32;
 1203                AMDGPU::getNamedOperandIdx(MI->getOpcode(),
lib/Target/AMDGPU/SIShrinkInstructions.cpp
   75   int Src0Idx = AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::src0);
  192   int SOPKOpc = AMDGPU::getSOPKOp(MI.getOpcode());
  222   const AMDGPU::MIMGInfo *Info = AMDGPU::getMIMGInfo(MI.getOpcode());
  231       AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::vaddr0);
  272   int TFEIdx = AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::tfe);
  273   int LWEIdx = AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::lwe);
  305         AMDGPU::getNamedOperandIdx(MI.getOpcode(), AMDGPU::OpName::vdata),
  319   unsigned Opc = MI.getOpcode();
  457   assert(MovT.getOpcode() == AMDGPU::V_MOV_B32_e32 ||
  458          MovT.getOpcode() == AMDGPU::COPY);
  480     if ((MovY.getOpcode() != AMDGPU::V_MOV_B32_e32 &&
  481          MovY.getOpcode() != AMDGPU::COPY) ||
  509           (I->getOpcode() != AMDGPU::V_MOV_B32_e32 &&
  510            I->getOpcode() != AMDGPU::COPY) ||
  569       if (MI.getOpcode() == AMDGPU::V_MOV_B32_e32) {
  590       if (ST.hasSwap() && (MI.getOpcode() == AMDGPU::V_MOV_B32_e32 ||
  591                            MI.getOpcode() == AMDGPU::COPY)) {
  605       if (MI.getOpcode() == AMDGPU::S_NOP &&
  607           (*Next).getOpcode() == AMDGPU::S_NOP) {
  630       if (MI.getOpcode() == AMDGPU::S_ADD_I32 ||
  631           MI.getOpcode() == AMDGPU::S_MUL_I32) {
  652             unsigned Opc = (MI.getOpcode() == AMDGPU::S_ADD_I32) ?
  668       if (MI.getOpcode() == AMDGPU::S_MOV_B32) {
  686       if (MI.getOpcode() == AMDGPU::S_AND_B32 ||
  687           MI.getOpcode() == AMDGPU::S_OR_B32 ||
  688           MI.getOpcode() == AMDGPU::S_XOR_B32) {
  693       if (TII->isMIMG(MI.getOpcode()) &&
  701       if (!TII->hasVALU32BitEncoding(MI.getOpcode()))
  714       if (!TII->hasVALU32BitEncoding(MI.getOpcode()))
  717       int Op32 = AMDGPU::getVOPe32(MI.getOpcode());
lib/Target/AMDGPU/SIWholeQuadMode.cpp
  329       unsigned Opcode = MI.getOpcode();
  762       if (MI.getOpcode() == AMDGPU::SI_ELSE && BI.OutNeeds == StateExact)
lib/Target/ARC/ARCAsmPrinter.cpp
   53   switch (MI->getOpcode()) {
lib/Target/ARC/ARCBranchFinalize.cpp
   95   return !(MI->getOpcode() != ARC::BRcc_rr_p &&
   96            MI->getOpcode() != ARC::BRcc_ru6_p);
  101   if (MI->getOpcode() == ARC::BRcc_rr_p)
  108   if (MI->getOpcode() == ARC::BRcc_rr_p)
lib/Target/ARC/ARCExpandPseudos.cpp
   69           TII->get(getMappedOp(SI.getOpcode())))
   84       switch (MBBI->getOpcode()) {
lib/Target/ARC/ARCFrameLowering.cpp
  479     if (Amt > AFI->MaxCallStackReq && Old.getOpcode() == ARC::ADJCALLSTACKDOWN)
  483       assert((Old.getOpcode() == ARC::ADJCALLSTACKDOWN ||
  484               Old.getOpcode() == ARC::ADJCALLSTACKUP) &&
  486       bool IsAdd = (Old.getOpcode() == ARC::ADJCALLSTACKUP);
lib/Target/ARC/ARCInstrInfo.cpp
   70   int Opcode = MI.getOpcode();
   89   int Opcode = MI.getOpcode();
  194     if (isJumpOpcode(I->getOpcode())) {
  198     } else if (isUncondBranchOpcode(I->getOpcode())) {
  200     } else if (isCondBranchOpcode(I->getOpcode())) {
  221     if (!isPredicated(*I) && (isUncondBranchOpcode(I->getOpcode()) ||
  222                               isJumpOpcode(I->getOpcode()) || I->isReturn())) {
  261   if (!isUncondBranchOpcode(I->getOpcode()) &&
  262       !isCondBranchOpcode(I->getOpcode()))
  273   if (!isCondBranchOpcode(I->getOpcode()))
lib/Target/ARC/ARCMCInstLower.cpp
  105   OutMI.setOpcode(MI->getOpcode());
lib/Target/ARC/ARCOptAddrMode.cpp
  124   switch (MI.getOpcode()) {
  252     int NewOpcode = ARC::getPostIncOpcode(Ldst.getOpcode());
  474     if (ARC::getPostIncOpcode(MI->getOpcode()) < 0)
lib/Target/ARC/ARCRegisterInfo.cpp
   48   if (MI.getOpcode() == ARC::LD_rs9 && (Offset >= 256 || Offset < -256)) {
   58   if (MI.getOpcode() != ARC::GETFI && (Offset >= 256 || Offset < -256)) {
   82   switch (MI.getOpcode()) {
   93     BuildMI(MBB, II, dl, TII.get(MI.getOpcode()), Reg)
  106     BuildMI(MBB, II, dl, TII.get(MI.getOpcode()))
lib/Target/ARM/ARMAsmPrinter.cpp
 1076   unsigned Opc = MI->getOpcode();
 1248   if (InConstantPool && MI->getOpcode() != ARM::CONSTPOOL_ENTRY) {
 1262   assert(!convertAddSubFlagsOpcode(MI->getOpcode()) &&
 1266   unsigned Opc = MI->getOpcode();
 1275     EmitToStreamer(*OutStreamer, MCInstBuilder(MI->getOpcode() ==
 1277                   : (MI->getOpcode() == ARM::tLEApcrel ? ARM::tADR
 1291     EmitToStreamer(*OutStreamer, MCInstBuilder(MI->getOpcode() ==
 1293                   : (MI->getOpcode() == ARM::tLEApcrelJT ? ARM::tADR
 1580     switch (MI->getOpcode()) {
 1638     EmitJumpTableTBInst(MI, MI->getOpcode() == ARM::JUMPTABLE_TBB ? 1 : 2);
 1651     unsigned Opc = MI->getOpcode() == ARM::t2TBB_JT ? ARM::t2TBB : ARM::t2TBH;
 1665     bool Is8Bit = MI->getOpcode() == ARM::tTBB_JT;
 1751     unsigned Opc = MI->getOpcode() == ARM::BR_JTr ?
lib/Target/ARM/ARMBaseInstrInfo.cpp
  162   unsigned MemOpc = getUnindexedOpcode(MI.getOpcode());
  339     if (isIndirectBranchOpcode(I->getOpcode()) ||
  340         isJumpTableBranchOpcode(I->getOpcode())) {
  344     } else if (isUncondBranchOpcode(I->getOpcode())) {
  346     } else if (isCondBranchOpcode(I->getOpcode())) {
  367           (isUncondBranchOpcode(I->getOpcode()) ||
  368            isIndirectBranchOpcode(I->getOpcode()) ||
  369            isJumpTableBranchOpcode(I->getOpcode()) ||
  409   if (!isUncondBranchOpcode(I->getOpcode()) &&
  410       !isCondBranchOpcode(I->getOpcode()))
  420   if (!isCondBranchOpcode(I->getOpcode()))
  500   unsigned Opc = MI.getOpcode();
  623   switch (MI->getOpcode()) {
  709   switch (MI.getOpcode()) {
 1007       (MI.getOpcode() == ARM::VORRq &&
 1207   switch (MI.getOpcode()) {
 1444   switch (MI.getOpcode()) {
 1568   if (MI.getOpcode() == TargetOpcode::LOAD_STACK_GUARD) {
 1576   if (MI.getOpcode() == ARM::MEMCPY) {
 1697   unsigned Opcode = Orig.getOpcode();
 1726     switch (I->getOpcode()) {
 1747   unsigned Opcode = MI0.getOpcode();
 1758     if (MI1.getOpcode() != Opcode)
 1796     if (MI1.getOpcode() != Opcode)
 1975   if (I != MBB->end() && I->getOpcode() == ARM::t2IT)
 2006       if (LastMI->getOpcode() == ARM::t2Bcc) {
 2096   if (MI.getOpcode() == ARM::t2Bcc &&
 2152   switch (MI.getOpcode()) {
 2216   assert((MI.getOpcode() == ARM::MOVCCr || MI.getOpcode() == ARM::t2MOVCCr) &&
 2237   assert((MI.getOpcode() == ARM::MOVCCr || MI.getOpcode() == ARM::t2MOVCCr) &&
 2409   bool IsPop = isPopOpcode(MI->getOpcode());
 2410   bool IsPush = isPushOpcode(MI->getOpcode());
 2414   bool IsVFPPushPop = MI->getOpcode() == ARM::VSTMDDB_UPD ||
 2415                       MI->getOpcode() == ARM::VLDMDIA_UPD;
 2416   bool IsT1PushPop = MI->getOpcode() == ARM::tPUSH ||
 2417                      MI->getOpcode() == ARM::tPOP ||
 2418                      MI->getOpcode() == ARM::tPOP_RET;
 2523   unsigned Opcode = MI.getOpcode();
 2680   switch (MI.getOpcode()) {
 2716   switch (MI->getOpcode()) {
 2771   if ((CmpI->getOpcode() == ARM::CMPrr || CmpI->getOpcode() == ARM::t2CMPrr) &&
 2772       (OI->getOpcode() == ARM::SUBrr || OI->getOpcode() == ARM::t2SUBrr) &&
 2781   if (CmpI->getOpcode() == ARM::tCMPr && OI->getOpcode() == ARM::tSUBrr &&
 2790   if ((CmpI->getOpcode() == ARM::CMPri || CmpI->getOpcode() == ARM::t2CMPri) &&
 2791       (OI->getOpcode() == ARM::SUBri || OI->getOpcode() == ARM::t2SUBri) &&
 2798   if (CmpI->getOpcode() == ARM::tCMPi8 &&
 2799       (OI->getOpcode() == ARM::tSUBi8 || OI->getOpcode() == ARM::tSUBi3) &&
 2806   if ((CmpI->getOpcode() == ARM::CMPrr || CmpI->getOpcode() == ARM::t2CMPrr) &&
 2807       (OI->getOpcode() == ARM::ADDrr || OI->getOpcode() == ARM::t2ADDrr ||
 2808        OI->getOpcode() == ARM::ADDri || OI->getOpcode() == ARM::t2ADDri) &&
 2816   if (CmpI->getOpcode() == ARM::tCMPr &&
 2817       (OI->getOpcode() == ARM::tADDi3 || OI->getOpcode() == ARM::tADDi8 ||
 2818        OI->getOpcode() == ARM::tADDrr) &&
 2829   switch (MI->getOpcode()) {
 2951     if (CmpInstr.getOpcode() == ARM::CMPri ||
 2952         CmpInstr.getOpcode() == ARM::t2CMPri ||
 2953         CmpInstr.getOpcode() == ARM::tCMPi8)
 2981         if (I->getOpcode() != ARM::tMOVi8) {
 3072       switch (Instr.getOpcode()) {
 3107         unsigned Opc = SubAdd->getOpcode();
 3205   unsigned DefOpc = DefMI.getOpcode();
 3234   unsigned UseOpc = UseMI.getOpcode();
 3334   switch (MI.getOpcode()) {
 3649   unsigned Opc = MI.getOpcode();
 4058     if (II->getOpcode() != ARM::t2IT)
 4295     if (DefMI.getOpcode() == ARM::FMSTAT) {
 4639       if (I->getOpcode() != ARM::t2IT)
 4733   if (convertAddSubFlagsOpcode(MI.getOpcode())) {
 4737   if (MI.getOpcode() == ARM::tMOVr && !Subtarget.hasV6Ops()) {
 4745   if (MI.getOpcode() == ARM::tPUSH ||
 4746       MI.getOpcode() == ARM::tPOP ||
 4747       MI.getOpcode() == ARM::tPOP_RET) {
 4754         if (!(MI.getOpcode() == ARM::tPUSH && Reg == ARM::LR) &&
 4755             !(MI.getOpcode() == ARM::tPOP_RET && Reg == ARM::PC)) {
 4844     if (MI.getOpcode() == ARM::VMOVD && !isPredicated(MI))
 4850         (MI.getOpcode() == ARM::VMOVRS || MI.getOpcode() == ARM::VMOVSR ||
 4851          MI.getOpcode() == ARM::VMOVS))
 4934   switch (MI.getOpcode()) {
 5151   switch (MI.getOpcode()) {
 5255   switch (MI.getOpcode()) {
 5282   switch (MI.getOpcode()) {
 5305   switch (MI.getOpcode()) {
 5379   if (CmpMI->getOpcode() != ARM::tCMPi8 && CmpMI->getOpcode() != ARM::t2CMPri)
lib/Target/ARM/ARMBaseRegisterInfo.cpp
  563   unsigned Opc = MI->getOpcode();
lib/Target/ARM/ARMBasicBlockInfo.cpp
   31   switch(MI->getOpcode()) {
   66   if (!MBB->empty() && MBB->back().getOpcode() == ARM::tBR_JTr) {
lib/Target/ARM/ARMConstantIslandPass.cpp
  568     switch (MI->getOpcode()) {
  654   switch (CPEMI->getOpcode()) {
  681           (I.getOpcode() == ARM::t2BR_JT || I.getOpcode() == ARM::tBR_JTr))
  712       unsigned Opc = I.getOpcode();
 1094   if (PredMI->getOpcode() == ARM::B || PredMI->getOpcode() == ARM::tB
 1095       || PredMI->getOpcode() == ARM::t2B)
 1369          I->getOpcode() != ARM::t2IT &&
 1405     if (MI->getOpcode() == ARM::t2IT)
 1426   if (STI->isTargetWindows() && isThumb && MI->getOpcode() == ARM::t2MOVTi16 &&
 1430     assert(MI->getOpcode() == ARM::t2MOVi16 &&
 1673         BMI->getOpcode() == Br.UncondBr) {
 1717   BuildMI(MBB, DebugLoc(), TII->get(MI->getOpcode()))
 1746     if (MI->getOpcode() == ARM::tPOP_RET &&
 1755     } else if (MI->getOpcode() == ARM::tPUSH &&
 1772     unsigned Opcode = U.MI->getOpcode();
 1823     unsigned Opcode = Br.MI->getOpcode();
 1892     if (!CmpMI || CmpMI->getOpcode() != ARM::tCMPi8)
 1901     if (Br.MI->getOpcode() != ARM::t2Bcc || !STI->hasLOB() ||
 1950     unsigned Opcode = Br.MI->getOpcode();
 1981     if (Br.MI->getOpcode() == ARM::tBcc) {
 2004   if (I.getOpcode() != ARM::t2ADDrs)
 2130     if (I->getOpcode() == ARM::t2ADDrs && I->getOperand(0).getReg() == EntryReg)
 2227       if (Shift->getOpcode() != ARM::tLSLri ||
 2241       if (Load->getOpcode() != ARM::tLDRr)
 2261         if (Add->getOpcode() != ARM::tADDrr ||
lib/Target/ARM/ARMExpandPseudoInsts.cpp
  474   const NEONLdStTableEntry *TableEntry = LookupNEONLdSt(MI.getOpcode());
  585   const NEONLdStTableEntry *TableEntry = LookupNEONLdSt(MI.getOpcode());
  661   const NEONLdStTableEntry *TableEntry = LookupNEONLdSt(MI.getOpcode());
  829   unsigned Opcode = MI.getOpcode();
 1157   unsigned Opcode = MI.getOpcode();
 1167       unsigned RetOpcode = MBBI->getOpcode();
lib/Target/ARM/ARMFastISel.cpp
 2927     if (FLE.Opc[isThumb2] == MI->getOpcode() &&
lib/Target/ARM/ARMFeatures.h
   25   switch (Instr->getOpcode()) {
lib/Target/ARM/ARMFrameLowering.cpp
  148   if (isPopOpcode(MI.getOpcode())) {
  156   if ((MI.getOpcode() == ARM::LDR_POST_IMM ||
  157        MI.getOpcode() == ARM::LDR_POST_REG ||
  158        MI.getOpcode() == ARM::t2LDR_POST) &&
  191   switch (MI.getOpcode()) {
  501     while (MBBI != MBB.end() && MBBI->getOpcode() == ARM::VSTMDDB_UPD) {
  860       while (MBBI != MBB.end() && MBBI->getOpcode() == ARM::VLDMDIA_UPD)
 1065     unsigned RetOpcode = MI->getOpcode();
 1523         if (MI.getOpcode() == ARM::ADDri) {
 1529         if (MI.getOpcode() == ARM::t2ADDri || MI.getOpcode() == ARM::t2ADDri12)
 2169       unsigned Opc = Old.getOpcode();
lib/Target/ARM/ARMHazardRecognizer.cpp
   61       if (TII.isFpMLxInstruction(DefMI->getOpcode()) &&
   62           (TII.canCauseFpMLxStall(MI->getOpcode()) ||
lib/Target/ARM/ARMISelLowering.cpp
10403   switch (MI.getOpcode()) {
10438     unsigned NewOpc = MI.getOpcode() == ARM::STRi_preidx ? ARM::STR_PRE_IMM
10463     switch (MI.getOpcode()) {
10547     bool RHSisZero = MI.getOpcode() == ARM::BCCZi64;
10717   if (MI.getOpcode() == ARM::MEMCPY) {
10731   unsigned NewOpc = convertAddSubFlagsOpcode(MI.getOpcode());
lib/Target/ARM/ARMInstructionSelector.cpp
  223     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  632     assert((MIB->getOpcode() == ARM::LDRi12 ||
  633             MIB->getOpcode() == ARM::t2LDRpci) &&
  647     if (MIB->getOpcode() == ARM::LDRi12)
  814   assert(OldInst.getOpcode() == TargetOpcode::G_FCONSTANT &&
  826   assert(OldInst.getOpcode() == TargetOpcode::G_FCONSTANT &&
  844   if (!isPreISelGenericOpcode(I.getOpcode())) {
  859   switch (I.getOpcode()) {
  897       unsigned NewOpc = selectSimpleExtOpc(I.getOpcode(), SrcSize);
  898       if (NewOpc == I.getOpcode())
  924       assert(I.getOpcode() == G_TRUNC && "Unsupported operand for G_ANYEXT");
 1093     const auto NewOpc = selectLoadStoreOpCode(I.getOpcode(), RegBank, ValSize);
lib/Target/ARM/ARMLegalizerInfo.cpp
  369   switch (MI.getOpcode()) {
  380         MI.getOpcode() == G_SREM ? RTLIB::SDIVREM_I32 : RTLIB::UDIVREM_I32;
lib/Target/ARM/ARMLoadStoreOptimizer.cpp
  217   unsigned Opcode = MI.getOpcode();
  437   switch (MI->getOpcode()) {
  492     unsigned Opc = MBBI->getOpcode();
  853   unsigned Opcode = First->getOpcode();
  982   unsigned Opcode = MI.getOpcode();
  997   unsigned Opcode = FirstMI->getOpcode();
 1185   switch (MI.getOpcode()) {
 1268   unsigned Opcode = MI->getOpcode();
 1392   unsigned Opcode = MI->getOpcode();
 1506   unsigned Opcode = MI.getOpcode();
 1566   unsigned Opcode = MI.getOpcode();
 1651   unsigned Opcode = MI->getOpcode();
 1784       unsigned Opcode = MBBI->getOpcode();
 1853     } else if (MBBI->getOpcode() == ARM::t2LDRDi8 ||
 1854                MBBI->getOpcode() == ARM::t2STRDi8) {
 1888         unsigned Opcode = Merged->getOpcode();
 1931       (MBBI->getOpcode() == ARM::BX_RET ||
 1932        MBBI->getOpcode() == ARM::tBX_RET ||
 1933        MBBI->getOpcode() == ARM::MOVPCLR)) {
 1939     unsigned Opcode = PrevMI.getOpcode();
 1973       MBBI->getOpcode() != ARM::tBX_RET)
 1978   if (Prev->getOpcode() != ARM::tMOVr || !Prev->definesRegister(ARM::LR))
 2154   unsigned Opcode = Op0->getOpcode();
 2246         = getLoadStoreMultipleOpcode(Op->getOpcode(), ARM_AM::ia);
 2416       int Opc = MI.getOpcode();
lib/Target/ARM/ARMLowOverheadLoops.cpp
  115   return MI.getOpcode() == ARM::t2DoLoopStart ||
  116          MI.getOpcode() == ARM::t2WhileLoopStart;
  150     return MI->getOpcode() == ARM::tMOVr &&
  268       if (MI.getOpcode() == ARM::t2LoopDec)
  270       else if (MI.getOpcode() == ARM::t2LoopEnd)
  294         if (MI.getOpcode() != ARM::t2LoopDec && MO.isReg() &&
  336   if (Start->getOpcode() == ARM::t2WhileLoopStart &&
  390           Def->getOpcode() == ARM::t2LoopEnd)
  449     unsigned Opc = Start->getOpcode() == ARM::t2DoLoopStart ?
  500     if (Start->getOpcode() == ARM::t2WhileLoopStart)
  526       else if (I.getOpcode() == ARM::t2LoopDec)
  528       else if (I.getOpcode() == ARM::t2LoopEnd)
  538       if (Start->getOpcode() == ARM::t2WhileLoopStart)
lib/Target/ARM/ARMMCInstLower.cpp
  125   OutMI.setOpcode(MI->getOpcode());
  129   switch (MI->getOpcode()) {
lib/Target/ARM/ARMMacroFusion.cpp
   25   switch(SecondMI.getOpcode()) {
   28     return FirstMI == nullptr || FirstMI->getOpcode() == ARM::AESE;
   31     return FirstMI == nullptr || FirstMI->getOpcode() == ARM::AESD;
   41   if ((FirstMI == nullptr || FirstMI->getOpcode() == ARM::MOVi16) &&
   42       SecondMI.getOpcode() == ARM::MOVTi16)
lib/Target/ARM/ARMOptimizeBarriersPass.cpp
   67       if (MI.getOpcode() == ARM::DMB) {
lib/Target/ARM/ARMRegisterBankInfo.cpp
  212   auto Opc = MI.getOpcode();
lib/Target/ARM/MLxExpansionPass.cpp
  218   if (TII->isFpMLxInstruction(DefMI->getOpcode())) {
  235     return isFpMulInstruction(DefMI->getOpcode()) || hasLoopHazard(MI);
  253     if (TII->canCauseFpMLxStall(NextMI->getOpcode())) {
lib/Target/ARM/MVEVPTBlockPass.cpp
  154   NewOpcode = VCMPOpcodeToVPT(CmpMI->getOpcode());
lib/Target/ARM/Thumb1FrameLowering.cpp
  133       unsigned Opc = Old.getOpcode();
  233   if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tPUSH) {
  335     while (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tMOVr)
  337     if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tPUSH) {
  456   if (MI.getOpcode() == ARM::tLDRspi && MI.getOperand(1).isFI() &&
  459   else if (MI.getOpcode() == ARM::tPOP) {
  461   } else if (MI.getOpcode() == ARM::tMOVr) {
  539       if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tBX_RET &&
  540           &MBB.front() != &*MBBI && std::prev(MBBI)->getOpcode() == ARM::tPOP) {
  619     if (MBBI != MBB.end() && MBBI->getOpcode() != ARM::tB)
  620       CanRestoreDirectly = (MBBI->getOpcode() == ARM::tBX_RET ||
  621                             MBBI->getOpcode() == ARM::tPOP_RET);
  625       assert(MBBI_prev->getOpcode() == ARM::tPOP);
  627       if ((*MBB.succ_begin())->begin()->getOpcode() == ARM::tBX_RET)
  635     if (!DoIt || MBBI->getOpcode() == ARM::tPOP_RET)
  703     if (PrevMBBI->getOpcode() == ARM::tPOP) {
  749   if (MBBI != MBB.end() && MBBI->getOpcode() == ARM::tPOP_RET) {
  973   if (Terminator != MBB.end() && Terminator->getOpcode() == ARM::tBX_RET) {
 1036           MI->getOpcode() == ARM::TCRETURNdi ||
 1037           MI->getOpcode() == ARM::TCRETURNri)
lib/Target/ARM/Thumb2ITBlockPass.cpp
  124   switch (MI->getOpcode()) {
lib/Target/ARM/Thumb2InstrInfo.cpp
   88       if (MBBI->getOpcode() == ARM::t2IT) {
  468   unsigned Opcode = MI.getOpcode();
  699   unsigned Opc = MI.getOpcode();
lib/Target/ARM/Thumb2SizeReduction.cpp
  263   switch(Def->getOpcode()) {
  323   if (Use->getOpcode() == ARM::t2MOVi ||
  324       Use->getOpcode() == ARM::t2MOVi16)
  375   unsigned Opc = MI->getOpcode();
  625   unsigned Opc = MI->getOpcode();
  749   if (MI->getOpcode() == ARM::t2MUL) {
 1006   unsigned Opcode = MI->getOpcode();
lib/Target/ARM/ThumbRegisterInfo.cpp
  369   unsigned Opcode = MI.getOpcode();
  504   unsigned Opcode = MI.getOpcode();
lib/Target/AVR/AVRExpandPseudoInsts.cpp
 1511   int Opcode = MBBI->getOpcode();
lib/Target/AVR/AVRFrameLowering.cpp
  109       (MBBI->getOpcode() == AVR::PUSHRr || MBBI->getOpcode() == AVR::PUSHWRr)) {
  188     int Opc = PI->getOpcode();
  315     unsigned Opcode = I->getOpcode();
  376   unsigned int Opcode = MI->getOpcode();
  462         int Opcode = MI.getOpcode();
lib/Target/AVR/AVRISelLowering.cpp
 1446   switch (MI.getOpcode()) {
 1570   if (I->getOpcode() == AVR::COPY) {
 1599   int Opc = MI.getOpcode();
lib/Target/AVR/AVRInstrInfo.cpp
   84   switch (MI.getOpcode()) {
  103   switch (MI.getOpcode()) {
  293     if (I->getOpcode() == AVR::RJMPk) {
  324     AVRCC::CondCodes BranchCode = getCondFromBranchOpc(I->getOpcode());
  451     if (I->getOpcode() != AVR::RJMPk &&
  452         getCondFromBranchOpc(I->getOpcode()) == AVRCC::COND_INVALID) {
  477   unsigned Opcode = MI.getOpcode();
  505   switch (MI.getOpcode()) {
lib/Target/AVR/AVRMCInstLower.cpp
   63   OutMI.setOpcode(MI.getOpcode());
lib/Target/AVR/AVRRegisterInfo.cpp
  100   int Opcode = MI.getOpcode();
  152   if (MI.getOpcode() == AVR::FRMIDX) {
lib/Target/AVR/AVRRelaxMemOperations.cpp
  126   int Opcode = MBBI->getOpcode();
lib/Target/BPF/BPFISelLowering.cpp
  621   unsigned Opc = MI.getOpcode();
lib/Target/BPF/BPFInstrInfo.cpp
  116   if (MI.getOpcode() == BPF::MEMCPY) {
  188     if (I->getOpcode() == BPF::JMP) {
  251     if (I->getOpcode() != BPF::JMP)
lib/Target/BPF/BPFMCInstLower.cpp
   48   OutMI.setOpcode(MI->getOpcode());
lib/Target/BPF/BPFMIChecking.cpp
  159       if (MI.getOpcode() != BPF::XADDW &&
  160           MI.getOpcode() != BPF::XADDD &&
  161           MI.getOpcode() != BPF::XADDW32)
lib/Target/BPF/BPFMIPeephole.cpp
   97       if (!PhiDef || PhiDef->isPHI() || PhiDef->getOpcode() == BPF::COPY)
  102   if (DefInsn->getOpcode() == BPF::COPY) {
  136       if (MI.getOpcode() == BPF::SRL_ri &&
  147             SllMI->getOpcode() != BPF::SLL_ri ||
  157             MovMI->getOpcode() != BPF::MOV_32_64)
  258       unsigned Opcode = MI.getOpcode();
  377       if (MI.getOpcode() == BPF::SRL_ri &&
  384             MI2->getOpcode() != BPF::SLL_ri ||
  393       } else if (MI.getOpcode() == BPF::AND_ri ||
  394                  MI.getOpcode() == BPF::AND_ri_32) {
  425               !TruncSizeCompatible(TruncSize, PhiDef->getOpcode())) {
  433       } else if (!TruncSizeCompatible(TruncSize, DefMI->getOpcode())) {
lib/Target/BPF/BPFMISimplifyPatchable.cpp
   88       if (MI.getOpcode() != BPF::LDD && MI.getOpcode() != BPF::LDW &&
   89           MI.getOpcode() != BPF::LDH && MI.getOpcode() != BPF::LDB &&
   90           MI.getOpcode() != BPF::LDW32 && MI.getOpcode() != BPF::LDH32 &&
   91           MI.getOpcode() != BPF::LDB32)
  108       if (DefInst->getOpcode() == BPF::LD_imm64) {
lib/Target/BPF/BPFRegisterInfo.cpp
   84   if (MI.getOpcode() == BPF::MOV_rr) {
  104   if (MI.getOpcode() == BPF::FI_ri) {
lib/Target/BPF/BTFDebug.cpp
 1023   if (MI->getOpcode() == BPF::LD_imm64)
 1124   if (MI->getOpcode() == BPF::LD_imm64) {
lib/Target/Hexagon/BitTracker.cpp
  723   unsigned Opc = MI.getOpcode();
lib/Target/Hexagon/HexagonBitSimplify.cpp
  434   assert(I.getOpcode() == TargetOpcode::REG_SEQUENCE);
 1000     unsigned Opc = MI->getOpcode();
 1082   unsigned Opc = MI.getOpcode();
 1142   unsigned Opc = MI.getOpcode();
 1250   unsigned Opc = MI.getOpcode();
 1310     if (MI->getOpcode() == TargetOpcode::COPY)
 1393   unsigned Opc = MI.getOpcode();
 1596     unsigned Opc = I->getOpcode();
 1665   unsigned Opc = MI.getOpcode();
 1723     unsigned Opc = I->getOpcode();
 1914   unsigned Opc = MI->getOpcode();
 1937   unsigned Opc = MI->getOpcode();
 2014   unsigned Opc = MI->getOpcode();
 2046   unsigned Opc = MI->getOpcode();
 2088   unsigned Opc = MI->getOpcode();
 2112   unsigned Opc = MI->getOpcode();
 2180   unsigned Opc = MI->getOpcode();
 2278       if (In->getOpcode() != Hexagon::A4_bitspliti)
 2325   unsigned Opc = MI->getOpcode();
 2532     if (MI->getOpcode() == ExtOpc) {
 2577   unsigned Opc = MI->getOpcode();
 2662   if (SR.Sub == 0 && InpDef->getOpcode() == Hexagon::C2_muxii) {
 2709     unsigned Opc = MI->getOpcode();
 2980   unsigned Opc = MI->getOpcode();
 3086     auto MIB = BuildMI(LB, At, DL, HII->get(SI->getOpcode()), NewDR);
 3275       unsigned Opc = DefPrehR->getOpcode();
lib/Target/Hexagon/HexagonBitTracker.cpp
  205   unsigned Opc = MI.getOpcode();
  988   unsigned Opc = BI.getOpcode();
 1063   unsigned Opc = MI.getOpcode();
lib/Target/Hexagon/HexagonCFGOptimizer.cpp
   85   switch (MI.getOpcode()) {
  128       int Opc = MI.getOpcode();
  177         if (MI.getOpcode() == Hexagon::J2_jumpt ||
  178             MI.getOpcode() == Hexagon::J2_jumpf) {
  189               IsUnconditionalJump(LayoutSucc->front().getOpcode())) {
  197               IsUnconditionalJump(JumpAroundTarget->back().getOpcode()) &&
lib/Target/Hexagon/HexagonConstExtenders.cpp
 1048   unsigned Opc = MI.getOpcode();
 1102   unsigned IdxOpc = getRegOffOpcode(ED.UseMI->getOpcode());
 1141   unsigned Opc = MI.getOpcode();
 1232   unsigned Opc = MI.getOpcode();
 1284     if (ED.UseMI->getOpcode() == Hexagon::A2_tfrsi) {
 1588   unsigned ExtOpc = MI.getOpcode();
 1706   unsigned ExtOpc = MI.getOpcode();
lib/Target/Hexagon/HexagonConstPropagation.cpp
 1926   unsigned Opc = MI.getOpcode();
 2255   unsigned Opc = BrI.getOpcode();
 2323   unsigned Opc = MI.getOpcode();
 2546   unsigned Opc = MI.getOpcode();
 2571     unsigned Opc = MI.getOpcode();
 2621   unsigned Opc = MI.getOpcode();
 2715   unsigned Opc = MI.getOpcode();
 2762   unsigned Opc = MI.getOpcode();
 2949   unsigned Opc = MI.getOpcode();
 3136   if (BrI.getOpcode() == Hexagon::J2_jump)
lib/Target/Hexagon/HexagonCopyToCombine.cpp
  129   switch (MI.getOpcode()) {
  173   if (I.getOpcode() == Hexagon::TFRI64_V4 ||
  174       I.getOpcode() == Hexagon::A2_tfrsi) {
  186   unsigned HiOpc = HighRegInst.getOpcode();
  187   unsigned LoOpc = LowRegInst.getOpcode();
lib/Target/Hexagon/HexagonEarlyIfConv.cpp
  250   unsigned Opc = T1I->getOpcode();
  262   assert(T2I == B->end() || T2I->getOpcode() == Hexagon::J2_jump);
  374     unsigned Opc = MI.getOpcode();
  664   unsigned Opc = MI->getOpcode();
  691   if (MI->getOpcode() == TargetOpcode::LIFETIME_END)
  717   unsigned Opc = MI->getOpcode();
lib/Target/Hexagon/HexagonExpandCondsets.cpp
  274   unsigned Opc = MI.getOpcode();
  871   unsigned Opc = MI.getOpcode();
  937   unsigned Opc = TfrI.getOpcode();
 1074     unsigned Opc = I->getOpcode();
lib/Target/Hexagon/HexagonFixupHwLoops.cpp
   85   return MI.getOpcode() == Hexagon::J2_loop0r ||
   86          MI.getOpcode() == Hexagon::J2_loop0i ||
   87          MI.getOpcode() == Hexagon::J2_loop1r ||
   88          MI.getOpcode() == Hexagon::J2_loop1i;
  175   switch (MII->getOpcode()) {
lib/Target/Hexagon/HexagonFrameLowering.cpp
  288       unsigned Opc = MI->getOpcode();
  340     unsigned RetOpc = I->getOpcode();
  535       if (!RetI || isRestoreCall(RetI->getOpcode()))
  614       if (MI.getOpcode() == Hexagon::PS_alloca)
  618     assert((MI->getOpcode() == Hexagon::PS_alloca) && "Expected alloca");
  667   unsigned RetOpc = RetI ? RetI->getOpcode() : 0;
  704     unsigned COpc = PrevIt->getOpcode();
  818     if (!isRestoreCall(RetI.getOpcode()))
  852         if (I.getOpcode() == Hexagon::S2_allocframe)
  860         if (T->getOpcode() == Hexagon::S2_allocframe)
 1346   unsigned Opc = MI.getOpcode();
 1597   unsigned Opc = MI->getOpcode();
 1630   unsigned Opc = MI->getOpcode();
 1894       unsigned Opc = MI->getOpcode();
 2402       if (I.getOpcode() == Hexagon::PS_aligna)
 2504       switch (MI.getOpcode()) {
lib/Target/Hexagon/HexagonGenInsert.cpp
 1465     unsigned Opc = MI->getOpcode();
lib/Target/Hexagon/HexagonGenMux.cpp
  161   unsigned Opc = MI->getOpcode();
  239     unsigned Opc = MI->getOpcode();
lib/Target/Hexagon/HexagonGenPredicate.cpp
  187   unsigned Opc = MI->getOpcode();
  210       unsigned Opc = MI->getOpcode();
  256   unsigned Opc = DefI->getOpcode();
  331     unsigned DefOpc = DefI->getOpcode();
  370   unsigned Opc = MI->getOpcode();
  470       if (MI.getOpcode() != TargetOpcode::COPY)
lib/Target/Hexagon/HexagonHardwareLoops.cpp
  651   unsigned CondOpc = CondI->getOpcode();
  738     if (StartValInstr && (StartValInstr->getOpcode() == Hexagon::A2_tfrsi ||
  739                           StartValInstr->getOpcode() == Hexagon::A2_tfrpi))
  744     if (EndValInstr && (EndValInstr->getOpcode() == Hexagon::A2_tfrsi ||
  745                         EndValInstr->getOpcode() == Hexagon::A2_tfrpi))
  929       if (EndValInstr->getOpcode() == Hexagon::A2_addi &&
 1285   if (LastI->getOpcode() == Hexagon::J2_jumpt ||
 1286       LastI->getOpcode() == Hexagon::J2_jumpf) {
 1469         getComparisonKind(MI->getOpcode(), nullptr, nullptr, 0);
 1515   unsigned DOpc = DI->getOpcode();
 1592   BuildMI(B, DI, DL, TII->get(DI->getOpcode()), NewR).addImm(Val);
 1788             nonIndI->getOpcode() == Hexagon::A2_addi &&
 1804           getComparisonKind(PredDef->getOpcode(), nullptr, nullptr, 0);
 1822         if (!isImmValidForOpcode(PredDef->getOpcode(), CmpImm))
lib/Target/Hexagon/HexagonHazardRecognizer.cpp
   41   if (!MI || TII->isZeroCost(MI->getOpcode()))
  119   if (TII->isZeroCost(MI->getOpcode()))
lib/Target/Hexagon/HexagonInstrInfo.cpp
  167       unsigned Opc = I->getOpcode();
  241   switch (MI.getOpcode()) {
  289   switch (MI.getOpcode()) {
  427   bool JumpToBlock = I->getOpcode() == Hexagon::J2_jump &&
  459   int LastOpcode = LastInst->getOpcode();
  460   int SecLastOpcode = SecondLastInst ? SecondLastInst->getOpcode() : 0;
  483       Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
  489       Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
  496       Cond.push_back(MachineOperand::CreateImm(LastInst->getOpcode()));
  513     Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode()));
  524     Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode()));
  544     Cond.push_back(MachineOperand::CreateImm(SecondLastInst->getOpcode()));
  569     if (Count && (I->getOpcode() == Hexagon::J2_jump))
  693     TripCount = Loop->getOpcode() == Hexagon::J2_loop0r
  731     if (Loop->getOpcode() == Hexagon::J2_loop0i ||
  732         Loop->getOpcode() == Hexagon::J2_loop1i) {
  759   if (I != LoopBB->end() && isEndLoopN(I->getOpcode())) {
  762         LoopBB, I->getOpcode(), I->getOperand(0).getMBB(), VisitedBBs);
 1027   unsigned Opc = MI.getOpcode();
 1437   unsigned Opc = MI.getOpcode();
 1567   int Opc = MI.getOpcode();
 1655     switch (MI.getOpcode()) {
 1766   unsigned Opc = MI.getOpcode();
 1937   } else if (MI.getOpcode() == Hexagon::A2_addi) {
 2072   switch (MI.getOpcode()) {
 2122   switch (MI.getOpcode()) {
 2183   switch (MI.getOpcode()) {
 2211   unsigned Opcode = MI.getOpcode();
 2227   switch (MI.getOpcode()) {
 2238   switch (MI.getOpcode()) {
 2252   switch (MI.getOpcode()) {
 2276   switch (MI.getOpcode()) {
 2330   switch (MI.getOpcode()) {
 2355   unsigned Opcode = MI.getOpcode();
 2367   switch (MI.getOpcode()) {
 2482   return MI.getOpcode() == Hexagon::SAVE_REGISTERS_CALL_V4 ||
 2483          MI.getOpcode() == Hexagon::SAVE_REGISTERS_CALL_V4_EXT ||
 2484          MI.getOpcode() == Hexagon::SAVE_REGISTERS_CALL_V4_PIC ||
 2485          MI.getOpcode() == Hexagon::SAVE_REGISTERS_CALL_V4_EXT_PIC;
 2489   switch (MI.getOpcode()) {
 2572   switch (MI.getOpcode()) {
 2624     if (MI2.getOpcode() == Hexagon::V6_vS32b_pi)
 2830   const uint64_t F = get(MI.getOpcode()).TSFlags;
 2852   switch (MI.getOpcode()) {
 2953   if (Second.mayStore() && First.getOpcode() == Hexagon::S2_allocframe) {
 2977   unsigned Opc = CallMI.getOpcode();
 2994   if (Hexagon::getRegForm(MI.getOpcode()) >= 0)
 3004       NonExtOpcode = Hexagon::changeAddrMode_abs_io(MI.getOpcode());
 3010       NonExtOpcode = Hexagon::changeAddrMode_io_rr(MI.getOpcode());
 3013       NonExtOpcode = Hexagon::changeAddrMode_ur_rr(MI.getOpcode());
 3026   return Hexagon::getRealHWInstr(MI.getOpcode(),
 3107   switch (MI.getOpcode()) {
 3295   switch (MI.getOpcode()) {
 3383   if ((GA.getOpcode() != Hexagon::C2_cmpeqi) ||
 3384       (GB.getOpcode() != Hexagon::J2_jumptnew))
 3418   switch (MI.getOpcode()) {
 3434   switch (MI.getOpcode()) {
 3531   int NVOpcode = Hexagon::getNewValueOpcode(MI.getOpcode());
 3535   switch (MI.getOpcode()) {
 3538       std::to_string(MI.getOpcode()));
 3645   switch (MI.getOpcode()) {
 3659   switch (MI.getOpcode()) {
 3666   int NewOpcode = Hexagon::getPredNewOpcode(MI.getOpcode());
 3673   int NewOp = MI.getOpcode();
 3728   switch (MI.getOpcode()) {
 4070   return Hexagon::getRealHWInstr(MI.getOpcode(), Hexagon::InstrType_Real);
 4174   switch (MI.getOpcode()) {
 4238   short NonExtOpcode = Hexagon::getRegForm(MI.getOpcode());
 4246       return Hexagon::changeAddrMode_abs_io(MI.getOpcode());
 4248       return Hexagon::changeAddrMode_io_rr(MI.getOpcode());
 4250       return Hexagon::changeAddrMode_ur_rr(MI.getOpcode());
 4280   return Hexagon::getRealHWInstr(MI.getOpcode(), Hexagon::InstrType_Pseudo);
 4284   return Hexagon::getRegForm(MI.getOpcode());
 4305   if (BranchRelaxAsmLarge && MI.getOpcode() == Hexagon::INLINEASM) {
 4371   unsigned NewOpcode = getInvertedPredicatedOpcode(MI.getOpcode());
 4398                       << getName(NewMI->getOpcode())
 4410   MI.setDesc(get(getInvertedPredicatedOpcode(MI.getOpcode())));
lib/Target/Hexagon/HexagonInstrInfo.h
  494     return changeAddrMode_abs_io(MI.getOpcode());
  497     return changeAddrMode_io_abs(MI.getOpcode());
  500     return changeAddrMode_io_rr(MI.getOpcode());
  503     return changeAddrMode_rr_io(MI.getOpcode());
  506     return changeAddrMode_rr_ur(MI.getOpcode());
  509     return changeAddrMode_ur_rr(MI.getOpcode());
lib/Target/Hexagon/HexagonMCInstLower.cpp
   99   if (MI->getOpcode() == Hexagon::ENDLOOP0) {
  103   if (MI->getOpcode() == Hexagon::ENDLOOP1) {
  108   MCI->setOpcode(MI->getOpcode());
  109   assert(MCI->getOpcode() == static_cast<unsigned>(MI->getOpcode()) &&
lib/Target/Hexagon/HexagonMachineScheduler.cpp
  103   switch (SU->getInstr()->getOpcode()) {
  157   switch (SU->getInstr()->getOpcode()) {
lib/Target/Hexagon/HexagonNewValueJump.cpp
  136   if (II->getOpcode() == TargetOpcode::KILL)
  220     if (MII->getOpcode() == TargetOpcode::KILL ||
  221         MII->getOpcode() == TargetOpcode::PHI ||
  222         MII->getOpcode() == TargetOpcode::COPY)
  229     if (MII->getOpcode() == Hexagon::LDriw_pred ||
  230         MII->getOpcode() == Hexagon::STriw_pred)
  257     switch (MI.getOpcode()) {
  295       if (def->getOpcode() == TargetOpcode::COPY)
  347   switch (MI->getOpcode()) {
  429   switch (MI.getOpcode()) {
  506       if (!foundJump && (MI.getOpcode() == Hexagon::J2_jumpt ||
  507                          MI.getOpcode() == Hexagon::J2_jumptpt ||
  508                          MI.getOpcode() == Hexagon::J2_jumpf ||
  509                          MI.getOpcode() == Hexagon::J2_jumpfpt ||
  510                          MI.getOpcode() == Hexagon::J2_jumptnewpt ||
  511                          MI.getOpcode() == Hexagon::J2_jumptnew ||
  512                          MI.getOpcode() == Hexagon::J2_jumpfnewpt ||
  513                          MI.getOpcode() == Hexagon::J2_jumpfnew)) {
  551         if (MI.getOpcode() == Hexagon::J2_jumpf ||
  552             MI.getOpcode() == Hexagon::J2_jumpfnewpt ||
  553             MI.getOpcode() == Hexagon::J2_jumpfnew) {
  631             unsigned COp = cmpInstr->getOpcode();
lib/Target/Hexagon/HexagonOptAddrMode.cpp
  344   return HII->isValidOffset(MI->getOpcode(), Offset, HRI, false);
  451     } else if (MI.getOpcode() == Hexagon::S2_addasl_rrri) {
  677   else if (UseMI->getOpcode() == Hexagon::S2_addasl_rrri)
  695     if ((MI->getOpcode() != Hexagon::A2_tfrsi ||
  697         (MI->getOpcode() != Hexagon::A2_addi ||
  701     LLVM_DEBUG(dbgs() << "[Analyzing " << HII->getName(MI->getOpcode())
  721     if (MI->getOpcode() == Hexagon::A2_addi) {
lib/Target/Hexagon/HexagonPeephole.cpp
  135       if (!DisableOptSZExt && MI.getOpcode() == Hexagon::A2_sxtw) {
  153       if (!DisableOptExtTo64 && MI.getOpcode() == Hexagon::A4_combineir) {
  170       if (MI.getOpcode() == Hexagon::S2_lsr_i_p) {
  184       if (!DisablePNotP && MI.getOpcode() == Hexagon::C2_not) {
  251                 int NewOp = QII->getInvertedPredicatedOpcode(MI.getOpcode());
  261           unsigned Op = MI.getOpcode();
lib/Target/Hexagon/HexagonRDFOpt.cpp
  117   unsigned Opc = MI->getOpcode();
  222   unsigned Opc = MI.getOpcode();
lib/Target/Hexagon/HexagonRegisterInfo.cpp
  203   unsigned Opc = MI.getOpcode();
lib/Target/Hexagon/HexagonSplitConst32AndConst64.cpp
   76       unsigned Opc = MI.getOpcode();
lib/Target/Hexagon/HexagonSplitDouble.cpp
  168   unsigned Opc = MI->getOpcode();
  317   unsigned Opc = MI->getOpcode();
  406   switch (DefI->getOpcode()) {
  509   while (CmpI->getOpcode() == Hexagon::C2_not)
  549       if (UseI->getOpcode() != Hexagon::A2_addp)
  631   unsigned OrigOpc = MI->getOpcode();
  795   unsigned Opc = MI->getOpcode();
 1000   unsigned Opc = MI->getOpcode();
lib/Target/Hexagon/HexagonStoreWidening.cpp
  128   unsigned OpC = MI->getOpcode();
  155   unsigned Opc = MI->getOpcode();
lib/Target/Hexagon/HexagonSubtarget.cpp
  183   if (Inst1.getInstr()->getOpcode() != Hexagon::A2_tfrpi)
lib/Target/Hexagon/HexagonVExtract.cpp
   78     if (DI->getOpcode() == Hexagon::A2_tfrsi) {
  111       unsigned Opc = MI.getOpcode();
  141       assert(ExtI->getOpcode() == Hexagon::V6_extractw);
lib/Target/Hexagon/HexagonVLIWPacketizer.cpp
  325   return MI.getOpcode() == Hexagon::J2_jump;
  329   switch (MI.getOpcode()) {
  470   unsigned Opc = MI.getOpcode();
  491   unsigned Opc = MI.getOpcode();
  534   if (!HII->isValidOffset(MI.getOpcode(), Offset+Incr, HRI))
  608   unsigned Opc = MI.getOpcode();
 1080   if (MI.getOpcode() == Hexagon::A2_nop)
 1112   switch (MI.getOpcode()) {
 1181   unsigned Opc = MI.getOpcode();
 1373       if (PI->getOpcode() == Hexagon::S2_allocframe || PI->mayStore() ||
 1525           (J.getOpcode() != Hexagon::S2_allocframe &&
 1526            I.getOpcode() != Hexagon::S2_allocframe) &&
 1527           (J.getOpcode() != Hexagon::L2_deallocframe &&
 1528            I.getOpcode() != Hexagon::L2_deallocframe) &&
 1555     if (DepType == SDep::Data && J.getOpcode() == Hexagon::S2_allocframe) {
 1556       unsigned Opc = I.getOpcode();
 1673     unsigned Opc = MJ->getOpcode();
lib/Target/Hexagon/RDFCopy.cpp
   41   unsigned Opc = MI->getOpcode();
lib/Target/Hexagon/RDFGraph.cpp
  226   unsigned Opc = MI.getOpcode();
lib/Target/Lanai/LanaiAsmPrinter.cpp
  150   assert((MI->getOpcode() == Lanai::CALL || MI->getOpcode() == Lanai::CALLR) &&
  174   if (MI->getOpcode() == Lanai::CALL) {
lib/Target/Lanai/LanaiDelaySlotFiller.cpp
  103       if (I->getOpcode() == Lanai::RET) {
  108         assert(RI->getOpcode() == Lanai::LDW_RI && RI->getOperand(0).isReg() &&
  114         assert(RI->getOpcode() == Lanai::ADD_I_LO &&
lib/Target/Lanai/LanaiFrameLowering.cpp
   73       if (MI.getOpcode() == Lanai::ADJDYNALLOC) {
lib/Target/Lanai/LanaiInstrInfo.cpp
  180   switch (MI.getOpcode()) {
  208   if (CmpI->getOpcode() == Lanai::SFSUB_F_RR &&
  209       OI->getOpcode() == Lanai::SUB_R &&
  216   if (((CmpI->getOpcode() == Lanai::SFSUB_F_RI_LO &&
  217         OI->getOpcode() == Lanai::SUB_I_LO) ||
  218        (CmpI->getOpcode() == Lanai::SFSUB_F_RI_HI &&
  219         OI->getOpcode() == Lanai::SUB_I_HI)) &&
  311     if (CmpInstr.getOpcode() == Lanai::SFSUB_F_RI_LO)
  348   if (flagSettingOpcodeVariant(MI->getOpcode()) != Lanai::NOP) {
  429     MI->setDesc(get(flagSettingOpcodeVariant(MI->getOpcode())));
  442   assert(MI.getOpcode() == Lanai::SELECT && "unknown select instruction");
  496   assert(MI.getOpcode() == Lanai::SELECT && "unknown select instruction");
  590     if (Instruction->getOpcode() == Lanai::BT) {
  618     unsigned Opcode = Instruction->getOpcode();
  703     if (Instruction->getOpcode() != Lanai::BT &&
  704         Instruction->getOpcode() != Lanai::BRCC) {
  719   if (MI.getOpcode() == Lanai::LDW_RI)
  730   if (MI.getOpcode() == Lanai::LDW_RI) {
  748   if (MI.getOpcode() == Lanai::SW_RI)
  768   switch (LdSt.getOpcode()) {
  800   switch (LdSt.getOpcode()) {
lib/Target/Lanai/LanaiMCInstLower.cpp
   94   OutMI.setOpcode(MI->getOpcode());
lib/Target/Lanai/LanaiMemAluCombiner.cpp
  155   if (mergedOpcode(MI.getOpcode(), false) == 0)
  251   LPAC::AluCode AluOpcode = mergedAluCode(AluInstr->getOpcode());
  252   unsigned NewOpc = mergedOpcode(MemInstr->getOpcode(), AluOffset.isImm());
  304     if (AluIter->getOpcode() != Lanai::ADD_I_LO)
  333   bool IsSpls = isSpls(MemInstr->getOpcode());
lib/Target/Lanai/LanaiRegisterInfo.cpp
  170   if ((isSPLSOpcode(MI.getOpcode()) && !isInt<10>(Offset)) ||
  200     if (MI.getOpcode() == Lanai::ADD_I_LO) {
  210     if (isSPLSOpcode(MI.getOpcode()) || isRMOpcode(MI.getOpcode())) {
  211       MI.setDesc(TII->get(getRRMOpcodeVariant(MI.getOpcode())));
  232   if ((Offset < 0) && isALUArithLoOpcode(MI.getOpcode())) {
  233     unsigned NewOpcode = getOppositeALULoOpcode(MI.getOpcode());
lib/Target/MSP430/MSP430BranchSelector.cpp
  124       if (MI->getOpcode() != MSP430::JCC && MI->getOpcode() != MSP430::JMP) {
  146       if (MI->getOpcode() == MSP430::JCC && std::next(MI) != EE) {
  183       if (MI->getOpcode() == MSP430::JCC) {
lib/Target/MSP430/MSP430FrameLowering.cpp
   82   while (MBBI != MBB.end() && (MBBI->getOpcode() == MSP430::PUSH16r))
  114   unsigned RetOpcode = MBBI->getOpcode();
  142     unsigned Opc = PI->getOpcode();
  246       if (Old.getOpcode() == TII.getCallFrameSetupOpcode()) {
  252         assert(Old.getOpcode() == TII.getCallFrameDestroyOpcode());
  270   } else if (I->getOpcode() == TII.getCallFrameDestroyOpcode()) {
lib/Target/MSP430/MSP430ISelLowering.cpp
 1420   switch (MI.getOpcode()) {
 1454     unsigned RrcOpc = MI.getOpcode() == MSP430::Rrcl16
 1542   unsigned Opc = MI.getOpcode();
lib/Target/MSP430/MSP430InstrInfo.cpp
  117     if (I->getOpcode() != MSP430::JMP &&
  118         I->getOpcode() != MSP430::JCC &&
  119         I->getOpcode() != MSP430::Br &&
  120         I->getOpcode() != MSP430::Bm)
  199     if (I->getOpcode() == MSP430::Br ||
  200         I->getOpcode() == MSP430::Bm)
  204     if (I->getOpcode() == MSP430::JMP) {
  230     assert(I->getOpcode() == MSP430::JCC && "Invalid conditional branch");
lib/Target/MSP430/MSP430MCInstLower.cpp
  116   OutMI.setOpcode(MI->getOpcode());
lib/Target/MSP430/MSP430RegisterInfo.cpp
  129   if (MI.getOpcode() == MSP430::ADDframe) {
lib/Target/Mips/MicroMipsSizeReduction.cpp
  355       !(MI->getOpcode() == Mips::LW || MI->getOpcode() == Mips::LW_MM ||
  356         MI->getOpcode() == Mips::LW16_MM))
  360       !(MI->getOpcode() == Mips::SW || MI->getOpcode() == Mips::SW_MM ||
  361         MI->getOpcode() == Mips::SW16_MM))
  418   unsigned Opcode = MI->getOpcode();
  468   bool ReduceToLwp = (MI1->getOpcode() == Mips::LW) ||
  469                      (MI1->getOpcode() == Mips::LW_MM) ||
  470                      (MI1->getOpcode() == Mips::LW16_MM);
  633   if (MI2->getOpcode() != Entry.WideOpc())
lib/Target/Mips/Mips16ISelLowering.cpp
  166   switch (MI.getOpcode()) {
lib/Target/Mips/Mips16RegisterInfo.cpp
  133       !Mips16InstrInfo::validImmediate(MI.getOpcode(), FrameReg, Offset)) {
lib/Target/Mips/MipsAsmPrinter.cpp
  190   unsigned Opc = MI->getOpcode();
  260     if (I->getOpcode() == Mips::PseudoReturn ||
  261         I->getOpcode() == Mips::PseudoReturn64 ||
  262         I->getOpcode() == Mips::PseudoIndirectBranch ||
  263         I->getOpcode() == Mips::PseudoIndirectBranch64 ||
  264         I->getOpcode() == Mips::TAILCALLREG ||
  265         I->getOpcode() == Mips::TAILCALLREG64) {
  278         && !isLongBranchPseudo(I->getOpcode()))
  728   switch (MI->getOpcode()) {
lib/Target/Mips/MipsBranchExpansion.cpp
  337   unsigned NewOpc = TII->getOppositeBranchOpc(Br->getOpcode());
  759             std::next(Iit)->getOpcode() != Mips::NOP) {
  806             !TII->isBranchOffsetInRange(Br->getOpcode(), Offset)) {
lib/Target/Mips/MipsCallLowering.cpp
  638   if (MIB->getOpcode() == Mips::JALRPseudo) {
lib/Target/Mips/MipsConstantIslandPass.cpp
   89   switch (MI->getOpcode()) {
  623   assert(CPEMI.getOpcode() == Mips::CONSTPOOL_ENTRY);
  662       int Opc = MI.getOpcode();
 1006   if (PredMI->getOpcode() == Mips::Bimm16)
 1547   unsigned Opcode = MI->getOpcode();
lib/Target/Mips/MipsDelaySlotFiller.cpp
  640           DSI->setDesc(TII->get(getEquivalentCallShort(DSI->getOpcode())));
  709       if ((isBasePlusOffsetMemoryAccess(CurrI->getOpcode(), &AddrIdx) &&
  717     unsigned Opcode = (*Slot).getOpcode();
lib/Target/Mips/MipsExpandPseudo.cpp
   88       I->getOpcode() == Mips::ATOMIC_CMP_SWAP_I8_POSTRA ? Mips::SEB : Mips::SEH;
  182         I->getOpcode() == Mips::ATOMIC_CMP_SWAP_I16_POSTRA ? 16 : 24;
  207       I->getOpcode() == Mips::ATOMIC_CMP_SWAP_I32_POSTRA ? 4 : 8;
  330   switch (I->getOpcode()) {
  526   switch (I->getOpcode()) {
  626   switch (MBBI->getOpcode()) {
lib/Target/Mips/MipsFrameLowering.cpp
  144     if (I->getOpcode() == Mips::ADJCALLSTACKDOWN)
lib/Target/Mips/MipsISelLowering.cpp
 1296   switch (MI.getOpcode()) {
 1431   switch (MI.getOpcode()) {
 1599   switch (MI.getOpcode()) {
 1724   assert((MI.getOpcode() == Mips::ATOMIC_CMP_SWAP_I32 ||
 1725           MI.getOpcode() == Mips::ATOMIC_CMP_SWAP_I64) &&
 1728   const unsigned Size = MI.getOpcode() == Mips::ATOMIC_CMP_SWAP_I32 ? 4 : 8;
 1736   unsigned AtomicOp = MI.getOpcode() == Mips::ATOMIC_CMP_SWAP_I32
 1807   unsigned AtomicOp = MI.getOpcode() == Mips::ATOMIC_CMP_SWAP_I8
 2967   switch (MI.getOpcode()) {
lib/Target/Mips/MipsInstrInfo.cpp
  169     if (!getAnalyzableBrOpc(I->getOpcode()))
  208   unsigned LastOpc = LastInst->getOpcode();
  227     SecondLastOpc = getAnalyzableBrOpc(SecondLastInst->getOpcode());
  440   unsigned Opcode = I->getOpcode();
  577   switch (MI.getOpcode()) {
  690   switch (MI.getOpcode()) {
  762   switch (MI.getOpcode()) {
lib/Target/Mips/MipsInstructionSelector.cpp
  106     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  171   unsigned Opc = I.getOpcode();
  235   if (!isPreISelGenericOpcode(I.getOpcode())) {
  242   if (I.getOpcode() == Mips::G_MUL &&
  264   switch (I.getOpcode()) {
  389     if (NewOpc == I.getOpcode())
  402     if (Addr->getOpcode() == G_GEP) {
  404       if (Offset->getOpcode() == G_CONSTANT) {
  425     bool IsSigned = I.getOpcode() == G_SREM || I.getOpcode() == G_SDIV;
  426     bool IsDiv = I.getOpcode() == G_UDIV || I.getOpcode() == G_SDIV;
lib/Target/Mips/MipsLegalizerInfo.cpp
  254   switch (MI.getOpcode()) {
lib/Target/Mips/MipsMCInstLower.cpp
  298   switch (MI->getOpcode()) {
  321   OutMI.setOpcode(MI->getOpcode());
lib/Target/Mips/MipsPreLegalizerCombiner.cpp
   41   switch (MI.getOpcode()) {
lib/Target/Mips/MipsRegisterBankInfo.cpp
  178     if (NonCopyInstr->getOpcode() == TargetOpcode::COPY &&
  200   while (Ret->getOpcode() == TargetOpcode::COPY &&
  214   while (Ret->getOpcode() == TargetOpcode::COPY &&
  222   assert(isAmbiguous(MI->getOpcode()) &&
  227   if (MI->getOpcode() == TargetOpcode::G_LOAD)
  230   if (MI->getOpcode() == TargetOpcode::G_STORE)
  233   if (MI->getOpcode() == TargetOpcode::G_PHI) {
  240   if (MI->getOpcode() == TargetOpcode::G_SELECT) {
  247   if (MI->getOpcode() == TargetOpcode::G_IMPLICIT_DEF)
  253   assert(isAmbiguous(MI->getOpcode()) && "Visiting non-Ambiguous opcode.\n");
  292     if (isDefUse ? isFloatingPointOpcodeUse(AdjMI->getOpcode())
  293                  : isFloatingPointOpcodeDef(AdjMI->getOpcode())) {
  300     if (AdjMI->getOpcode() == TargetOpcode::COPY) {
  307     if (!isAmbiguous(AdjMI->getOpcode())) {
  403   unsigned Opc = MI.getOpcode();
  407   if (MI.getOpcode() != TargetOpcode::G_PHI) {
  628   switch (MI.getOpcode()) {
  674   switch (MI.getOpcode()) {
  687       if (NewMI->getOpcode() == TargetOpcode::G_UNMERGE_VALUES)
  691       else if (NewMI->getOpcode() == TargetOpcode::G_MERGE_VALUES)
lib/Target/Mips/MipsSEFrameLowering.cpp
  116   switch(I->getOpcode()) {
lib/Target/Mips/MipsSEISelDAGToDAG.cpp
   87   if ((MI.getOpcode() == Mips::ADDiu) &&
   93   } else if ((MI.getOpcode() == Mips::DADDiu) &&
  161       switch (MI.getOpcode()) {
lib/Target/Mips/MipsSEISelLowering.cpp
 1068   switch (MI.getOpcode()) {
lib/Target/Mips/MipsSEInstrInfo.cpp
   47   unsigned Opc = MI.getOpcode();
   69   unsigned Opc = MI.getOpcode();
  187   switch (MI.getOpcode()) {
  206   switch (MI.getOpcode()) {
lib/Target/Mips/MipsSERegisterInfo.cpp
  214         getLoadStoreOffsetSizeInBits(MI.getOpcode(), MI.getOperand(OpNo - 1));
  215     const Align OffsetAlign(getLoadStoreOffsetAlign(MI.getOpcode()));
lib/Target/NVPTX/NVPTXAsmPrinter.cpp
  213   OutMI.setOpcode(MI->getOpcode());
  215   if (MI->getOpcode() == NVPTX::CALL_PROTOTYPE) {
lib/Target/NVPTX/NVPTXInstrInfo.cpp
  110     if (LastInst.getOpcode() == NVPTX::GOTO) {
  113     } else if (LastInst.getOpcode() == NVPTX::CBranch) {
  131   if (SecondLastInst.getOpcode() == NVPTX::CBranch &&
  132       LastInst.getOpcode() == NVPTX::GOTO) {
  141   if (SecondLastInst.getOpcode() == NVPTX::GOTO &&
  142       LastInst.getOpcode() == NVPTX::GOTO) {
  161   if (I->getOpcode() != NVPTX::GOTO && I->getOpcode() != NVPTX::CBranch)
  172   if (I->getOpcode() != NVPTX::CBranch)
lib/Target/NVPTX/NVPTXPeephole.cpp
   77   if (Root.getOpcode() != NVPTX::cvta_to_local_yes_64 &&
   78       Root.getOpcode() != NVPTX::cvta_to_local_yes)
   90       (GenericAddrDef->getOpcode() != NVPTX::LEA_ADDRi64 &&
   91        GenericAddrDef->getOpcode() != NVPTX::LEA_ADDRi)) {
  112       BuildMI(MF, Root.getDebugLoc(), TII->get(Prev.getOpcode()),
lib/Target/NVPTX/NVPTXProxyRegErasure.cpp
   71       switch (MI.getOpcode()) {
lib/Target/NVPTX/NVPTXReplaceImageHandles.cpp
  142   switch (TexHandleDef.getOpcode()) {
lib/Target/PowerPC/PPCAsmPrinter.cpp
  364         MII->getOpcode() == PPC::DBG_VALUE ||
  365         MII->getOpcode() == TargetOpcode::PATCHPOINT ||
  366         MII->getOpcode() == TargetOpcode::STACKMAP)
  571   switch (MI->getOpcode()) {
 1134         MI->getOpcode() == PPC::MFOCRF ? PPC::MFCR : PPC::MFCR8;
 1148         MI->getOpcode() == PPC::MTOCRF ? PPC::MTCRF : PPC::MTCRF8;
 1170       unsigned OpNum = (MI->getOpcode() == PPC::STD) ? 2 : 1;
 1188   switch (MI->getOpcode()) {
lib/Target/PowerPC/PPCBranchSelector.cpp
  304         if (I->getOpcode() == PPC::BCC && !I->getOperand(2).isImm())
  306         else if ((I->getOpcode() == PPC::BC || I->getOpcode() == PPC::BCn) &&
  309         else if ((I->getOpcode() == PPC::BDNZ8 || I->getOpcode() == PPC::BDNZ ||
  310                   I->getOpcode() == PPC::BDZ8  || I->getOpcode() == PPC::BDZ) &&
  333         if (I->getOpcode() == PPC::BCC) {
  344         } else if (I->getOpcode() == PPC::BC) {
  347         } else if (I->getOpcode() == PPC::BCn) {
  350         } else if (I->getOpcode() == PPC::BDNZ) {
  352         } else if (I->getOpcode() == PPC::BDNZ8) {
  354         } else if (I->getOpcode() == PPC::BDZ) {
  356         } else if (I->getOpcode() == PPC::BDZ8) {
lib/Target/PowerPC/PPCCTRLoops.cpp
  148     unsigned Opc = I->getOpcode();
  210       unsigned Opc = MII->getOpcode();
lib/Target/PowerPC/PPCEarlyReturn.cpp
   60           (I->getOpcode() != PPC::BLR && I->getOpcode() != PPC::BLR8) ||
   76           if (J->getOpcode() == PPC::B) {
   80               BuildMI(**PI, J, J->getDebugLoc(), TII->get(I->getOpcode()))
   88           } else if (J->getOpcode() == PPC::BCC) {
  102           } else if (J->getOpcode() == PPC::BC || J->getOpcode() == PPC::BCn) {
  108                   TII->get(J->getOpcode() == PPC::BC ? PPC::BCLR : PPC::BCLRn))
lib/Target/PowerPC/PPCExpandISEL.cpp
   78     return (MI.getOpcode() == PPC::ISEL || MI.getOpcode() == PPC::ISEL8);
   83     return (MI.getOpcode() == PPC::ISEL8);
lib/Target/PowerPC/PPCFastISel.cpp
 2302   switch(MI->getOpcode()) {
lib/Target/PowerPC/PPCFrameLowering.cpp
  300   assert(MBBI != Entry->end() && MBBI->getOpcode() == PPC::MTVRSAVE);
  312         if (MBBI->getOpcode() == PPC::MTVRSAVE) {
  328     assert(MBBI->getOpcode() == PPC::MFVRSAVE && "VRSAVE instrs wandered?");
  801       if (MBBI->getOpcode() == PPC::UPDATE_VRSAVE) {
 1466     unsigned RetOpcode = MBBI->getOpcode();
 1697     unsigned RetOpcode = MBBI->getOpcode();
 1734   unsigned RetOpcode = MBBI->getOpcode();
 2313       I->getOpcode() == PPC::ADJCALLSTACKUP) {
lib/Target/PowerPC/PPCHazardRecognizers.cpp
  333   unsigned Opcode = MI->getOpcode();
  391   unsigned Opcode = MI->getOpcode();
lib/Target/PowerPC/PPCISelLowering.cpp
10856   if (MI.getOpcode() == TargetOpcode::STACKMAP ||
10857       MI.getOpcode() == TargetOpcode::PATCHPOINT) {
10859         MI.getOpcode() == TargetOpcode::PATCHPOINT) {
10871   if (MI.getOpcode() == PPC::EH_SjLj_SetJmp32 ||
10872       MI.getOpcode() == PPC::EH_SjLj_SetJmp64) {
10874   } else if (MI.getOpcode() == PPC::EH_SjLj_LongJmp32 ||
10875              MI.getOpcode() == PPC::EH_SjLj_LongJmp64) {
10888   if (MI.getOpcode() == PPC::SELECT_CC_I4 ||
10889       MI.getOpcode() == PPC::SELECT_CC_I8 || MI.getOpcode() == PPC::SELECT_I4 ||
10890       MI.getOpcode() == PPC::SELECT_I8) {
10892     if (MI.getOpcode() == PPC::SELECT_CC_I4 ||
10893         MI.getOpcode() == PPC::SELECT_CC_I8)
10902   } else if (MI.getOpcode() == PPC::SELECT_CC_I4 ||
10903              MI.getOpcode() == PPC::SELECT_CC_I8 ||
10904              MI.getOpcode() == PPC::SELECT_CC_F4 ||
10905              MI.getOpcode() == PPC::SELECT_CC_F8 ||
10906              MI.getOpcode() == PPC::SELECT_CC_F16 ||
10907              MI.getOpcode() == PPC::SELECT_CC_QFRC ||
10908              MI.getOpcode() == PPC::SELECT_CC_QSRC ||
10909              MI.getOpcode() == PPC::SELECT_CC_QBRC ||
10910              MI.getOpcode() == PPC::SELECT_CC_VRRC ||
10911              MI.getOpcode() == PPC::SELECT_CC_VSFRC ||
10912              MI.getOpcode() == PPC::SELECT_CC_VSSRC ||
10913              MI.getOpcode() == PPC::SELECT_CC_VSRC ||
10914              MI.getOpcode() == PPC::SELECT_CC_SPE4 ||
10915              MI.getOpcode() == PPC::SELECT_CC_SPE ||
10916              MI.getOpcode() == PPC::SELECT_I4 ||
10917              MI.getOpcode() == PPC::SELECT_I8 ||
10918              MI.getOpcode() == PPC::SELECT_F4 ||
10919              MI.getOpcode() == PPC::SELECT_F8 ||
10920              MI.getOpcode() == PPC::SELECT_F16 ||
10921              MI.getOpcode() == PPC::SELECT_QFRC ||
10922              MI.getOpcode() == PPC::SELECT_QSRC ||
10923              MI.getOpcode() == PPC::SELECT_QBRC ||
10924              MI.getOpcode() == PPC::SELECT_SPE ||
10925              MI.getOpcode() == PPC::SELECT_SPE4 ||
10926              MI.getOpcode() == PPC::SELECT_VRRC ||
10927              MI.getOpcode() == PPC::SELECT_VSFRC ||
10928              MI.getOpcode() == PPC::SELECT_VSSRC ||
10929              MI.getOpcode() == PPC::SELECT_VSRC) {
10956     if (MI.getOpcode() == PPC::SELECT_I4 || MI.getOpcode() == PPC::SELECT_I8 ||
10957         MI.getOpcode() == PPC::SELECT_F4 || MI.getOpcode() == PPC::SELECT_F8 ||
10958         MI.getOpcode() == PPC::SELECT_F16 ||
10959         MI.getOpcode() == PPC::SELECT_SPE4 ||
10960         MI.getOpcode() == PPC::SELECT_SPE ||
10961         MI.getOpcode() == PPC::SELECT_QFRC ||
10962         MI.getOpcode() == PPC::SELECT_QSRC ||
10963         MI.getOpcode() == PPC::SELECT_QBRC ||
10964         MI.getOpcode() == PPC::SELECT_VRRC ||
10965         MI.getOpcode() == PPC::SELECT_VSFRC ||
10966         MI.getOpcode() == PPC::SELECT_VSSRC ||
10967         MI.getOpcode() == PPC::SELECT_VSRC) {
10996   } else if (MI.getOpcode() == PPC::ReadTB) {
11044   } else if (MI.getOpcode() == PPC::ATOMIC_LOAD_ADD_I8)
11046   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_ADD_I16)
11048   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_ADD_I32)
11050   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_ADD_I64)
11053   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_AND_I8)
11055   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_AND_I16)
11057   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_AND_I32)
11059   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_AND_I64)
11062   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_OR_I8)
11064   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_OR_I16)
11066   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_OR_I32)
11068   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_OR_I64)
11071   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_XOR_I8)
11073   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_XOR_I16)
11075   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_XOR_I32)
11077   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_XOR_I64)
11080   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_NAND_I8)
11082   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_NAND_I16)
11084   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_NAND_I32)
11086   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_NAND_I64)
11089   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_SUB_I8)
11091   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_SUB_I16)
11093   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_SUB_I32)
11095   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_SUB_I64)
11098   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MIN_I8)
11100   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MIN_I16)
11102   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MIN_I32)
11104   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MIN_I64)
11107   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MAX_I8)
11109   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MAX_I16)
11111   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MAX_I32)
11113   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_MAX_I64)
11116   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMIN_I8)
11118   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMIN_I16)
11120   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMIN_I32)
11122   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMIN_I64)
11125   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMAX_I8)
11127   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMAX_I16)
11129   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMAX_I32)
11131   else if (MI.getOpcode() == PPC::ATOMIC_LOAD_UMAX_I64)
11134   else if (MI.getOpcode() == PPC::ATOMIC_SWAP_I8)
11136   else if (MI.getOpcode() == PPC::ATOMIC_SWAP_I16)
11138   else if (MI.getOpcode() == PPC::ATOMIC_SWAP_I32)
11140   else if (MI.getOpcode() == PPC::ATOMIC_SWAP_I64)
11142   else if (MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I32 ||
11143            MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I64 ||
11145             MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I8) ||
11147             MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I16)) {
11148     bool is64bit = MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I64;
11152     switch (MI.getOpcode()) {
11244   } else if (MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I8 ||
11245              MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I16) {
11251     bool is8bit = MI.getOpcode() == PPC::ATOMIC_CMP_SWAP_I8;
11430   } else if (MI.getOpcode() == PPC::FADDrtz) {
11454   } else if (MI.getOpcode() == PPC::ANDIo_1_EQ_BIT ||
11455              MI.getOpcode() == PPC::ANDIo_1_GT_BIT ||
11456              MI.getOpcode() == PPC::ANDIo_1_EQ_BIT8 ||
11457              MI.getOpcode() == PPC::ANDIo_1_GT_BIT8) {
11458     unsigned Opcode = (MI.getOpcode() == PPC::ANDIo_1_EQ_BIT8 ||
11459                        MI.getOpcode() == PPC::ANDIo_1_GT_BIT8)
11462     bool isEQ = (MI.getOpcode() == PPC::ANDIo_1_EQ_BIT ||
11463                  MI.getOpcode() == PPC::ANDIo_1_EQ_BIT8);
11476   } else if (MI.getOpcode() == PPC::TCHECK_RET) {
11484   } else if (MI.getOpcode() == PPC::TBEGIN_RET) {
11491   } else if (MI.getOpcode() == PPC::SETRNDi) {
11513   } else if (MI.getOpcode() == PPC::SETRND) {
lib/Target/PowerPC/PPCInstrInfo.cpp
  234   switch (Inst.getOpcode()) {
  286   switch (MI.getOpcode()) {
  300   unsigned Opcode = MI.getOpcode();
  320   switch (MI.getOpcode()) {
  354   unsigned Opcode = MI.getOpcode();
  374   if (MI.getOpcode() != PPC::RLWIMI && MI.getOpcode() != PPC::RLWIMIo)
  459   int AltOpc = PPC::getAltVSXFMAOpcode(MI.getOpcode());
  513     if (I->getOpcode() == PPC::B &&
  529     if (LastInst.getOpcode() == PPC::B) {
  534     } else if (LastInst.getOpcode() == PPC::BCC) {
  542     } else if (LastInst.getOpcode() == PPC::BC) {
  550     } else if (LastInst.getOpcode() == PPC::BCn) {
  558     } else if (LastInst.getOpcode() == PPC::BDNZ8 ||
  559                LastInst.getOpcode() == PPC::BDNZ) {
  569     } else if (LastInst.getOpcode() == PPC::BDZ8 ||
  570                LastInst.getOpcode() == PPC::BDZ) {
  594   if (SecondLastInst.getOpcode() == PPC::BCC &&
  595       LastInst.getOpcode() == PPC::B) {
  604   } else if (SecondLastInst.getOpcode() == PPC::BC &&
  605              LastInst.getOpcode() == PPC::B) {
  614   } else if (SecondLastInst.getOpcode() == PPC::BCn &&
  615              LastInst.getOpcode() == PPC::B) {
  624   } else if ((SecondLastInst.getOpcode() == PPC::BDNZ8 ||
  625               SecondLastInst.getOpcode() == PPC::BDNZ) &&
  626              LastInst.getOpcode() == PPC::B) {
  638   } else if ((SecondLastInst.getOpcode() == PPC::BDZ8 ||
  639               SecondLastInst.getOpcode() == PPC::BDZ) &&
  640              LastInst.getOpcode() == PPC::B) {
  656   if (SecondLastInst.getOpcode() == PPC::B && LastInst.getOpcode() == PPC::B) {
  678   if (I->getOpcode() != PPC::B && I->getOpcode() != PPC::BCC &&
  679       I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
  680       I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
  681       I->getOpcode() != PPC::BDZ8  && I->getOpcode() != PPC::BDZ)
  691   if (I->getOpcode() != PPC::BCC &&
  692       I->getOpcode() != PPC::BC && I->getOpcode() != PPC::BCn &&
  693       I->getOpcode() != PPC::BDNZ8 && I->getOpcode() != PPC::BDNZ &&
  694       I->getOpcode() != PPC::BDZ8  && I->getOpcode() != PPC::BDZ)
 1328   unsigned DefOpc = DefMI.getOpcode();
 1439   unsigned OpC = MI.getOpcode();
 1591   unsigned OpC = MI.getOpcode();
 1609   unsigned Opc = MI.getOpcode();
 1642   int OpC = CmpInstr.getOpcode();
 1702       if (UseMI->getOpcode() == PPC::BCC) {
 1708       } else if (UseMI->getOpcode() == PPC::ISEL ||
 1709                  UseMI->getOpcode() == PPC::ISEL8) {
 1765     if (UseMI->getOpcode() != PPC::BCC)
 1802     unsigned IOpC = Instr.getOpcode();
 1838   int MIOpC = MI->getOpcode();
 1878       if (UseMI->getOpcode() == PPC::BCC) {
 1887       } else if (UseMI->getOpcode() == PPC::ISEL ||
 1888                  UseMI->getOpcode() == PPC::ISEL8) {
 2011   unsigned Opcode = MI.getOpcode();
 2068     switch (MI.getOpcode()) {
 2136   switch (MI.getOpcode()) {
 2360         if (DefMI->getOpcode() == PPC::LI || DefMI->getOpcode() == PPC::LI8) {
 2371     unsigned Opc = MI.getOpcode();
 2403           switch (DefMI->getOpcode()) {
 2646   unsigned Opc = ADDIMI.getOpcode();
 2658   unsigned Opc = ADDMI.getOpcode();
 2673   unsigned Opc = MI.getOpcode();
 2775   bool HasImmForm = instrHasImmForm(MI.getOpcode(), IsVFReg, III, PostRA);
 2784   if ((DefMI->getOpcode() != PPC::LI && DefMI->getOpcode() != PPC::LI8) ||
 2802   unsigned Opc = MI.getOpcode();
 2829       unsigned UseOpc = CompareUseMI.getOpcode();
 3489   unsigned Opc = DefMI.getOpcode();
 3548   if (DefMI.getOpcode() == PPC::ADDItocL) {
 3665     if (DefMI.getOpcode() == PPC::ADDItocL)
 3759   unsigned Opc = MI.getOpcode();
 3870   int Opcode = MI.getOpcode();
 3911   int Opcode = MI.getOpcode();
 4005   switch (MI.getOpcode()) {
 4033             (--II)->getOpcode() == PPC::ADJCALLSTACKUP) {
 4098     if (MI.getOpcode() == PPC::PHI) {
 4173     if (LoopCount->getOpcode() == PPC::LI8 || LoopCount->getOpcode() == PPC::LI)
 4208     if (LoopCount->getOpcode() == PPC::LI8 ||
 4209         LoopCount->getOpcode() == PPC::LI) {
 4236   if (I != LoopBB->end() && isBDNZ(I->getOpcode())) {
 4256     if (I.getOpcode() == LOOPi)
lib/Target/PowerPC/PPCMCInstLower.cpp
  152   OutMI.setOpcode(MI->getOpcode());
lib/Target/PowerPC/PPCMIPeephole.cpp
  162   unsigned Opcode = MI->getOpcode();
  311       switch (MI.getOpcode()) {
  349             unsigned DefOpc = DefMI ? DefMI->getOpcode() : 0;
  362                 if (LoadMI && LoadMI->getOpcode() == PPC::LXVDSX)
  442         unsigned MyOpcode = MI.getOpcode();
  451         unsigned DefOpcode = DefMI->getOpcode();
  459           return Splt && (Splt->getOpcode() == PPC::LXVWSX ||
  460             Splt->getOpcode() == PPC::XXSPLTW);
  515         if (DefMI && DefMI->getOpcode() == PPC::XXPERMDI) {
  532             if (RoundInstr->getOpcode() == PPC::FRSP &&
  575         if (SrcMI->getOpcode() == PPC::LHZ ||
  576             SrcMI->getOpcode() == PPC::LHZX) {
  593           unsigned Opc = getSextLoadOp(is64Bit(MI.getOpcode()),
  594                                        isXForm(SrcMI->getOpcode()));
  619         if (SrcMI->getOpcode() == PPC::LWZ ||
  620             SrcMI->getOpcode() == PPC::LWZX) {
  637           unsigned Opc = getSextLoadOp(is64Bit(MI.getOpcode()),
  638                                        isXForm(SrcMI->getOpcode()));
  649         } else if (MI.getOpcode() == PPC::EXTSW_32_64 &&
  686         if (!(SrcMI && SrcMI->getOpcode() == PPC::INSERT_SUBREG &&
  693         if (ImpDefMI->getOpcode() != PPC::IMPLICIT_DEF) break;
  696         if (SubRegMI->getOpcode() == PPC::COPY) {
  725           return DefPhiMI && (DefPhiMI->getOpcode() == PPC::PHI) &&
  742                 (LiMI->getOpcode() != PPC::LI && LiMI->getOpcode() != PPC::LI8)
  761         const TargetRegisterClass *TRC = MI.getOpcode() == PPC::ADD8
  776           if (LiMI->getOpcode() == PPC::ADDI || LiMI->getOpcode() == PPC::ADDI8)
  779           assert((LiMI->getOpcode() == PPC::LI ||
  780                   LiMI->getOpcode() == PPC::LI8) &&
  784           LiMI->setDesc(TII->get(LiMI->getOpcode() == PPC::LI ? PPC::ADDI
  867   bool SignedCmp = isSignedCmpOp(CMPI->getOpcode());
  886   bool SignedCmp = isSignedCmpOp(CMPI->getOpcode());
  922     if (BB1 && Inst->getOpcode() == PPC::PHI && Inst->getParent() == BB2) {
  948         (*BII).getOpcode() == PPC::BCC &&
 1017         if (Inst->getParent() == &MBB && Inst->getOpcode() != PPC::PHI)
 1120     if (!isSupportedCmpOp(CMPI1->getOpcode()) ||
 1121         !isSupportedCmpOp(CMPI2->getOpcode()) ||
 1122         is64bitCmpOp(CMPI1->getOpcode()) != is64bitCmpOp(CMPI2->getOpcode()))
 1130     if (CMPI1->getOpcode() != CMPI2->getOpcode()) {
 1147           CMPI1->getOpcode() == getSignedCmpOpCode(CMPI2->getOpcode()))
 1148         NewOpCode = CMPI1->getOpcode();
 1150                getSignedCmpOpCode(CMPI1->getOpcode()) == CMPI2->getOpcode())
 1151         NewOpCode = CMPI2->getOpcode();
 1255     if (NewOpCode != 0 && NewOpCode != CMPI1->getOpcode()) {
 1286           assert(Inst->getOpcode() == PPC::PHI &&
 1332   if (MI.getOpcode() != PPC::RLDICR)
 1340   if (SrcMI->getOpcode() != PPC::RLDICL)
 1397   if (MI.getOpcode() != PPC::RLDICR)
 1420   if (SrcMI->getOpcode() != PPC::EXTSW &&
 1421       SrcMI->getOpcode() != PPC::EXTSW_32_64)
 1441               SrcMI->getOpcode() == PPC::EXTSW ? TII->get(PPC::EXTSWSLI)
lib/Target/PowerPC/PPCMachineScheduler.cpp
   26     return Inst.getOpcode() == PPC::ADDI || Inst.getOpcode() == PPC::ADDI8;
lib/Target/PowerPC/PPCPreEmitPeephole.cpp
   83         unsigned Opc = BBI->getOpcode();
  124           if (AfterBBI->getOpcode() != Opc)
  175           unsigned Opc = MI.getOpcode();
  223         if (Br->getOpcode() != PPC::BC && Br->getOpcode() != PPC::BCn)
  232             if ((It->getOpcode() == PPC::CRUNSET ||
  233                  It->getOpcode() == PPC::CRSET) &&
  243         unsigned CRSetOp = CRSetMI->getOpcode();
  244         if ((Br->getOpcode() == PPC::BCn && CRSetOp == PPC::CRSET) ||
  245             (Br->getOpcode() == PPC::BC  && CRSetOp == PPC::CRUNSET)) {
lib/Target/PowerPC/PPCQPXLoadSplat.cpp
   86           switch (MI->getOpcode()) {
  144       if (MI->getOpcode() != PPC::QVESPLATI &&
  145           MI->getOpcode() != PPC::QVESPLATIs &&
  146           MI->getOpcode() != PPC::QVESPLATIb)
lib/Target/PowerPC/PPCReduceCRLogicals.cpp
  155   unsigned OrigBROpcode = BSI.OrigBranch->getOpcode();
  390     unsigned Opc = MI.getOpcode();
  499     unsigned Opc = UseMI.getOpcode();
  627   if (CRI.MI->getOpcode() != PPC::CROR &&
  628       CRI.MI->getOpcode() != PPC::CRAND &&
  629       CRI.MI->getOpcode() != PPC::CRNOR &&
  630       CRI.MI->getOpcode() != PPC::CRNAND &&
  631       CRI.MI->getOpcode() != PPC::CRORC &&
  632       CRI.MI->getOpcode() != PPC::CRANDC) {
  679   unsigned Opc = CRI.MI->getOpcode();
  681   computeBranchTargetAndInversion(Opc, Branch->getOpcode(), UsingDef1,
lib/Target/PowerPC/PPCRegisterInfo.cpp
  771   switch (Ins->getOpcode()) {
  964   unsigned OpC = MI.getOpcode();
  975   else if (MI.getOpcode() == TargetOpcode::STACKMAP ||
  976            MI.getOpcode() == TargetOpcode::PATCHPOINT)
 1011   unsigned OpC = MI.getOpcode();
 1187   unsigned OpC = MI->getOpcode();
 1278   return MI->getOpcode() == PPC::DBG_VALUE || // DBG_VALUE is always Reg+Imm
 1279          MI->getOpcode() == TargetOpcode::STACKMAP ||
 1280          MI->getOpcode() == TargetOpcode::PATCHPOINT ||
lib/Target/PowerPC/PPCTLSDynamicCall.cpp
   57         if (MI.getOpcode() != PPC::ADDItlsgdLADDR &&
   58             MI.getOpcode() != PPC::ADDItlsldLADDR &&
   59             MI.getOpcode() != PPC::ADDItlsgdLADDR32 &&
   60             MI.getOpcode() != PPC::ADDItlsldLADDR32) {
   66           if (MI.getOpcode() == PPC::ADJCALLSTACKDOWN)
   68           else if (MI.getOpcode() == PPC::ADJCALLSTACKUP)
   84         switch (MI.getOpcode()) {
lib/Target/PowerPC/PPCTOCRegDeps.cpp
   97       if (MI.getOpcode() == PPC::LDtocL ||
   98           MI.getOpcode() == PPC::ADDItocL ||
   99           MI.getOpcode() == PPC::LWZtocL)
lib/Target/PowerPC/PPCVSXFMAMutate.cpp
   85         int AltOpc = PPC::getAltVSXFMAOpcode(MI.getOpcode());
lib/Target/PowerPC/PPCVSXSwapRemoval.cpp
  274       switch(MI.getOpcode()) {
  722         if (SwapVector[UseIdx].VSEMI->getOpcode() != MI->getOpcode()) {
  829     switch (MI->getOpcode()) {
  839     if (MI->getOpcode() == PPC::XXSPLTW)
  845     if (MI->getOpcode() == PPC::XXSPLTW)
  978     dbgs() << format("  %14s  ", TII->getName(MI->getOpcode()).str().c_str());
lib/Target/RISCV/RISCVExpandPseudoInsts.cpp
  101   switch (MBBI->getOpcode()) {
lib/Target/RISCV/RISCVFrameLowering.cpp
  457       if (MI->getOpcode() == RISCV::ADJCALLSTACKDOWN)
lib/Target/RISCV/RISCVISelLowering.cpp
 1091   assert(MI.getOpcode() == RISCV::ReadCycleWide && "Unexpected instruction");
 1153   assert(MI.getOpcode() == RISCV::SplitF64Pseudo && "Unexpected instruction");
 1184   assert(MI.getOpcode() == RISCV::BuildPairF64Pseudo &&
 1216   switch (MI.getOpcode()) {
 1352   switch (MI.getOpcode()) {
lib/Target/RISCV/RISCVInstrInfo.cpp
   38   switch (MI.getOpcode()) {
   64   switch (MI.getOpcode()) {
  209   Cond.push_back(MachineOperand::CreateImm(LastInst.getOpcode()));
  451   unsigned Opcode = MI.getOpcode();
  479   const unsigned Opcode = MI.getOpcode();
  494   MCInstrDesc const &Desc = MCII->get(MI.getOpcode());
lib/Target/RISCV/RISCVInstructionSelector.cpp
   85   if (!isPreISelGenericOpcode(I.getOpcode())) {
lib/Target/RISCV/RISCVMCInstLower.cpp
  130   OutMI.setOpcode(MI->getOpcode());
lib/Target/RISCV/RISCVMergeBaseOffset.cpp
   82   if (HiLUI.getOpcode() != RISCV::LUI ||
   90   if (LoADDI->getOpcode() != RISCV::ADDI ||
  137   assert((TailAdd.getOpcode() == RISCV::ADD) && "Expected ADD instruction!");
  147   if (OffsetTail.getOpcode() == RISCV::ADDI) {
  157     if (OffsetLui.getOpcode() != RISCV::LUI ||
  168   } else if (OffsetTail.getOpcode() == RISCV::LUI) {
  185   switch (Tail.getOpcode()) {
lib/Target/Sparc/DelaySlotFiller.cpp
  117         (MI->getOpcode() == SP::RESTORErr
  118          || MI->getOpcode() == SP::RESTOREri)) {
  126         (MI->getOpcode() == SP::FCMPS || MI->getOpcode() == SP::FCMPD
  127          || MI->getOpcode() == SP::FCMPQ)) {
  178   if (slot->getOpcode() == SP::RET || slot->getOpcode() == SP::TLS_CALL)
  181   if (slot->getOpcode() == SP::RETL) {
  185     if (J->getOpcode() == SP::RESTORErr
  186         || J->getOpcode() == SP::RESTOREri) {
  270   unsigned Opcode = candidate->getOpcode();
  296   switch(MI->getOpcode()) {
  335       if (MO.isImplicit() && MI->getOpcode() == SP::RETL)
  359   switch (I->getOpcode()) {
  391   AddMI->setDesc(TII->get((AddMI->getOpcode() == SP::ADDrr)
  416   if (OrMI->getOpcode() == SP::ORrr
  421   if (OrMI->getOpcode() == SP::ORri
  430   OrMI->setDesc(TII->get((OrMI->getOpcode() == SP::ORrr)
  465   assert(RestoreMI->getOpcode() == SP::RESTORErr);
  488   assert(MBBI->getOpcode() == SP::RESTORErr
  501   switch (PrevInst->getOpcode()) {
lib/Target/Sparc/LeonPasses.cpp
   50       unsigned Opcode = MI.getOpcode();
   85       unsigned Opcode = MI.getOpcode();
  137       unsigned Opcode = MI.getOpcode();
lib/Target/Sparc/SparcAsmPrinter.cpp
  255   switch (MI->getOpcode()) {
  300     if (MI->getOpcode() == SP::CALL)
  303     else if (MI->getOpcode() == SP::SETHIi || MI->getOpcode() == SP::SETHIXi)
  313     else if (MI->getOpcode() == SP::TLS_CALL)
  318     else if (MI->getOpcode() == SP::TLS_ADDrr)
  324     else if (MI->getOpcode() == SP::TLS_LDrr)
  327     else if (MI->getOpcode() == SP::TLS_LDXrr)
  330     else if (MI->getOpcode() == SP::XORri || MI->getOpcode() == SP::XORXri)
lib/Target/Sparc/SparcFrameLowering.cpp
  210     if (MI.getOpcode() == SP::ADJCALLSTACKDOWN)
  227   assert(MBBI->getOpcode() == SP::RETL &&
lib/Target/Sparc/SparcISelLowering.cpp
 3101   switch (MI.getOpcode()) {
lib/Target/Sparc/SparcInstrInfo.cpp
   45   if (MI.getOpcode() == SP::LDri || MI.getOpcode() == SP::LDXri ||
   46       MI.getOpcode() == SP::LDFri || MI.getOpcode() == SP::LDDFri ||
   47       MI.getOpcode() == SP::LDQFri) {
   64   if (MI.getOpcode() == SP::STri || MI.getOpcode() == SP::STXri ||
   65       MI.getOpcode() == SP::STFri || MI.getOpcode() == SP::STDFri ||
   66       MI.getOpcode() == SP::STQFri) {
  173   unsigned LastOpc = LastInst->getOpcode();
  191   unsigned SecondLastOpc = SecondLastInst->getOpcode();
  199       LastOpc = LastInst->getOpcode();
  206         SecondLastOpc = SecondLastInst->getOpcode();
  285     if (I->getOpcode() != SP::BA
  286         && I->getOpcode() != SP::BCOND
  287         && I->getOpcode() != SP::FBCOND)
  495   switch (MI.getOpcode()) {
lib/Target/Sparc/SparcMCInstLower.cpp
   98   OutMI.setOpcode(MI->getOpcode());
lib/Target/Sparc/SparcRegisterInfo.cpp
  183     if (MI.getOpcode() == SP::STQFri) {
  195     } else if (MI.getOpcode() == SP::LDQFri) {
lib/Target/SystemZ/SystemZAsmPrinter.cpp
  128   switch (MI->getOpcode()) {
  585         MII->getOpcode() == TargetOpcode::PATCHPOINT ||
  586         MII->getOpcode() == TargetOpcode::STACKMAP)
lib/Target/SystemZ/SystemZElimCompare.cpp
  116   switch (MI.getOpcode()) {
  173   return (MI.getOpcode() == SystemZ::LTEBR ||
  174           MI.getOpcode() == SystemZ::LTDBR ||
  175           MI.getOpcode() == SystemZ::LTXBR) &&
  199   unsigned Opcode = MI.getOpcode();
  216   if (Branch->getOpcode() != SystemZ::BRC ||
  252   unsigned LATOpcode = TII->getLoadAndTrap(MI.getOpcode());
  260   if (Branch->getOpcode() != SystemZ::CondTrap ||
  294   unsigned Opcode = TII->getLoadAndTest(MI.getOpcode());
  318   int Opcode = (ConvOpc ? ConvOpc : MI.getOpcode());
  405   switch (Compare.getOpcode()) {
  498   switch (Branch->getOpcode()) {
  517       TII->getFusedCompare(Compare.getOpcode(), Type, &Compare);
lib/Target/SystemZ/SystemZFrameLowering.cpp
  363     if (MBBI != MBB.end() && MBBI->getOpcode() == SystemZ::STMG)
  441           (MBBI->getOpcode() == SystemZ::STD ||
  442            MBBI->getOpcode() == SystemZ::STDY))
  448           MBBI->getOpcode() == SystemZ::VST)
  487     unsigned Opcode = MBBI->getOpcode();
  534   switch (MI->getOpcode()) {
lib/Target/SystemZ/SystemZHazardRecognizer.cpp
  169   OS << TII->getName(SU->getInstr()->getOpcode());
  266           MI->getOpcode() == SystemZ::CondTrap);
lib/Target/SystemZ/SystemZISelLowering.cpp
 6548   switch (MI.getOpcode()) {
 7534   switch (MI.getOpcode()) {
lib/Target/SystemZ/SystemZInstrInfo.cpp
  282   switch (MI.getOpcode()) {
  335   if (MI.getOpcode() != SystemZ::MVC || !MI.getOperand(0).isFI() ||
  611   unsigned DefOpc = DefMI.getOpcode();
  619   unsigned UseOpc = UseMI.getOpcode();
  673   unsigned Opcode = MI.getOpcode();
  694   if (MBB.getLastNonDebugInstr()->getOpcode() != SystemZ::Trap &&
  724   unsigned Opcode = MI.getOpcode();
  954   if (LogicOp And = interpretAndImmediate(MI.getOpcode())) {
 1004   unsigned Opcode = MI.getOpcode();
 1218   switch (MI.getOpcode()) {
 1407   switch (MI.getOpcode()) {
lib/Target/SystemZ/SystemZLDCleanup.cpp
   94     switch (I->getOpcode()) {
lib/Target/SystemZ/SystemZLongBranch.cpp
  217     switch (MI.getOpcode()) {
  392   switch (Branch->getOpcode()) {
lib/Target/SystemZ/SystemZMCInstLower.cpp
   95   OutMI.setOpcode(MI->getOpcode());
lib/Target/SystemZ/SystemZPostRewrite.cpp
  216   unsigned Opcode = MI.getOpcode();
lib/Target/SystemZ/SystemZRegisterInfo.cpp
  104         if (Use.getOpcode() == SystemZ::LOCRMux ||
  105             Use.getOpcode() == SystemZ::SELRMux) {
  111           if (Use.getOpcode() == SystemZ::SELRMux)
  128         else if (Use.getOpcode() == SystemZ::CHIMux ||
  129                  Use.getOpcode() == SystemZ::CFIMux) {
  133               if (DefMI.getOpcode() != SystemZ::LMux)
  152     if (SystemZ::getTwoOperandOpcode(Use.getOpcode()) != -1) {
  279   unsigned Opcode = MI->getOpcode();
lib/Target/SystemZ/SystemZShortenInst.cpp
  207     switch (MI.getOpcode()) {
  334       int TwoOperandOpcode = SystemZ::getTwoOperandOpcode(MI.getOpcode());
lib/Target/WebAssembly/WebAssemblyArgumentMove.cpp
   81     if (!WebAssembly::isArgument(MI.getOpcode())) {
   90     if (WebAssembly::isArgument(MI.getOpcode())) {
lib/Target/WebAssembly/WebAssemblyAsmPrinter.cpp
  311   switch (MI->getOpcode()) {
  383       assert(MI->getOpcode() == WebAssembly::INLINEASM);
lib/Target/WebAssembly/WebAssemblyCFGStackify.cpp
  229         if (Pred->getFirstTerminator()->getOpcode() == WebAssembly::BR_ON_EXN) {
  270     if (MI.getOpcode() == WebAssembly::LOOP) {
  282     if (MI.getOpcode() == WebAssembly::BLOCK ||
  283         MI.getOpcode() == WebAssembly::TRY)
  288     if (MI.getOpcode() == WebAssembly::END_BLOCK ||
  289         MI.getOpcode() == WebAssembly::END_LOOP ||
  290         MI.getOpcode() == WebAssembly::END_TRY)
  339     if (MI.getOpcode() == WebAssembly::LOOP ||
  340         MI.getOpcode() == WebAssembly::TRY)
  348     if (MI.getOpcode() == WebAssembly::END_LOOP ||
  349         MI.getOpcode() == WebAssembly::END_TRY) {
  398     if (MI.getOpcode() == WebAssembly::END_LOOP)
  418     if (MI.getOpcode() == WebAssembly::END_LOOP)
  501     if (MI.getOpcode() == WebAssembly::LOOP) {
  513     if (MI.getOpcode() == WebAssembly::BLOCK ||
  514         MI.getOpcode() == WebAssembly::TRY)
  519     if (MI.getOpcode() == WebAssembly::END_BLOCK ||
  520         MI.getOpcode() == WebAssembly::END_LOOP ||
  521         MI.getOpcode() == WebAssembly::END_TRY)
  539         TermPos->getOpcode() != WebAssembly::RETHROW) {
  587     if (MI.getOpcode() == WebAssembly::LOOP ||
  588         MI.getOpcode() == WebAssembly::BLOCK)
  593     if (MI.getOpcode() == WebAssembly::END_TRY)
  601     if (MI.getOpcode() == WebAssembly::END_LOOP) {
  683       if (MI.getOpcode() != WebAssembly::TRY)
  692            std::prev(B)->getOpcode() == WebAssembly::BLOCK &&
  693            E->getOpcode() == WebAssembly::END_BLOCK &&
  702     if (MI->getOpcode() == WebAssembly::BLOCK)
  874       if (MI.getOpcode() == WebAssembly::TRY)
  876       else if (MI.getOpcode() == WebAssembly::CATCH)
  915       if (MI.getOpcode() == WebAssembly::TRY)
  917       else if (MI.getOpcode() == WebAssembly::CATCH)
  926       if (RangeEnd && WebAssembly::isMarker(MI.getOpcode())) {
 1016       switch (MI.getOpcode()) {
 1177       switch (MI.getOpcode()) {
 1247       switch (MI.getOpcode()) {
 1303       switch (MI.getOpcode()) {
lib/Target/WebAssembly/WebAssemblyCallIndirectFixup.cpp
   64   switch (MI.getOpcode()) {
lib/Target/WebAssembly/WebAssemblyExplicitLocals.cpp
  208     if (!WebAssembly::isArgument(MI.getOpcode()))
  230       assert(!WebAssembly::isArgument(MI.getOpcode()));
  238       if (WebAssembly::isTee(MI.getOpcode())) {
  278           if (MI.getOpcode() == WebAssembly::IMPLICIT_DEF) {
  359       if (WebAssembly::isCopy(MI.getOpcode())) {
lib/Target/WebAssembly/WebAssemblyFrameLowering.cpp
  144   if (I->getOpcode() == TII->getCallFrameDestroyOpcode() &&
  168          WebAssembly::isArgument(InsertPt->getOpcode()))
lib/Target/WebAssembly/WebAssemblyISelLowering.cpp
  429   switch (MI.getOpcode()) {
lib/Target/WebAssembly/WebAssemblyInstrInfo.cpp
   42   switch (MI.getOpcode()) {
  115     switch (MI.getOpcode()) {
lib/Target/WebAssembly/WebAssemblyLateEHPrepare.cpp
  153     switch (TI->getOpcode()) {
  189       if (MI.getOpcode() != WebAssembly::THROW &&
  190           MI.getOpcode() != WebAssembly::RETHROW)
  243       if (MI.getOpcode() == WebAssembly::EXTRACT_EXCEPTION_I32) {
  385     if (InsertPos->getOpcode() == WebAssembly::CATCH)
lib/Target/WebAssembly/WebAssemblyLowerBrUnless.cpp
   68       if (MI->getOpcode() != WebAssembly::BR_UNLESS)
   78         switch (Def->getOpcode()) {
lib/Target/WebAssembly/WebAssemblyMCInstLower.cpp
  208   OutMI.setOpcode(MI->getOpcode());
  249           if (WebAssembly::isCallIndirect(MI->getOpcode()))
  254           if (MI->getOpcode() == WebAssembly::RET_CALL_INDIRECT)
lib/Target/WebAssembly/WebAssemblyMemIntrinsicResults.cpp
  201       switch (MI.getOpcode()) {
lib/Target/WebAssembly/WebAssemblyPeephole.cpp
   86   assert(End->getOpcode() == WebAssembly::END_FUNCTION);
  149       switch (MI.getOpcode()) {
lib/Target/WebAssembly/WebAssemblyPrepareForLiveIntervals.cpp
   67     if (WebAssembly::isArgument(Def.getOpcode()))
  117     if (WebAssembly::isArgument(MI.getOpcode())) {
lib/Target/WebAssembly/WebAssemblyRegNumbering.cpp
   75     if (!WebAssembly::isArgument(MI.getOpcode()))
lib/Target/WebAssembly/WebAssemblyRegStackify.cpp
  103   assert(MI->getOpcode() == TargetOpcode::IMPLICIT_DEF);
  185     switch (MI.getOpcode()) {
  220     switch (MI.getOpcode()) {
  249   if (MI.getOpcode() == WebAssembly::GLOBAL_SET_I32 &&
  255     unsigned CalleeOpNo = WebAssembly::getCalleeOpNo(MI.getOpcode());
  322   if (Def->getOpcode() == WebAssembly::CATCH ||
  323       Def->getOpcode() == WebAssembly::EXTRACT_EXCEPTION_I32) {
  829         if (WebAssembly::isArgument(Def->getOpcode()))
  847         if (Def->getOpcode() == WebAssembly::CATCH)
  881         if (Insert->getOpcode() == TargetOpcode::IMPLICIT_DEF)
lib/Target/WebAssembly/WebAssemblyRegisterInfo.cpp
   74       MI.getOpcode(), WebAssembly::OpName::addr);
   77         MI.getOpcode(), WebAssembly::OpName::off);
   91   if (MI.getOpcode() == WebAssembly::ADD_I32) {
  100         if (Def && Def->getOpcode() == WebAssembly::CONST_I32 &&
lib/Target/WebAssembly/WebAssemblySetP2AlignOperands.cpp
   63              (UINT64_C(1) << WebAssembly::GetDefaultP2Align(MI.getOpcode())) &&
   72                      uint64_t(WebAssembly::GetDefaultP2Align(MI.getOpcode())));
   88           MI.getOpcode(), WebAssembly::OpName::p2align);
lib/Target/WebAssembly/WebAssemblyUtilities.cpp
   40   switch (MI.getOpcode()) {
   47   if (isCallIndirect(MI.getOpcode()))
   52   const MachineOperand &MO = MI.getOperand(getCalleeOpNo(MI.getOpcode()));
lib/Target/X86/X86AvoidStoreForwardingBlocks.cpp
  442     if ((Size - MOV128SZ >= 0) && isYMMLoadOpcode(LoadInst->getOpcode())) {
  444       buildCopy(LoadInst, getYMMtoXMMLoadOpcode(LoadInst->getOpcode()), LdDisp,
  445                 StoreInst, getYMMtoXMMStoreOpcode(StoreInst->getOpcode()),
  539       if (!isPotentialBlockedMemCpyLd(MI.getOpcode()))
  550             isPotentialBlockedMemCpyPair(MI.getOpcode(), StoreMI.getOpcode()) &&
  565   auto TRC = TII->getRegClass(TII->get(LoadInst->getOpcode()), 0, TRI,
  694       if (!isPotentialBlockingStoreInst(PBInst->getOpcode(),
  695                                         LoadInst->getOpcode()) ||
lib/Target/X86/X86CallFrameOptimization.cpp
  171       if (MI.getOpcode() == FrameSetupOpcode) {
  177       } else if (MI.getOpcode() == FrameDestroyOpcode) {
  259       if (MI.getOpcode() == FrameSetupOpcode) {
  287   switch (MI->getOpcode()) {
  363   assert(I->getOpcode() == TII->getCallFrameSetupOpcode());
  380   while (I->getOpcode() == X86::LEA32r || I->isDebugInstr())
  470   if ((++I)->getOpcode() != TII->getCallFrameDestroyOpcode())
  512     switch (Store->getOpcode()) {
  541       if (Is64Bit && Store->getOpcode() == X86::MOV32mr) {
  622   if ((DefMI.getOpcode() != X86::MOV32rm &&
  623        DefMI.getOpcode() != X86::MOV64rm) ||
lib/Target/X86/X86CmovConversion.cpp
  323                   return UseI.getOpcode() == X86::SUBREG_TO_REG;
  539         unsigned Op = UIs.begin()->getOpcode();
lib/Target/X86/X86CondBrFolding.cpp
  213     return MI.getOpcode() == X86::JMP_1;
  441   switch (MI.getOpcode()) {
  503     if (I->getOpcode() == X86::JMP_1) {
lib/Target/X86/X86DomainReassignment.cpp
   94     assert(MI->getOpcode() == SrcOpcode &&
  238     assert(MI->getOpcode() == TargetOpcode::COPY && "Expected a COPY");
  474       InstrConverterBase *IC = Converters.lookup({i, MI->getOpcode()});
  488         Converters.lookup({DstDomain, MI->getOpcode()})->getExtraCost(MI, MRI);
  504     if (Converters.lookup({Domain, MI->getOpcode()})
  531   const MCInstrDesc &Desc = TII->get(MI.getOpcode());
lib/Target/X86/X86EvexToVex.cpp
  149   unsigned Opc = MI.getOpcode();
  255   auto I = llvm::lower_bound(Table, MI.getOpcode());
  256   if (I == Table.end() || I->EvexOpcode != MI.getOpcode())
lib/Target/X86/X86ExpandPseudo.cpp
  182   unsigned Opcode = MI.getOpcode();
lib/Target/X86/X86FixupBWInsts.cpp
  240   unsigned Opc = OrigMI->getOpcode(); (void)Opc;
  344   if (MI->getOpcode() == X86::MOVSX16rr8 &&
  365   switch (MI->getOpcode()) {
lib/Target/X86/X86FixupLEAs.cpp
  129   switch (MI.getOpcode()) {
  136                 TII->get(MI.getOpcode() == X86::MOV32rr ? X86::LEA32r
  151   switch (MI.getOpcode()) {
  213       if (!isLEA(I->getOpcode()))
  376   if (MI.getOpcode() == X86::LEA64_32r) {
  389     unsigned NewOpcode = getADDrrFromLEA(MI.getOpcode());
  393     if (MI.getOpcode() == X86::LEA64_32r) {
  412       unsigned NewOpcode = getINCDECFromLEA(MI.getOpcode(), IsINC);
  414       if (MI.getOpcode() == X86::LEA64_32r) {
  423       unsigned NewOpcode = getADDriFromLEA(MI.getOpcode(), Disp);
  424       if (MI.getOpcode() == X86::LEA64_32r) {
  483   const unsigned Opcode = MI.getOpcode();
  533   const unsigned LEAOpcode = MI.getOpcode();
  551   if (MI.getOpcode() == X86::LEA64_32r) {
  577     unsigned NewOpc = getADDrrFromLEA(MI.getOpcode());
  581     if (MI.getOpcode() == X86::LEA64_32r) {
  616             getINCDECFromLEA(MI.getOpcode(), Offset.getImm() == 1);
  621         unsigned NewOpc = getADDriFromLEA(MI.getOpcode(), Offset);
  648     unsigned NewOpc = getADDrrFromLEA(MI.getOpcode());
  667   unsigned NewOpc = getADDrrFromLEA(MI.getOpcode());
lib/Target/X86/X86FixupSetCC.cpp
  107       if (MI.getOpcode() != X86::SETCCr)
  112         if (Use.getOpcode() == X86::MOVZX32rr8)
lib/Target/X86/X86FlagsCopyLowering.cpp
  360       if (MI.getOpcode() == TargetOpcode::COPY &&
  371     if (CopyDefI.getOpcode() != TargetOpcode::COPY) {
  598         } else if (MI.getOpcode() == TargetOpcode::COPY) {
  613           switch (MI.getOpcode()) {
  702       if (MI.getOpcode() == TargetOpcode::COPY &&
  787   switch (getMnemonicFromOpcode(MI.getOpcode())) {
  983   switch (SetBI.getOpcode()) {
lib/Target/X86/X86FloatingPoint.cpp
  841   int Opcode = Lookup(PopTable, I->getOpcode());
 1103   MI.setDesc(TII->get(getConcreteOpcode(MI.getOpcode())));
 1129   if (!KillsSrc && (MI.getOpcode() == X86::IST_Fp64m32 ||
 1130                     MI.getOpcode() == X86::ISTT_Fp16m32 ||
 1131                     MI.getOpcode() == X86::ISTT_Fp32m32 ||
 1132                     MI.getOpcode() == X86::ISTT_Fp64m32 ||
 1133                     MI.getOpcode() == X86::IST_Fp64m64 ||
 1134                     MI.getOpcode() == X86::ISTT_Fp16m64 ||
 1135                     MI.getOpcode() == X86::ISTT_Fp32m64 ||
 1136                     MI.getOpcode() == X86::ISTT_Fp64m64 ||
 1137                     MI.getOpcode() == X86::IST_Fp64m80 ||
 1138                     MI.getOpcode() == X86::ISTT_Fp16m80 ||
 1139                     MI.getOpcode() == X86::ISTT_Fp32m80 ||
 1140                     MI.getOpcode() == X86::ISTT_Fp64m80 ||
 1141                     MI.getOpcode() == X86::ST_FpP80m)) {
 1149   MI.setDesc(TII->get(getConcreteOpcode(MI.getOpcode())));
 1153   if (MI.getOpcode() == X86::IST_FP64m || MI.getOpcode() == X86::ISTT_FP16m ||
 1154       MI.getOpcode() == X86::ISTT_FP32m || MI.getOpcode() == X86::ISTT_FP64m ||
 1155       MI.getOpcode() == X86::ST_FP80m) {
 1201   MI.setDesc(TII->get(getConcreteOpcode(MI.getOpcode())));
 1351   int Opcode = Lookup(InstTable, MI.getOpcode());
 1397   MI.setDesc(TII->get(getConcreteOpcode(MI.getOpcode())));
 1423   MI.setDesc(TII->get(getConcreteOpcode(MI.getOpcode())));
 1450   switch (MI.getOpcode()) {
lib/Target/X86/X86FrameLowering.cpp
  158   switch (MBBI->getOpcode()) {
  418   unsigned Opc = PI->getOpcode();
 1185          (MBBI->getOpcode() == X86::PUSH32r ||
 1186           MBBI->getOpcode() == X86::PUSH64r)) {
 1519   switch (MI.getOpcode()) {
 1657     unsigned Opc = PI->getOpcode();
 1670   if (IsFunclet && Terminator->getOpcode() == X86::CATCHRET)
 1738       unsigned Opc = PI->getOpcode();
 1748   if (Terminator == MBB.end() || !isTailCallOpcode(Terminator->getOpcode())) {
 2180     if (MI->getOpcode() == X86::CATCHRET) {
 2796   unsigned Opcode = I->getOpcode();
lib/Target/X86/X86ISelLowering.cpp
 4244       unsigned Opcode = Def->getOpcode();
29627   switch (MI.getOpcode()) {
29947       NextMIIt->getOpcode() == MI.getOpcode() &&
30365   unsigned Opc = getOpcodeForRetpoline(MI.getOpcode());
31168   switch (MI.getOpcode()) {
31216         MI.getOpcode() == X86::RDFLAGS32 ? X86::PUSHF32 : X86::PUSHF64;
31217     unsigned Pop = MI.getOpcode() == X86::RDFLAGS32 ? X86::POP32r : X86::POP64r;
31238         MI.getOpcode() == X86::WRFLAGS32 ? X86::PUSH32r : X86::PUSH64r;
31240         MI.getOpcode() == X86::WRFLAGS32 ? X86::POPF32 : X86::POPF64;
31291     switch (MI.getOpcode()) {
31410         MI.getOpcode() == X86::LCMPXCHG8B_SAVE_EBX ? X86::EBX : X86::RBX;
lib/Target/X86/X86IndirectBranchTracking.cpp
   79   if (I == MBB.end() || I->getOpcode() != EndbrOpcode) {
lib/Target/X86/X86InstrInfo.cpp
   93   switch (MI.getOpcode()) {
  114     switch (MI.getOpcode()) {
  158       if (I->getOpcode() == getCallFrameDestroyOpcode() ||
  165     if (I->getOpcode() != getCallFrameDestroyOpcode())
  173   switch (MI.getOpcode()) {
  405   if (isFrameLoadOpcode(MI.getOpcode(), MemBytes))
  414   if (isFrameLoadOpcode(MI.getOpcode(), Dummy)) {
  439   if (isFrameStoreOpcode(MI.getOpcode(), MemBytes))
  449   if (isFrameStoreOpcode(MI.getOpcode(), Dummy)) {
  474     if (DefMI->getOpcode() != X86::MOVPC32r)
  484   switch (MI.getOpcode()) {
  650     switch (Orig.getOpcode()) {
  919   unsigned MIOpc = MI.getOpcode();
 1361   unsigned Opc = MI.getOpcode();
 1536   switch (MI.getOpcode()) {
 1545     switch (MI.getOpcode()) {
 1566         (X86::PFSUBRrr == MI.getOpcode() ? X86::PFSUBrr : X86::PFSUBRrr);
 1579       switch (MI.getOpcode()) {
 1604     switch (MI.getOpcode()) {
 1656       switch (MI.getOpcode()) {
 1672     assert(MI.getOpcode() == X86::MOVSDrr &&
 1790     unsigned Opc = MI.getOpcode();
 1849     if (isCommutableVPERMV3Instruction(MI.getOpcode())) {
 1850       unsigned Opc = getCommutedVPERMV3Opcode(MI.getOpcode());
 1857     const X86InstrFMA3Group *FMA3Group = getFMA3Group(MI.getOpcode(),
 1984   switch (MI.getOpcode()) {
 2139     const X86InstrFMA3Group *FMA3Group = getFMA3Group(MI.getOpcode(),
 2185   switch (MI.getOpcode()) {
 2195   switch (MI.getOpcode()) {
 2205   switch (MI.getOpcode()) {
 2393   switch (MI.getOpcode()) {
 2409   if (TailCall.getOpcode() != X86::TCRETURNdi &&
 2410       TailCall.getOpcode() != X86::TCRETURNdi64) {
 2458   unsigned Opc = TailCall.getOpcode() == X86::TCRETURNdi ? X86::TCRETURNdicc
 2527     if (I->getOpcode() == X86::JMP_1) {
 2731   if (ConditionDef->getOpcode() == TestOpcode &&
 2757     if (I->getOpcode() != X86::JMP_1 &&
 3278   switch (MI.getOpcode()) {
 3366   if (((FlagI.getOpcode() == X86::CMP64rr && OI.getOpcode() == X86::SUB64rr) ||
 3367        (FlagI.getOpcode() == X86::CMP32rr && OI.getOpcode() == X86::SUB32rr) ||
 3368        (FlagI.getOpcode() == X86::CMP16rr && OI.getOpcode() == X86::SUB16rr) ||
 3369        (FlagI.getOpcode() == X86::CMP8rr && OI.getOpcode() == X86::SUB8rr)) &&
 3377       ((FlagI.getOpcode() == X86::CMP64ri32 &&
 3378         OI.getOpcode() == X86::SUB64ri32) ||
 3379        (FlagI.getOpcode() == X86::CMP64ri8 &&
 3380         OI.getOpcode() == X86::SUB64ri8) ||
 3381        (FlagI.getOpcode() == X86::CMP32ri && OI.getOpcode() == X86::SUB32ri) ||
 3382        (FlagI.getOpcode() == X86::CMP32ri8 &&
 3383         OI.getOpcode() == X86::SUB32ri8) ||
 3384        (FlagI.getOpcode() == X86::CMP16ri && OI.getOpcode() == X86::SUB16ri) ||
 3385        (FlagI.getOpcode() == X86::CMP16ri8 &&
 3386         OI.getOpcode() == X86::SUB16ri8) ||
 3387        (FlagI.getOpcode() == X86::CMP8ri && OI.getOpcode() == X86::SUB8ri)) &&
 3399   switch (MI.getOpcode()) {
 3511   switch (MI.getOpcode()) {
 3557   switch (CmpInstr.getOpcode()) {
 3578     switch (CmpInstr.getOpcode()) {
 3678       if (!Movr0Inst && Instr.getOpcode() == X86::MOV32r0 &&
 3936     assert(MIB->getOpcode() == X86::MOV64ImmSExti8 ||
 3937            MIB->getOpcode() == X86::MOV32ImmSExti8);
 3943       MIB->setDesc(TII.get(MIB->getOpcode() ==
 3956     assert(MIB->getOpcode() == X86::MOV32ImmSExti8);
 4010       MIB->getOpcode() == X86::XOR64_FP ? X86::XOR64rr : X86::XOR32rr;
 4080   switch (MI.getOpcode()) {
 4144     if (MI.getOpcode() == X86::AVX512_256_SET0) {
 4177     unsigned Opc = (MI.getOpcode() == X86::AVX512_512_SEXT_MASK_64) ?
 4343   if (OpNum != 0 || !hasPartialRegUpdate(MI.getOpcode(), Subtarget))
 4557   if (!hasUndefRegUpdate(MI.getOpcode(), OpNum))
 4731   switch (MI.getOpcode()) {
 4750             (MI.getOpcode() == X86::VINSERTPSZrr) ? X86::VINSERTPSZrm :
 4751             (MI.getOpcode() == X86::VINSERTPSrr)  ? X86::VINSERTPSrm  :
 4772             (MI.getOpcode() == X86::VMOVHLPSZrr) ? X86::VMOVLPSZ128rm :
 4773             (MI.getOpcode() == X86::VMOVHLPSrr)  ? X86::VMOVLPSrm     :
 4804   if (!hasUndefRegUpdate(MI.getOpcode(), Ignored, /*ForLoadFold*/true) ||
 4833       (MI.getOpcode() == X86::CALL32r || MI.getOpcode() == X86::CALL64r ||
 4834        MI.getOpcode() == X86::PUSH16r || MI.getOpcode() == X86::PUSH32r ||
 4835        MI.getOpcode() == X86::PUSH64r))
 4840       (hasPartialRegUpdate(MI.getOpcode(), Subtarget, /*ForLoadFold*/true) ||
 4850   if (MI.getOpcode() == X86::ADD32ri &&
 4859       MI.getOpcode() != X86::ADD64rr)
 4877     I = lookupTwoAddrFoldTable(MI.getOpcode());
 4881       if (MI.getOpcode() == X86::MOV32r0) {
 4888     I = lookupFoldTable(MI.getOpcode(), OpNum);
 5011       (hasPartialRegUpdate(MI.getOpcode(), Subtarget, /*ForLoadFold*/true) ||
 5034     switch (MI.getOpcode()) {
 5073   unsigned Opc = LoadMI.getOpcode();
 5074   unsigned UserOpc = UserMI.getOpcode();
 5217       (hasPartialRegUpdate(MI.getOpcode(), Subtarget, /*ForLoadFold*/true) ||
 5226     switch (LoadMI.getOpcode()) {
 5258     switch (MI.getOpcode()) {
 5277   switch (LoadMI.getOpcode()) {
 5318     unsigned Opc = LoadMI.getOpcode();
 5454   const X86MemoryFoldTableEntry *I = lookupUnfoldTable(MI.getOpcode());
 5547   switch (DataMI->getOpcode()) {
 5560       switch (DataMI->getOpcode()) {
 6590   unsigned Opcode = MI.getOpcode();
 6693   unsigned Opcode = MI.getOpcode();
 6779     const uint16_t *table = lookupAVX512(MI.getOpcode(), dom,
 6784     if (Domain == 3 && (dom == 1 || table[3] == MI.getOpcode()))
 6822   unsigned opcode = MI.getOpcode();
 6869   const uint16_t *table = lookup(MI.getOpcode(), dom, ReplaceableInstrs);
 6873     table = lookup(MI.getOpcode(), dom, ReplaceableInstrsAVX2);
 6876     table = lookup(MI.getOpcode(), dom, ReplaceableInstrsFP);
 6883     table = lookup(MI.getOpcode(), dom, ReplaceableInstrsAVX2InsertExtract);
 6887     table = lookupAVX512(MI.getOpcode(), dom, ReplaceableInstrsAVX512);
 6889     if (table && Domain == 3 && table[3] == MI.getOpcode())
 6894     table = lookupAVX512(MI.getOpcode(), dom, ReplaceableInstrsAVX512DQ);
 6897     if (table && Domain == 3 && (dom == 1 || table[3] == MI.getOpcode()))
 6902     table = lookupAVX512(MI.getOpcode(), dom, ReplaceableInstrsAVX512DQMasked);
 6903     if (table && Domain == 3 && (dom == 1 || table[3] == MI.getOpcode()))
 7231   return isHighLatencyDef(DefMI.getOpcode());
 7261   switch (Inst.getOpcode()) {
 7566   switch (MI.getOpcode()) {
 7873         switch (I->getOpcode()) {
lib/Target/X86/X86InstructionSelector.cpp
  298       LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  315   unsigned Opcode = I.getOpcode();
  337   switch (I.getOpcode()) {
  479   if (I.getOpcode() == TargetOpcode::G_GEP) {
  488   } else if (I.getOpcode() == TargetOpcode::G_FRAME_INDEX) {
  501   unsigned Opc = I.getOpcode();
  561   unsigned Opc = I.getOpcode();
  589   assert((I.getOpcode() == TargetOpcode::G_GLOBAL_VALUE) &&
  635   assert((I.getOpcode() == TargetOpcode::G_CONSTANT) &&
  696     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  707   assert((I.getOpcode() == TargetOpcode::G_TRUNC ||
  708           I.getOpcode() == TargetOpcode::G_PTRTOINT) &&
  721     LLVM_DEBUG(dbgs() << TII.getName(I.getOpcode())
  759     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  773   assert((I.getOpcode() == TargetOpcode::G_ZEXT) && "unexpected instruction");
  813       LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  884   assert((I.getOpcode() == TargetOpcode::G_ANYEXT) && "unexpected instruction");
  915     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
  939   assert((I.getOpcode() == TargetOpcode::G_ICMP) && "unexpected instruction");
  990   assert((I.getOpcode() == TargetOpcode::G_FCMP) && "unexpected instruction");
 1081   assert((I.getOpcode() == TargetOpcode::G_UADDE) && "unexpected instruction");
 1096   while (Def->getOpcode() == TargetOpcode::G_TRUNC) {
 1102   if (Def->getOpcode() == TargetOpcode::G_UADDE) {
 1140   assert((I.getOpcode() == TargetOpcode::G_EXTRACT) &&
 1273   assert((I.getOpcode() == TargetOpcode::G_INSERT) && "unexpected instruction");
 1330   assert((I.getOpcode() == TargetOpcode::G_UNMERGE_VALUES) &&
 1355   assert((I.getOpcode() == TargetOpcode::G_MERGE_VALUES ||
 1356           I.getOpcode() == TargetOpcode::G_CONCAT_VECTORS) &&
 1405   assert((I.getOpcode() == TargetOpcode::G_BRCOND) && "unexpected instruction");
 1426   assert((I.getOpcode() == TargetOpcode::G_FCONSTANT) &&
 1491   assert((I.getOpcode() == TargetOpcode::G_IMPLICIT_DEF ||
 1492           I.getOpcode() == TargetOpcode::G_PHI) &&
 1502       LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
 1508   if (I.getOpcode() == TargetOpcode::G_IMPLICIT_DEF)
 1520   assert((I.getOpcode() == TargetOpcode::G_SDIV ||
 1521           I.getOpcode() == TargetOpcode::G_SREM ||
 1522           I.getOpcode() == TargetOpcode::G_UDIV ||
 1523           I.getOpcode() == TargetOpcode::G_UREM) &&
 1613   switch (I.getOpcode()) {
 1637     LLVM_DEBUG(dbgs() << "Failed to constrain " << TII.getName(I.getOpcode())
 1687   if ((I.getOpcode() == Instruction::SRem ||
 1688        I.getOpcode() == Instruction::URem) &&
 1720   assert(I.getOpcode() == TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS &&
lib/Target/X86/X86MCInstLower.cpp
  466   OutMI.setOpcode(MI->getOpcode());
  884   bool Is64Bits = MI.getOpcode() == X86::TLS_addr64 ||
  885                   MI.getOpcode() == X86::TLS_base_addr64;
  889   switch (MI.getOpcode()) {
 1751     switch (MI->getOpcode()) {
 1781   switch (MI->getOpcode()) {
 1842   switch (MI->getOpcode()) {
 2123     switch (MI->getOpcode()) {
 2182     switch (MI->getOpcode()) {
 2242     switch (MI->getOpcode()) {
 2346       switch (MI->getOpcode()) {
 2438       switch (MI->getOpcode()) {
lib/Target/X86/X86MacroFusion.cpp
   41   switch (MI.getOpcode()) {
lib/Target/X86/X86OptimizeLEAs.cpp
  230   unsigned Opcode = MI.getOpcode();
lib/Target/X86/X86RegisterBankInfo.cpp
  164   unsigned Opc = MI.getOpcode();
  284   switch (MI.getOpcode()) {
lib/Target/X86/X86RegisterInfo.cpp
  680   unsigned Opc = II->getOpcode();
  704   switch (MI.getOpcode()) {
  746   unsigned Opc = MI.getOpcode();
lib/Target/X86/X86SpeculativeLoadHardening.cpp
  377       if (MI.getOpcode() == X86::LFENCE)
  385       if (MI.getOpcode() == X86::MFENCE)
  645       if (MI.getOpcode() == X86::JMP_1) {
  710         UncondBr ? (UncondBr->getOpcode() == X86::JMP_1
  864       switch (MI.getOpcode()) {
  903         auto *UnfoldedRC = getRegClassForUnfoldedLoad(MF, *TII, MI.getOpcode());
  989     switch (TI.getOpcode()) {
 1207   switch (MI.getOpcode()) {
 1406   switch (MI.getOpcode()) {
 1685         if (MI.getOpcode() == X86::LFENCE)
 1693         if (MI.getOpcode() == X86::MFENCE)
 2590   switch (MI.getOpcode()) {
lib/Target/X86/X86VZeroUpper.cpp
  213     if (MI.getOpcode() == X86::VZEROALL || MI.getOpcode() == X86::VZEROUPPER) {
lib/Target/X86/X86WinAllocaExpander.cpp
   80   assert(MI->getOpcode() == X86::WIN_ALLOCA_32 ||
   81          MI->getOpcode() == X86::WIN_ALLOCA_64);
   88       (Def->getOpcode() != X86::MOV32ri && Def->getOpcode() != X86::MOV64ri) ||
  111   switch (MI.getOpcode()) {
  155       if (MI.getOpcode() == X86::WIN_ALLOCA_32 ||
  156           MI.getOpcode() == X86::WIN_ALLOCA_64) {
  175       } else if (MI.getOpcode() == X86::ADJCALLSTACKUP32 ||
  176                  MI.getOpcode() == X86::ADJCALLSTACKUP64) {
  178       } else if (MI.getOpcode() == X86::ADJCALLSTACKDOWN32 ||
  179                  MI.getOpcode() == X86::ADJCALLSTACKDOWN64) {
  211   bool Is64BitAlloca = MI->getOpcode() == X86::WIN_ALLOCA_64;
lib/Target/XCore/XCoreAsmPrinter.cpp
  263   switch (MI->getOpcode()) {
  279     if (MI->getOpcode() == XCore::BR_JT)
lib/Target/XCore/XCoreFrameLowering.cpp
  350   unsigned RetOpcode = MBBI->getOpcode();
  516       if (Old.getOpcode() == XCore::ADJCALLSTACKDOWN) {
  520         assert(Old.getOpcode() == XCore::ADJCALLSTACKUP);
lib/Target/XCore/XCoreFrameToArgsOffsetElim.cpp
   56       if (MBBI->getOpcode() == XCore::FRAME_TO_ARGS_OFFSET) {
lib/Target/XCore/XCoreISelLowering.cpp
 1527   assert((MI.getOpcode() == XCore::SELECT_CC) &&
lib/Target/XCore/XCoreInstrInfo.cpp
   64   int Opcode = MI.getOpcode();
   84   int Opcode = MI.getOpcode();
  207     if (IsBRU(LastInst->getOpcode())) {
  212     XCore::CondCode BranchCode = GetCondFromBranchOpc(LastInst->getOpcode());
  232   unsigned SecondLastOpc    = SecondLastInst->getOpcode();
  238     && IsBRU(LastInst->getOpcode())) {
  250   if (IsBRU(SecondLastInst->getOpcode()) &&
  251       IsBRU(LastInst->getOpcode())) {
  260   if (IsBR_JT(SecondLastInst->getOpcode()) && IsBRU(LastInst->getOpcode())) {
  313   if (!IsBRU(I->getOpcode()) && !IsCondBranch(I->getOpcode()))
  323   if (!IsCondBranch(I->getOpcode()))
lib/Target/XCore/XCoreMCInstLower.cpp
  104   OutMI.setOpcode(MI->getOpcode());
lib/Target/XCore/XCoreRegisterInfo.cpp
   68   switch (MI.getOpcode()) {
  104   switch (MI.getOpcode()) {
  136   switch (MI.getOpcode()) {
  168   unsigned OpCode = MI.getOpcode();
unittests/CodeGen/GlobalISel/CSETest.cpp
   34   EXPECT_EQ(MIBAddCopy->getOpcode(), TargetOpcode::COPY);
   62   EXPECT_EQ(TargetOpcode::G_BUILD_VECTOR, Splat0->getOpcode());
   67   EXPECT_EQ(TargetOpcode::G_BUILD_VECTOR, FSplat->getOpcode());
   95   EXPECT_TRUE(MIBAdd1->getOpcode() != TargetOpcode::COPY);
unittests/CodeGen/GlobalISel/GISelMITest.h
  131       if (MI.getOpcode() == TargetOpcode::COPY)
unittests/tools/llvm-exegesis/X86/SnippetRepetitorTest.cpp
   62   return Property(&MachineInstr::getOpcode, Eq(Opcode));