|
reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced
|
References
gen/lib/Target/X86/X86GenAsmMatcher.inc 8419 { 1374 /* cmpq */, X86::CMP64rr, Convert__Reg1_1__Reg1_0, AMFBS_None, { MCK_GR64, MCK_GR64 }, },
23005 { 1348 /* cmp */, X86::CMP64rr, Convert__Reg1_0__Reg1_1, AMFBS_None, { MCK_GR64, MCK_GR64 }, },
gen/lib/Target/X86/X86GenDAGISel.inc 10599 /* 22712*/ OPC_MorphNodeTo1, TARGET_VAL(X86::CMP64rr), 0,
gen/lib/Target/X86/X86GenFastISel.inc 9480 return fastEmitInst_rr(X86::CMP64rr, &X86::GR64RegClass, Op0, Op0IsKill, Op1, Op1IsKill);
gen/lib/Target/X86/X86GenSubtargetInfo.inc 23039 case X86::CMP64rr:
23216 case X86::CMP64rr:
lib/Target/X86/X86ExpandPseudo.cpp 99 BuildMI(*MBB, MBBI, DL, TII->get(X86::CMP64rr))
lib/Target/X86/X86FastISel.cpp 1349 case MVT::i64: return X86::CMP64rr;
lib/Target/X86/X86FrameLowering.cpp 674 BuildMI(&MBB, DL, TII.get(X86::CMP64rr)).addReg(FinalReg).addReg(LimitReg);
710 BuildMI(LoopMBB, DL, TII.get(X86::CMP64rr))
lib/Target/X86/X86InstrFoldTables.cpp 272 { X86::CMP64rr, X86::CMP64mr, TB_FOLDED_LOAD },
477 { X86::CMP64rr, X86::CMP64rm, 0 },
lib/Target/X86/X86InstrInfo.cpp 3331 case X86::CMP64rr:
3366 if (((FlagI.getOpcode() == X86::CMP64rr && OI.getOpcode() == X86::SUB64rr) ||
3584 case X86::SUB64rr: NewOpcode = X86::CMP64rr; break;
lib/Target/X86/X86InstructionSelector.cpp 968 OpCmp = X86::CMP64rr;
lib/Target/X86/X86MacroFusion.cpp 86 case X86::CMP64rr:
lib/Target/X86/X86SpeculativeLoadHardening.cpp 1167 auto CheckI = BuildMI(MBB, InsertPt, DebugLoc(), TII->get(X86::CMP64rr))
2550 BuildMI(MBB, InsertPt, Loc, TII->get(X86::CMP64rr))
unittests/tools/llvm-exegesis/X86/SnippetGeneratorTest.cpp 168 const unsigned Opcode = X86::CMP64rr;