100 if (!Subtarget.is64Bit())
776 if (!Subtarget.is64Bit())
844 if (Subtarget.is64Bit())
916 bool Is64Bit = Subtarget.is64Bit();
1654 if (Subtarget.hasSSE41()) {
1788 assert(Subtarget.hasSSE2() && "Commuting MOVHLP/UNPCKHPD requires SSE2!");
2037 if (Subtarget.hasSSE41())
2051 if (Subtarget.hasSSE2())
2416 if (Subtarget.isTargetWin64() && MF->hasWinCFI()) {
2729 Subtarget.is64Bit() ? X86::TEST64rr : X86::TEST32rr;
2834 if (!Subtarget.hasCMov())
2969 bool HasAVX = Subtarget.hasAVX();
2970 bool HasVLX = Subtarget.hasVLX();
2982 Subtarget.is64Bit()) {
3026 Opc = Subtarget.hasBWI() ? X86::KMOVQkk : X86::KMOVWkk;
3028 Opc = CopyToFromAsymmetricReg(DestReg, SrcReg, Subtarget);
3254 (Subtarget.getFrameLowering()->getStackAlignment() >= Alignment) ||
3256 unsigned Opc = getStoreRegOpcode(SrcReg, RC, isAligned, Subtarget);
3269 (Subtarget.getFrameLowering()->getStackAlignment() >= Alignment) ||
3271 unsigned Opc = getLoadRegOpcode(DestReg, RC, isAligned, Subtarget);
4078 bool HasAVX = Subtarget.hasAVX();
4089 return ExpandMOVImmSExti8(MIB, *this, Subtarget);
4119 bool HasVLX = Subtarget.hasVLX();
4133 bool HasVLX = Subtarget.hasVLX();
4343 if (OpNum != 0 || !hasPartialRegUpdate(MI.getOpcode(), Subtarget))
4577 unsigned Opc = Subtarget.hasAVX() ? X86::VXORPSrr : X86::XORPSrr;
4826 bool isSlowTwoMemOps = Subtarget.slowTwoMemOps();
4840 (hasPartialRegUpdate(MI.getOpcode(), Subtarget, /*ForLoadFold*/true) ||
5011 (hasPartialRegUpdate(MI.getOpcode(), Subtarget, /*ForLoadFold*/true) ||
5030 std::min(Alignment, Subtarget.getFrameLowering()->getStackAlignment());
5217 (hasPartialRegUpdate(MI.getOpcode(), Subtarget, /*ForLoadFold*/true) ||
5305 if (Subtarget.is64Bit())
5475 Subtarget.isUnalignedMem16Slow())
5502 Opc = getBroadcastOpcode(I, RC, Subtarget);
5506 Opc = getLoadRegOpcode(Reg, RC, isAligned, Subtarget);
5583 unsigned Opc = getStoreRegOpcode(Reg, DstRC, isAligned, Subtarget);
5638 Subtarget.isUnalignedMem16Slow())
5646 Opc = getBroadcastOpcode(I, RC, Subtarget);
5650 Opc = getLoadRegOpcode(0, RC, isAligned, Subtarget);
5709 Subtarget.isUnalignedMem16Slow())
5717 DAG.getMachineNode(getStoreRegOpcode(0, DstRC, isAligned, Subtarget),
5971 if (Subtarget.is64Bit()) {
6016 assert((!Subtarget.is64Bit() ||
6030 Subtarget.is64Bit() ? &X86::GR64_NOSPRegClass : &X86::GR32_NOSPRegClass);
6601 if (!Is256 || Subtarget.hasAVX2())
6654 if (Subtarget.hasDQI())
6711 if (Subtarget.hasAVX2()) {
6776 if (Subtarget.hasDQI())
6833 validDomains = Subtarget.hasAVX2() ? 0xe : 0x6;
6839 if (!Subtarget.hasAVX2())
6844 } else if (Subtarget.hasDQI() && lookupAVX512(opcode, domain,
6847 } else if (Subtarget.hasDQI()) {
6871 assert((Subtarget.hasAVX2() || Domain < 3) &&
6881 assert(Subtarget.hasAVX2() &&
6886 assert(Subtarget.hasAVX512() && "Requires AVX-512");
6893 assert((Subtarget.hasDQI() || Domain >= 3) && "Requires AVX-512DQ");
6901 assert((Subtarget.hasDQI() || Domain >= 3) && "Requires AVX-512DQ");
8028 if (Subtarget.getFrameLowering()->has128ByteRedZone(MF)) {
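Taken together, these hits show one recurring pattern: the instruction-info code queries the subtarget's feature predicates (is64Bit(), hasSSE2(), hasAVX(), hasVLX(), ...) and picks a different opcode or code path depending on which ISA extensions are available. The following is a minimal, self-contained C++ sketch of that pattern only; MiniSubtarget and pickRegZeroOpcode are hypothetical stand-ins, not LLVM API, and the opcode names are borrowed from the hits above purely for illustration.

// Minimal sketch (assumed names, not LLVM code) of the feature-query pattern
// visible in the listing: consult subtarget feature flags, then select an
// opcode, as in "Subtarget.hasAVX() ? X86::VXORPSrr : X86::XORPSrr".
#include <cstdio>

struct MiniSubtarget {
  bool Is64Bit = true;
  bool HasSSE2 = true;
  bool HasAVX  = false;

  bool is64Bit() const { return Is64Bit; }
  bool hasSSE2() const { return HasSSE2; }
  bool hasAVX()  const { return HasAVX; }
};

// Returns the name of the opcode a backend would use to zero a vector
// register, preferring the newest extension the subtarget reports.
const char *pickRegZeroOpcode(const MiniSubtarget &ST) {
  if (ST.hasAVX())
    return "VXORPSrr";   // VEX-encoded form when AVX is available
  if (ST.hasSSE2())
    return "XORPSrr";    // legacy SSE form otherwise
  return "MOV32r0";      // scalar fallback for illustration
}

int main() {
  MiniSubtarget ST;
  std::printf("zeroing opcode: %s\n", pickRegZeroOpcode(ST));
  ST.HasAVX = true;
  std::printf("with AVX:       %s\n", pickRegZeroOpcode(ST));
  return 0;
}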