reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References

gen/lib/Target/X86/X86GenCallingConv.inc
  196     if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  208     if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  279   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  300   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  795   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  859   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  947   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
  972   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 1861   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 1928   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2019   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2044   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2324   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2391   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2482   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2507   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 2642   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 3096   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 3669   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
 3869   if (static_cast<const X86Subtarget&>(State.getMachineFunction().getSubtarget()).is64Bit()) {
gen/lib/Target/X86/X86GenDAGISel.inc
253364   case 8: return (Subtarget->is64Bit());
253366   case 10: return (Subtarget->is64Bit()) && (!Subtarget->slowIncDec() || MF->getFunction().hasOptSize());
253419   case 63: return (Subtarget->hasPTWRITE()) && (Subtarget->is64Bit());
253421   case 65: return (Subtarget->hasMOVDIRI()) && (Subtarget->is64Bit());
253423   case 67: return (Subtarget->hasMOVDIR64B()) && (Subtarget->is64Bit());
253428   case 72: return (Subtarget->hasFXSR()) && (Subtarget->is64Bit());
253430   case 74: return (Subtarget->hasXSAVE()) && (Subtarget->is64Bit());
253432   case 76: return (Subtarget->hasXSAVEOPT()) && (Subtarget->is64Bit());
253434   case 78: return (Subtarget->hasXSAVEC()) && (Subtarget->is64Bit());
253436   case 80: return (Subtarget->hasXSAVES()) && (Subtarget->is64Bit());
253437   case 81: return (Subtarget->hasINVPCID()) && (!Subtarget->is64Bit());
253438   case 82: return (Subtarget->hasINVPCID()) && (Subtarget->is64Bit());
253441   case 85: return (Subtarget->hasWAITPKG()) && (!Subtarget->is64Bit());
253443   case 87: return (Subtarget->hasWAITPKG()) && (Subtarget->is64Bit());
253446   case 90: return (Subtarget->hasAVX()) && (!Subtarget->is64Bit());
253447   case 91: return (!Subtarget->is64Bit()) && (Subtarget->hasSSE2() && !Subtarget->hasAVX());
253448   case 92: return (Subtarget->hasAVX()) && (Subtarget->is64Bit());
253449   case 93: return (Subtarget->is64Bit()) && (Subtarget->hasSSE2() && !Subtarget->hasAVX());
253451   case 95: return (Subtarget->hasMMX()) && (Subtarget->hasSSE1()) && (!Subtarget->is64Bit());
253452   case 96: return (Subtarget->hasMMX()) && (Subtarget->hasSSE1()) && (Subtarget->is64Bit());
253454   case 98: return (Subtarget->hasFSGSBase()) && (Subtarget->is64Bit());
253455   case 99: return (!Subtarget->is64Bit());
253456   case 100: return (Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls());
253457   case 101: return (!TM.isPositionIndependent()) && (!Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls());
253460   case 104: return (!Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls());
253461   case 105: return (Subtarget->is64Bit()) && (Subtarget->useRetpolineIndirectCalls());
253462   case 106: return (!Subtarget->is64Bit()) && (Subtarget->useRetpolineIndirectCalls());
253492   case 136: return (!Subtarget->slowTwoMemOps()) && (!Subtarget->is64Bit());
253493   case 137: return (!Subtarget->slowTwoMemOps()) && (!Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls());
253494   case 138: return (!Subtarget->slowTwoMemOps()) && (Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls());
253496   case 140: return (!Subtarget->slowTwoMemOps()) && (Subtarget->is64Bit());
253507   case 151: return (Subtarget->hasENQCMD()) && (!Subtarget->is64Bit());
253509   case 153: return (Subtarget->hasENQCMD()) && (Subtarget->is64Bit());
253511   case 155: return (Subtarget->hasCmpxchg16b()) && (Subtarget->is64Bit());
253513   case 157: return (!Subtarget->is64Bit()) && (MF->getFunction().hasOptSize());
253517   case 161: return (Subtarget->hasRDPID()) && (!Subtarget->is64Bit());
253518   case 162: return (Subtarget->hasRDPID()) && (Subtarget->is64Bit());
253526   case 170: return (Subtarget->is64Bit()) && (!Subtarget->isTarget64BitLP64());
gen/lib/Target/X86/X86GenFastISel.inc
  464   if ((!Subtarget->is64Bit())) {
  473   if ((!Subtarget->is64Bit())) {
  482   if ((Subtarget->is64Bit())) {
 1415   if ((Subtarget->is64Bit())) {
 1985   if ((Subtarget->is64Bit())) {
 1992   if ((Subtarget->is64Bit())) {
 2517   if ((!Subtarget->is64Bit())) {
 2526   if ((!Subtarget->is64Bit()) && (Subtarget->useRetpolineIndirectCalls())) {
 2529   if ((!Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls())) {
 2538   if ((Subtarget->is64Bit()) && (Subtarget->useRetpolineIndirectCalls())) {
 2541   if ((Subtarget->is64Bit()) && (!Subtarget->useRetpolineIndirectCalls())) {
 4497   if ((!Subtarget->is64Bit())) {
 4506   if ((!Subtarget->is64Bit())) {
 4515   if ((Subtarget->is64Bit())) {
 4535   if ((!Subtarget->is64Bit())) {
 4544   if ((!Subtarget->is64Bit())) {
 4553   if ((Subtarget->is64Bit())) {
 4851   if ((Subtarget->is64Bit())) {
 5885   if ((Subtarget->is64Bit())) {
gen/lib/Target/X86/X86GenGlobalISel.inc
  341   if (!Subtarget->is64Bit())
  343   if (Subtarget->is64Bit())
gen/lib/Target/X86/X86GenRegisterInfo.inc
 6251     return MF.getSubtarget<X86Subtarget>().is64Bit();
 6267     return MF.getSubtarget<X86Subtarget>().is64Bit();
lib/Target/X86/X86AsmPrinter.cpp
  420     Reg = getX86SubSuperRegister(Reg, P.getSubtarget().is64Bit() ? 64 : 32);
lib/Target/X86/X86AvoidStoreForwardingBlocks.cpp
  674       !MF.getSubtarget<X86Subtarget>().is64Bit())
lib/Target/X86/X86CallFrameOptimization.cpp
  503   bool Is64Bit = STI->is64Bit();
lib/Target/X86/X86CallLowering.cpp
  398   bool Is64Bit = STI.is64Bit();
  429   if (STI.is64Bit() && !IsFixed && !STI.isCallingConvWin64(Info.CallConv)) {
lib/Target/X86/X86CallingConv.cpp
   99                      .is64Bit();
  302   bool Is64Bit = static_cast<const X86Subtarget &>(MF.getSubtarget()).is64Bit();
lib/Target/X86/X86ExpandPseudo.cpp
  304             TII->get(STI->is64Bit() ? X86::IRET64 : X86::IRET32));
  314                     TII->get(STI->is64Bit() ? X86::RETQ : X86::RETL));
  317                     TII->get(STI->is64Bit() ? X86::RETIQ : X86::RETIL))
  320       assert(!STI->is64Bit() &&
lib/Target/X86/X86FastISel.cpp
 1279                   TII.get(Subtarget->is64Bit() ? X86::RETIQ : X86::RETIL))
 1283                   TII.get(Subtarget->is64Bit() ? X86::RETQ : X86::RETL));
 1920     if (!Subtarget->is64Bit())
 1987       OpEntry.DivRemResultReg == X86::AH && Subtarget->is64Bit()) {
 2562   return Len <= (Subtarget->is64Bit() ? 32 : 16);
 2572   bool i64Legal = Subtarget->is64Bit();
 2743     unsigned SizeWidth = Subtarget->is64Bit() ? 64 : 32;
 2758     unsigned SizeWidth = Subtarget->is64Bit() ? 64 : 32;
 3072   if (!Subtarget->is64Bit())
 3157   if (Subtarget->is64Bit())
 3185   bool Is64Bit        = Subtarget->is64Bit();
 3536       X86::isCalleePop(CC, Subtarget->is64Bit(), IsVarArg,
 3784   else if (Subtarget->is64Bit() && TM.getCodeModel() == CodeModel::Small)
lib/Target/X86/X86FixupSetCC.cpp
  136       const TargetRegisterClass *RC = MF.getSubtarget<X86Subtarget>().is64Bit()
lib/Target/X86/X86FlagsCopyLowering.cpp
  951       if (TargetRegSize == 1 && !Subtarget->is64Bit()) {
lib/Target/X86/X86FrameLowering.cpp
   40                           STI.is64Bit() ? -8 : -4),
   44   Is64Bit = STI.is64Bit();
  531   assert(STI.is64Bit() && "different expansion needed for 32 bit");
 2077   unsigned Opc = STI.is64Bit() ? X86::PUSH64r : X86::PUSH32r;
 2147   if (STI.is64Bit()) {
 2208   unsigned Opc = STI.is64Bit() ? X86::POP64r : X86::POP32r;
 2787             TII.get(STI.is64Bit() ? X86::POP64r : X86::POP32r), Regs[i]);
 3179   if (!STI.is64Bit() || !MF.hasEHFunclets() ||
lib/Target/X86/X86ISelDAGToDAG.cpp
  948           (Subtarget->is64Bit() ||
 1357   if (Subtarget->is64Bit()) {
 1423   if (Subtarget->is64Bit() &&
 1495       if (Subtarget->is64Bit() &&
 1904         (!Subtarget->is64Bit() || isDispSafeForFrameIndex(AM.Disp))) {
 2472     if (Subtarget->is64Bit())
lib/Target/X86/X86ISelLowering.cpp
  127   else if (Subtarget.is64Bit())
  138     if (Subtarget.hasSlowDivide64() && Subtarget.is64Bit())
  181   if (Subtarget.is64Bit())
  216     if (Subtarget.is64Bit())
  291     if (Subtarget.is64Bit()) {
  296   } else if (!Subtarget.is64Bit())
  325   if (Subtarget.is64Bit())
  346     if (Subtarget.is64Bit()) {
  364     if (Subtarget.is64Bit()) {
  401     if (Subtarget.is64Bit())
  420     if (VT == MVT::i64 && !Subtarget.is64Bit())
  441     if (VT == MVT::i64 && !Subtarget.is64Bit())
  453     if (VT == MVT::i64 && !Subtarget.is64Bit())
  476   if (!Subtarget.is64Bit())
  502   bool Is64Bit = Subtarget.is64Bit();
  665   if (!Subtarget.useSoftFloat() && Subtarget.is64Bit() && Subtarget.hasSSE1()) {
  920       if (VT == MVT::v2i64 && !Subtarget.is64Bit())
 1773   if (!Subtarget.is64Bit()) {
 1784     if (VT == MVT::i64 && !Subtarget.is64Bit())
 1800   if (!Subtarget.is64Bit()) {
 1908   return Subtarget.isTargetMachO() && Subtarget.is64Bit();
 1919   unsigned XorOp = Subtarget.is64Bit() ? X86::XOR64_FP : X86::XOR32_FP;
 2057   if (Subtarget.is64Bit()) {
 2111       if (Subtarget.hasSSE1() && (Subtarget.is64Bit() || Subtarget.hasX87()) &&
 2115                !Subtarget.is64Bit() && Subtarget.hasSSE2()) {
 2128   if (Subtarget.is64Bit() && Size >= 8)
 2194   if (Subtarget.is64Bit())
 2232   if (!Subtarget.is64Bit())
 2262     RRC = Subtarget.is64Bit() ? &X86::GR64RegClass : &X86::GR32RegClass;
 2281   if (Subtarget.is64Bit())
 2309       unsigned Offset = (Subtarget.is64Bit()) ? 0x28 : 0x14;
 2369     unsigned Offset = (Subtarget.is64Bit()) ? 0x48 : 0x24;
 2527         (Subtarget.is64Bit() && !Subtarget.hasSSE1())) {
 2531                (Subtarget.is64Bit() && !Subtarget.hasSSE2())) {
 2554     if (Subtarget.is64Bit()) {
 2631         = (Subtarget.is64Bit() && !Subtarget.isTarget64BitILP32()) ?
 2829   bool Is64Bit = Subtarget.is64Bit();
 3119   assert(Subtarget.is64Bit());
 3138   assert(Subtarget.is64Bit());
 3187   bool Is64Bit = Subtarget.is64Bit();
 3614   bool Is64Bit        = Subtarget.is64Bit();
 4448     if (!Subtarget.is64Bit() && ((!isa<GlobalAddressSDNode>(Callee) &&
 4477       X86::isCalleePop(CalleeCC, Subtarget.is64Bit(), isVarArg,
 5034     unsigned MaxIntSize = Subtarget.is64Bit() ? 64 : 32;
 5139   if (VT == MVT::i64 && !Subtarget.is64Bit())
 8548     if (VT == MVT::v64i1 && !Subtarget.is64Bit()) {
 9711           (EltVT == MVT::i64 && Subtarget.is64Bit())) {
10904   if (EltVT == MVT::i64 && !Subtarget.is64Bit()) {
12591     if (!Subtarget.is64Bit() && VT.getScalarType() == MVT::i64) {
12655   if (!Subtarget.is64Bit() && V.getValueType() == MVT::i64) {
18097         if (Subtarget.is64Bit())
18102                                            Subtarget.is64Bit());
18105         return LowerToTLSExecModel(GA, DAG, PtrVT, model, Subtarget.is64Bit(),
18119     bool PIC32 = PositionIndependent && !Subtarget.is64Bit();
18153     unsigned Reg = Subtarget.is64Bit() ? X86::RAX : X86::EAX;
18175     Value *Ptr = Constant::getNullValue(Subtarget.is64Bit()
18181     SDValue TlsArray = Subtarget.is64Bit()
18196       if (Subtarget.is64Bit())
18336    if (!Subtarget.hasDQI() || SrcVT != MVT::i64 || Subtarget.is64Bit() ||
18447   if (SrcVT == MVT::i64 && isScalarFPTypeInSSEReg(VT) && Subtarget.is64Bit())
18455       !Subtarget.is64Bit())
18809       (SrcVT == MVT::i32 || (SrcVT == MVT::i64 && Subtarget.is64Bit()))) {
18816   if (SrcVT == MVT::i32 && Subtarget.is64Bit()) {
18828   if (Subtarget.is64Bit() && SrcVT == MVT::i64 && DstVT == MVT::f32)
18845   if (isScalarFPTypeInSSEReg(Op.getValueType()) && !Subtarget.is64Bit())
19578     if (Subtarget.is64Bit()) {
20324       !(Subtarget.is64Bit() && VT == MVT::i64))
21275   if (VT == MVT::v64i1 && !Subtarget.is64Bit()) {
21884     MVT StVT = Subtarget.is64Bit() && StoreVT.isInteger() ? MVT::i64 : MVT::f64;
22211   bool Is64Bit = Subtarget.is64Bit();
22283   if (!Subtarget.is64Bit() ||
22333   assert(Subtarget.is64Bit() &&
22395   assert(Subtarget.is64Bit() && "This code only handles 64-bit va_copy!");
22726   if (Subtarget.is64Bit())
23819   if (Subtarget.is64Bit()) {
23831   if (Subtarget.is64Bit()) {
24343   if (!Subtarget.is64Bit()) {
24381   if (Subtarget.is64Bit()) {
26440     return Subtarget.hasCmpxchg8b() && !Subtarget.is64Bit();
26454   if (MemType->getPrimitiveSizeInBits() == 64 && !Subtarget.is64Bit() &&
26472   if (MemType->getPrimitiveSizeInBits() == 64 && !Subtarget.is64Bit() &&
26483   unsigned NativeWidth = Subtarget.is64Bit() ? 64 : 32;
26524   unsigned NativeWidth = Subtarget.is64Bit() ? 64 : 32;
26636   if (Subtarget.is64Bit()) {
26701     assert(Subtarget.is64Bit() && "Node not type legal!");
26765     assert(!Subtarget.is64Bit() && "Expected 32-bit mode");
26817     assert(SrcVT == MVT::i64 && !Subtarget.is64Bit() &&
27281   assert(Subtarget.isTargetDarwin() && Subtarget.is64Bit());
28185       assert(!Subtarget.is64Bit() && "i64 should be legal");
28444       assert(!Subtarget.is64Bit() && "Expected 32-bit mode");
28524       MVT LdVT = Subtarget.is64Bit() && VT.isInteger() ? MVT::i64 : MVT::f64;
28926         Subtarget.is64Bit() && (AM.BaseOffs || AM.Scale > 1))
29057   return Ty1->isIntegerTy(32) && Ty2->isIntegerTy(64) && Subtarget.is64Bit();
29062   return VT1 == MVT::i32 && VT2 == MVT::i64 && Subtarget.is64Bit();
30022   const bool Is64Bit = Subtarget.is64Bit();
30241       Subtarget.is64Bit() ?
30244   if (Subtarget.is64Bit()) {
30318       assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30321       assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30324       assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30327       assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30330       assert(Subtarget.is64Bit() && "Should not be using a 64-bit thunk!");
30339     assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30342     assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30345     assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30348     assert(!Subtarget.is64Bit() && "Should not be using a 32-bit thunk!");
30351     assert(Subtarget.is64Bit() && "Should not be using a 64-bit thunk!");
30374   if (Subtarget.is64Bit())
30542     if (Subtarget.is64Bit()) {
30909     if (Subtarget.is64Bit())
30926   addFrameReference(MIB, FI, Subtarget.is64Bit() ? 56 : 36);
31034                     Subtarget.is64Bit() ? 8 : 4);
31040   if (Subtarget.is64Bit()) {
38724           if (is64BitFP && !Subtarget.is64Bit()) {
38942     case 64: return Subtarget.is64Bit() ? true : false;
39183   if (VT == MVT::i64 && Subtarget.is64Bit() &&
40538     if (VT == MVT::v64i1 && !Subtarget.is64Bit()) {
40666   if ((VT == MVT::i64 && F64IsLegal && !Subtarget.is64Bit()) &&
40687     if (Subtarget.is64Bit() || F64IsLegal) {
40688       MVT LdVT = Subtarget.is64Bit() ? MVT::i64 : MVT::f64;
40732   if (VT == MVT::i64 && F64IsLegal && !Subtarget.is64Bit() &&
43277         !Subtarget.is64Bit() && LdVT == MVT::i64) {
45655           (Subtarget.is64Bit() && C->getZExtValue() == 0xffffffff)) {
45799       if (Subtarget.is64Bit())
45825       if (Subtarget.is64Bit()) {
45854       if (VT == MVT::i32 || VT == MVT::f32 || !Subtarget.is64Bit())
45862       if (VT == MVT::i32 || !Subtarget.is64Bit())
46011   if (!Subtarget.is64Bit() &&
46045       bool is64Bit = Subtarget.is64Bit();
46165   if (!Subtarget.is64Bit())
46214   return Subtarget.is64Bit();
46233   if (Subtarget.is64Bit())
lib/Target/X86/X86IndirectBranchTracking.cpp
  112   EndbrOpcode = SubTarget.is64Bit() ? X86::ENDBR64 : X86::ENDBR32;
lib/Target/X86/X86InstrInfo.cpp
   85                       (STI.is64Bit() ? X86::RETQ : X86::RETL)),
  100     if (!Subtarget.is64Bit())
  776   if (!Subtarget.is64Bit())
  844       if (Subtarget.is64Bit())
  916   bool Is64Bit = Subtarget.is64Bit();
 2729       Subtarget.is64Bit() ? X86::TEST64rr : X86::TEST32rr;
 2982         Subtarget.is64Bit()) {
 3074     if (STI.is64Bit())
 3159       if (STI.is64Bit())
 3935   if (Subtarget.is64Bit()) {
 5305       if (Subtarget.is64Bit())
 5971     if (Subtarget.is64Bit()) {
 6016   assert((!Subtarget.is64Bit() ||
 6030       Subtarget.is64Bit() ? &X86::GR64_NOSPRegClass : &X86::GR32_NOSPRegClass);
 7744       if (STI.is64Bit() && (TM->getCodeModel() == CodeModel::Small ||
 7772       if (STI.is64Bit()) {
 7902       const bool is64Bit = STI.is64Bit();
 7922       const bool is64Bit = STI.is64Bit();
lib/Target/X86/X86InstructionSelector.cpp
 1448   if (CM == CodeModel::Large && STI.is64Bit()) {
 1465   } else if (CM == CodeModel::Small || !STI.is64Bit()) {
 1475     } else if (STI.is64Bit() && TM.getCodeModel() == CodeModel::Small)
 1689       OpEntry.DivRemResultReg == X86::AH && STI.is64Bit()) {
lib/Target/X86/X86LegalizerInfo.cpp
  146   if (!Subtarget.is64Bit()) {
  201   if (!Subtarget.is64Bit())
lib/Target/X86/X86MCInstLower.cpp
   98              MF->getSubtarget<X86Subtarget>().is64Bit(), STI);
  346   if (Printer.getSubtarget().is64Bit())
  394   return Subtarget.is64Bit() ? X86::RETQ : X86::RETL;
  739     unsigned ReturnReg = Subtarget.is64Bit() ? X86::RAX : X86::EAX;
  776     if (!AsmPrinter.getSubtarget().is64Bit()) {
 1099   assert(Subtarget->is64Bit() && "Statepoint currently only supports X86-64");
 1103     EmitNops(*OutStreamer, PatchBytes, Subtarget->is64Bit(),
 1187   bool Is64Bits = Subtarget->is64Bit();
 1225       unsigned NopSize = EmitNop(*OutStreamer, MinSize, Subtarget->is64Bit(),
 1248   assert(Subtarget->is64Bit() && "Patchpoint currently only supports X86-64");
 1300   EmitNops(*OutStreamer, NumBytes - EncodedBytes, Subtarget->is64Bit(),
 1306   assert(Subtarget->is64Bit() && "XRay custom events only supports X86-64");
 1359         EmitNops(*OutStreamer, 4, Subtarget->is64Bit(), getSubtargetInfo());
 1388       EmitNops(*OutStreamer, 1, Subtarget->is64Bit(), getSubtargetInfo());
 1400   assert(Subtarget->is64Bit() && "XRay typed events only supports X86-64");
 1455         EmitNops(*OutStreamer, 4, Subtarget->is64Bit(), getSubtargetInfo());
 1489       EmitNops(*OutStreamer, 1, Subtarget->is64Bit(), getSubtargetInfo());
 1520   EmitNops(*OutStreamer, 9, Subtarget->is64Bit(), getSubtargetInfo());
 1550   EmitNops(*OutStreamer, 10, Subtarget->is64Bit(), getSubtargetInfo());
 1571   EmitNops(*OutStreamer, 9, Subtarget->is64Bit(), getSubtargetInfo());
lib/Target/X86/X86SelectionDAGInfo.cpp
  125       if (Subtarget.is64Bit() && ((Align & 0x7) == 0)) {  // QWORD aligned
  228     return Subtarget.is64Bit() ? MVT::i64 : MVT::i32;
lib/Target/X86/X86Subtarget.cpp
   75   if (is64Bit()) {
  153   if (is64Bit()) {
  194     if (is64Bit() && F && (CallingConv::X86_RegCall == F->getCallingConv()))
  202         is64Bit())
  207   if (is64Bit()) {
  341   else if (is64Bit())
lib/Target/X86/X86Subtarget.h
  583   bool hasCMov() const { return HasCMov || X86SSELevel >= SSE1 || is64Bit(); }
  654   bool hasCmpxchg16b() const { return HasCmpxchg16b && is64Bit(); }
  738   bool isXRaySupported() const override { return is64Bit(); }
  756   bool hasMFence() const { return hasSSE2() || is64Bit(); }
  775   bool isTargetNaCl32() const { return isTargetNaCl() && !is64Bit(); }
  776   bool isTargetNaCl64() const { return isTargetNaCl() && is64Bit(); }
lib/Target/X86/X86TargetTransformInfo.cpp
  124   if (ST->is64Bit()) {
  144   if (ST->is64Bit())
 2251       if (ST->is64Bit())
 2260       if (ST->is64Bit())
 2270     if (ST->is64Bit())
 2365     if (ST->is64Bit())
 3453   if (ST->is64Bit()) {
lib/Target/X86/X86WinAllocaExpander.cpp
  210   bool Is64Bit = STI->is64Bit();