References

lib/Target/AArch64/AArch64ISelLowering.cpp
  137   if (Subtarget->hasFPARMv8()) {
  144   if (Subtarget->hasNEON()) {
  165   if (Subtarget->hasSVE()) {
  189   computeRegisterProperties(Subtarget->getRegisterInfo());
  286   if (Subtarget->isTargetWindows())
  357   if (Subtarget->hasFullFP16())
  396   if (!Subtarget->hasFullFP16()) {
  490   if (Subtarget->hasFullFP16()) {
  515   if (Subtarget->hasPerfMon())
  530   if (Subtarget->isTargetMachO() && TM.getCodeModel() == CodeModel::Large) {
  578   if (Subtarget->isTargetWindows())
  607   if (Subtarget->supportsAddressTopByteIgnored())
  623   MaxStoresPerMemset = Subtarget->requiresStrictAlign()
  628   MaxStoresPerMemcpy = Subtarget->requiresStrictAlign()
  634   MaxLoadsPerMemcmp = Subtarget->requiresStrictAlign()
  659   if (Subtarget->hasNEON()) {
  713     if (Subtarget->hasFullFP16()) {
  790     if (Subtarget->hasFullFP16()) {
  804   if (Subtarget->hasSVE()) {
  814   PredictableSelectIsExpensive = Subtarget->predictableSelectIsExpensive();
  882       (VT.getVectorElementType() != MVT::f16 || Subtarget->hasFullFP16()))
  887   if (Subtarget->isLittleEndian()) {
 1068     if (!Subtarget->isTargetILP32())
 1128   if (Subtarget->requiresStrictAlign())
 1133     *Fast = !Subtarget->isMisaligned128StoreSlow() || VT.getStoreSize() != 16 ||
 1153   if (Subtarget->requiresStrictAlign())
 1158     *Fast = !Subtarget->isMisaligned128StoreSlow() ||
 1337   const TargetInstrInfo *TII = Subtarget->getInstrInfo();
 2445       !Subtarget->hasFullFP16()) {
 2481       !Subtarget->hasFullFP16()) {
 2539       !Subtarget->hasFullFP16()) {
 2870     const auto *RegInfo = Subtarget->getRegisterInfo();
 3111     if (Subtarget->isTargetWindows() && IsVarArg)
 3113     if (!Subtarget->isTargetDarwin())
 3117     return Subtarget->isTargetILP32() ? CC_AArch64_DarwinPCS_ILP32_VarArg
 3140   bool IsWin64 = Subtarget->isCallingConvWin64(MF.getFunction().getCallingConv());
 3262       if (!Subtarget->isLittleEndian() && ArgSize < 8 &&
 3303     if (Subtarget->isTargetILP32() && Ins[i].Flags.isPointer())
 3312     if (!Subtarget->isTargetDarwin() || IsWin64) {
 3324     StackOffset = alignTo(StackOffset, Subtarget->isTargetILP32() ? 4 : 8);
 3385   if (Subtarget->hasCustomCallingConv())
 3386     Subtarget->getRegisterInfo()->UpdateCustomCalleeSavedRegs(MF);
 3399   bool IsWin64 = Subtarget->isCallingConvWin64(MF.getFunction().getCallingConv());
 3440   if (Subtarget->hasFPARMv8() && !IsWin64) {
 3640   const AArch64RegisterInfo *TRI = Subtarget->getRegisterInfo();
 3644     if (Subtarget->hasCustomCallingConv()) {
 3955       if (!Subtarget->isLittleEndian() && !Flags.isByVal() &&
 4026     if (Subtarget->classifyGlobalFunctionReference(GV, getTargetMachine()) ==
 4030     } else if (Subtarget->isTargetCOFF() && GV->hasDLLImportStorageClass()) {
 4031       assert(Subtarget->isTargetWindows() &&
 4040         Subtarget->isTargetMachO()) {
 4093   const AArch64RegisterInfo *TRI = Subtarget->getRegisterInfo();
 4104   if (Subtarget->hasCustomCallingConv())
 4251   const AArch64RegisterInfo *TRI = Subtarget->getRegisterInfo();
 4362   unsigned OpFlags = Subtarget->ClassifyGlobalReference(GV, getTargetMachine());
 4421   assert(Subtarget->isTargetDarwin() &&
 4452   const AArch64RegisterInfo *TRI = Subtarget->getRegisterInfo();
 4454   if (Subtarget->hasCustomCallingConv())
 4504   assert(Subtarget->isTargetELF() && "This function expects an ELF target");
 4604   assert(Subtarget->isTargetWindows() && "Windows specific TLS lowering");
 4666   if (Subtarget->isTargetDarwin())
 4668   if (Subtarget->isTargetELF())
 4670   if (Subtarget->isTargetWindows())
 4890   if (!Subtarget->hasNEON())
 5034   if (LHS.getValueType() == MVT::f16 && !Subtarget->hasFullFP16()) {
 5260       !Subtarget->isTargetMachO()) {
 5290     if (Subtarget->isTargetMachO()) {
 5305       !Subtarget->isTargetMachO()) {
 5415   if (Subtarget->isCallingConvWin64(MF.getFunction().getCallingConv()))
 5417   else if (Subtarget->isTargetDarwin())
 5428   unsigned PtrSize = Subtarget->isTargetILP32() ? 4 : 8;
 5429   unsigned VaListSize = (Subtarget->isTargetDarwin() ||
 5430                          Subtarget->isTargetWindows()) ? PtrSize : 32;
 5441   assert(Subtarget->isTargetDarwin() &&
 5450   unsigned MinSlotSize = Subtarget->isTargetILP32() ? 4 : 8;
 5520   if (Subtarget->isTargetILP32())
 5546     const MCRegisterInfo *MRI = Subtarget->getRegisterInfo();
 5548     if (!Subtarget->isXRegisterReserved(DwarfRegNum))
 5719   else if (VT == MVT::f16 && Subtarget->hasFullFP16())
 5736     unsigned Limit = (OptForSize ? 1 : (Subtarget->hasFuseLiterals() ? 5 : 2));
 5777       (Enabled == ReciprocalEstimate::Unspecified && Subtarget->useRSqrt()))
 5778     if (SDValue Estimate = getEstimate(Subtarget, AArch64ISD::FRSQRTE, Operand,
 5817     if (SDValue Estimate = getEstimate(Subtarget, AArch64ISD::FRECPE, Operand,
 5875   if (!Subtarget->hasFPARMv8())
 5984       if (!Subtarget->hasFPARMv8())
 6000       if (!Subtarget->hasFPARMv8())
 6008       if (!Subtarget->hasFPARMv8())
 6053   if (Res.second && !Subtarget->hasFPARMv8() &&
 8290   const AArch64RegisterInfo *TRI = Subtarget->getRegisterInfo();
 8292   if (Subtarget->hasCustomCallingConv())
 8315   assert(Subtarget->isTargetWindows() &&
 8747   if (Subtarget->getProcFamily() == AArch64Subtarget::Falkor &&
 8799   if (!Subtarget->hasNEON() || !isLegalInterleavedAccessType(VecTy, DL))
 8930   if (!Subtarget->hasNEON() || !isLegalInterleavedAccessType(SubVecTy, DL))
 9033   bool CanUseNEON = Subtarget->hasNEON() && CanImplicitFloat;
 9034   bool CanUseFP = Subtarget->hasFPARMv8() && CanImplicitFloat;
 9066   bool CanUseNEON = Subtarget->hasNEON() && CanImplicitFloat;
 9067   bool CanUseFP = Subtarget->hasFPARMv8() && CanImplicitFloat;
11726     return performXorCombine(N, DAG, DCI, Subtarget);
11728     return performMulCombine(N, DAG, DCI, Subtarget);
11731     return performIntToFpCombine(N, DAG, Subtarget);
11734     return performFpToIntCombine(N, DAG, DCI, Subtarget);
11736     return performFDivCombine(N, DAG, DCI, Subtarget);
11738     return performORCombine(N, DCI, Subtarget);
11744     return performIntrinsicCombine(N, DCI, Subtarget);
11762     return performSTORECombine(N, DCI, DAG, Subtarget);
11806     return performGlobalAddressCombine(N, DAG, Subtarget, getTargetMachine());
12094     ReplaceCMP_SWAP_128Results(N, Results, DAG, Subtarget);
12100   if (Subtarget->isTargetAndroid() || Subtarget->isTargetFuchsia())
12150   return (Subtarget->hasLSE() && Size < 128) ? AtomicExpansionKind::None : AtomicExpansionKind::LLSC;
12157   if (Subtarget->hasLSE())
12274   if (Subtarget->isTargetAndroid())
12279   if (Subtarget->isTargetFuchsia())
12287   if (Subtarget->getTargetTriple().isWindowsMSVCEnvironment()) {
12307   if (Subtarget->getTargetTriple().isWindowsMSVCEnvironment())
12314   if (Subtarget->getTargetTriple().isWindowsMSVCEnvironment())
12323   if (Subtarget->isTargetAndroid())
12328   if (Subtarget->isTargetFuchsia())
12363       !Subtarget->isTargetWindows())
12377   const AArch64RegisterInfo *TRI = Subtarget->getRegisterInfo();
12382   const TargetInstrInfo *TII = Subtarget->getInstrInfo();
12434   return Subtarget->hasAggressiveFMA() && VT.isFloatingPoint();
12439   if (Subtarget->isTargetDarwin() || Subtarget->isTargetWindows())
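
The references above all share one pattern: the AArch64 lowering code queries feature and OS predicates on the AArch64Subtarget (hasNEON(), hasFullFP16(), isTargetWindows(), ...) and enables or skips lowering behavior accordingly. The standalone sketch below illustrates only that pattern; the struct and method names are simplified stand-ins, not the real LLVM classes, and the printed strings stand in for the setOperationAction calls made in the real constructor.

  // Standalone sketch of the subtarget-gated lowering setup seen above.
  // Not LLVM code: the types below are illustrative stand-ins.
  #include <iostream>

  struct SubtargetSketch {
    bool HasNEON = true;       // models AArch64Subtarget::hasNEON()
    bool HasFullFP16 = false;  // models AArch64Subtarget::hasFullFP16()
    bool hasNEON() const { return HasNEON; }
    bool hasFullFP16() const { return HasFullFP16; }
  };

  struct LoweringSketch {
    explicit LoweringSketch(const SubtargetSketch *Subtarget) {
      // Mirrors the checks at lines 144 and 357/396 above: each feature
      // test decides whether extra lowering rules are registered.
      if (Subtarget->hasNEON())
        std::cout << "register NEON vector lowering rules\n";
      if (Subtarget->hasFullFP16())
        std::cout << "register native f16 lowering rules\n";
      else
        std::cout << "promote f16 operations to f32\n";
    }
  };

  int main() {
    SubtargetSketch ST;
    LoweringSketch Lowering(&ST);
    return 0;
  }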