1169  if (Op != Legal->getPrimaryInduction() && TTI.isTruncateFree(SrcTy, DestTy))
1196  TTI.isLegalMaskedStore(DataType, Alignment);
1203  TTI.isLegalMaskedLoad(DataType, Alignment);
1209  return TTI.isLegalMaskedScatter(DataType);
1215  return TTI.isLegalMaskedGather(DataType);
3234  unsigned ScalarCallCost = TTI.getCallInstrCost(F, ScalarRetTy, ScalarTys);
3256  unsigned VectorCallCost = TTI.getCallInstrCost(nullptr, RetTy, Tys);
3274  return TTI.getIntrinsicInstrCost(ID, CI->getType(), Operands, FMF, VF);
4607  assert(useMaskedInterleavedAccesses(TTI) &&
4612  return isa<LoadInst>(I) ? TTI.isLegalMaskedLoad(Ty, Alignment)
4613  : TTI.isLegalMaskedStore(Ty, Alignment);
4856  if (Legal->getRuntimePointerChecking()->Need && TTI.hasBranchDivergence()) {
4905  if (!useMaskedInterleavedAccesses(TTI))
4943  MinBWs = computeMinimumValueSizes(TheLoop->getBlocks(), *DB, &TTI);
4946  unsigned WidestRegister = TTI.getRegisterBitWidth(true);
4980  if (TTI.shouldMaximizeVectorBandwidth(!isScalarEpilogueAllowed()) ||
4997  unsigned TargetNumRegisters = TTI.getNumberOfRegisters(pair.first);
5006  if (unsigned MinVF = TTI.getMinimumVF(SmallestType)) {
5174  unsigned TargetNumRegisters = TTI.getNumberOfRegisters(pair.first);
5177  << TTI.getRegisterClassName(pair.first) << " register class\n");
5202  unsigned MaxInterleaveCount = TTI.getMaxInterleaveFactor(VF);
5287  if (TTI.enableAggressiveInterleaving(HasReductions)) {
5375  std::min(TTI.getRegisterBitWidth(true), MaxSafeDepDist);
5414  unsigned ClassID = TTI.getRegisterClassForType(false, Inst->getType());
5427  unsigned ClassID = TTI.getRegisterClassForType(false, Inst->getType());
5433  unsigned ClassID = TTI.getRegisterClassForType(true, Inst->getType());
5462  unsigned ClassID = TTI.getRegisterClassForType(VFs[i] > 1, Inst->getType());
5475  << TTI.getRegisterClassName(pair.first) << ", " << pair.second
5482  << TTI.getRegisterClassName(pair.first) << ", " << pair.second
5617  ScalarCost += TTI.getScalarizationOverhead(ToVectorTy(I->getType(), VF),
5619  ScalarCost += VF * TTI.getCFInstrCost(Instruction::PHI);
5633  ScalarCost += TTI.getScalarizationOverhead(
5743  unsigned Cost = VF * TTI.getAddressComputationCost(PtrTy, SE, PtrSCEV);
5748  Cost += VF * TTI.getMemoryOpCost(I->getOpcode(), ValTy->getScalarType(),
5783  Cost += TTI.getMaskedMemoryOpCost(I->getOpcode(), VectorTy,
5786  Cost += TTI.getMemoryOpCost(I->getOpcode(), VectorTy, Alignment, AS, I);
5790  Cost += TTI.getShuffleCost(TargetTransformInfo::SK_Reverse, VectorTy, 0);
5801  return TTI.getAddressComputationCost(ValTy) +
5802  TTI.getMemoryOpCost(Instruction::Load, ValTy, Alignment, AS) +
5803  TTI.getShuffleCost(TargetTransformInfo::SK_Broadcast, VectorTy);
5808  return TTI.getAddressComputationCost(ValTy) +
5809  TTI.getMemoryOpCost(Instruction::Store, ValTy, Alignment, AS) +
5812  : TTI.getVectorInstrCost(Instruction::ExtractElement, VectorTy,
5823  return TTI.getAddressComputationCost(VectorTy) +
5824  TTI.getGatherScatterOpCost(I->getOpcode(), VectorTy, Ptr,
5853  unsigned Cost = TTI.getInterleavedMemoryOpCost(
5862  TTI.getShuffleCost(TargetTransformInfo::SK_Reverse, VectorTy, 0);
5876  return TTI.getAddressComputationCost(ValTy) +
5877  TTI.getMemoryOpCost(I->getOpcode(), ValTy, Alignment, AS, I);
5904  VF > 1 && VectorTy->isVectorTy() && TTI.getNumberOfParts(VectorTy) < VF;
5917  (!isa<LoadInst>(I) || !TTI.supportsEfficientVectorElementLoadStore()))
5918  Cost += TTI.getScalarizationOverhead(RetTy, true, false);
5921  if (isa<LoadInst>(I) && !TTI.prefersVectorizedAddressing())
5925  if (isa<StoreInst>(I) && TTI.supportsEfficientVectorElementLoadStore())
5934  return Cost + TTI.getOperandsScalarizationOverhead(
6037  if (TTI.prefersVectorizedAddressing())
6124  return (TTI.getScalarizationOverhead(Vec_i1Ty, false, true) +
6125  (TTI.getCFInstrCost(Instruction::Br) * VF));
6128  return TTI.getCFInstrCost(Instruction::Br);
6142  return TTI.getShuffleCost(TargetTransformInfo::SK_ExtractSubvector,
6150  TTI.getCmpSelInstrCost(
6154  return TTI.getCFInstrCost(Instruction::PHI);
6171  Cost += VF * TTI.getCFInstrCost(Instruction::PHI);
6174  Cost += VF * TTI.getArithmeticInstrCost(I->getOpcode(), RetTy);
6208  TTI.getOperandInfo(Op2, Op2VP);
6214  return N * TTI.getArithmeticInstrCost(
6220  return N * TTI.getArithmeticInstrCost(
6234  return TTI.getCmpSelInstrCost(I->getOpcode(), VectorTy, CondTy, I);
6243  return TTI.getCmpSelInstrCost(I->getOpcode(), VectorTy, nullptr, I);
6275  return TTI.getCastInstrCost(Instruction::Trunc, Trunc->getDestTy(),
6302  return N * TTI.getCastInstrCost(I->getOpcode(), VectorTy, SrcVecTy, I);
6315  return VF * TTI.getArithmeticInstrCost(Instruction::Mul, VectorTy) +
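
Taken together, these references are the TargetTransformInfo (TTI) hooks the loop vectorizer's cost model consults: legality queries (isLegalMaskedLoad/Store/Gather/Scatter), register and interleaving limits (getRegisterBitWidth, getNumberOfRegisters, getMaxInterleaveFactor), and per-instruction cost queries (getMemoryOpCost, getArithmeticInstrCost, getCmpSelInstrCost, getCastInstrCost, plus the scalarization overheads). The sketch below illustrates the recurring query pattern at lines 5743 and 5748: pricing a scalarized memory access as one address computation plus one scalar memory operation per vector lane. The helper name is hypothetical and the signatures are assumed to match an LLVM release of roughly this vintage (around 10.x, where getMemoryOpCost still takes a MaybeAlign); it is not the vectorizer's own code.

// Minimal sketch (hypothetical helper, not part of the file indexed above) of the
// cost-model query pattern seen at references 5743/5748. Assumes LLVM ~10.x APIs.
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Support/Alignment.h"

using namespace llvm;

// Estimate the cost of executing a load once per lane of a VF-wide vector,
// i.e. the scalarized (non-widened) form of the access.
static unsigned getScalarizedLoadCost(const TargetTransformInfo &TTI,
                                      ScalarEvolution &SE, LoadInst *LI,
                                      unsigned VF) {
  Type *ValTy = LI->getType();
  Type *PtrTy = LI->getPointerOperandType();
  const SCEV *PtrSCEV = SE.getSCEV(LI->getPointerOperand());
  unsigned AS = LI->getPointerAddressSpace();
  MaybeAlign Alignment(LI->getAlignment());

  // Each lane needs its own address computation ...
  unsigned Cost = VF * TTI.getAddressComputationCost(PtrTy, &SE, PtrSCEV);
  // ... and its own scalar load of the element type.
  Cost += VF * TTI.getMemoryOpCost(LI->getOpcode(), ValTy->getScalarType(),
                                   Alignment, AS, LI);
  return Cost;
}

The widened counterparts in the listing follow the same shape (5783-5790 for masked or consecutive accesses, 5823-5824 for gather/scatter, 5853 for interleave groups), which is what lets the cost model compare the scalarized and vectorized forms of each access for every candidate VF.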