reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

Derived Classes

include/llvm/IR/IntrinsicInst.h
  430   template <class BaseCL> class MemTransferBase : public BaseCL {
  474   template <class BaseCL> class MemSetBase : public BaseCL {

Declarations

include/llvm/Analysis/MemoryLocation.h
   28 class MemIntrinsic;
include/llvm/Transforms/Utils/VNCoercion.h
   29 class MemIntrinsic;

References

include/llvm/Analysis/MemoryLocation.h
  231   static MemoryLocation getForDest(const MemIntrinsic *MI);
include/llvm/Analysis/PtrUseVisitor.h
  284   void visitMemIntrinsic(MemIntrinsic &I) {}
include/llvm/IR/InstVisitor.h
  214   RetTy visitMemIntrinsic(MemIntrinsic &I)        { DELEGATE(IntrinsicInst); }
include/llvm/IR/IntrinsicInst.h
  430   template <class BaseCL> class MemTransferBase : public BaseCL {
  474   template <class BaseCL> class MemSetBase : public BaseCL {
  589   class MemIntrinsic : public MemIntrinsicBase<MemIntrinsic> {
  621   class MemSetInst : public MemSetBase<MemIntrinsic> {
  633   class MemTransferInst : public MemTransferBase<MemIntrinsic> {
  677       if (auto *MI = dyn_cast<MemIntrinsic>(this))
include/llvm/Support/Casting.h
   57   static inline bool doit(const From &Val) {
   58     return To::classof(&Val);
   77     return isa_impl<To, From>::doit(Val);
   92     return isa_impl<To, From>::doit(*Val);
  104   static inline bool doit(const From *Val) {
  106     return isa_impl<To, From>::doit(*Val);
  106     return isa_impl<To, From>::doit(*Val);
  122     return isa_impl_wrap<To, SimpleFrom,
  132     return isa_impl_cl<To,FromTy>::doit(Val);
  142   return isa_impl_wrap<X, const Y,
  165   using ret_type = To &;       // Normal case, return Ty&
  168   using ret_type = const To &; // Normal case, return Ty&
  172   using ret_type = To *;       // Pointer arg case, return Ty*
  176   using ret_type = const To *; // Constant pointer arg case, return const Ty*
  198   using ret_type = typename cast_retty<To, SimpleFrom>::ret_type;
  204   using ret_type = typename cast_retty_impl<To,FromTy>::ret_type;
  210       To, From, typename simplify_type<From>::SimpleType>::ret_type;
  218   static typename cast_retty<To, From>::ret_type doit(From &Val) {
  219     return cast_convert_val<To, SimpleFrom,
  227   static typename cast_retty<To, FromTy>::ret_type doit(const FromTy &Val) {
  228     typename cast_retty<To, FromTy>::ret_type Res2
  248                                typename cast_retty<X, const Y>::ret_type>::type
  256 inline typename cast_retty<X, Y>::ret_type cast(Y &Val) {
  258   return cast_convert_val<X, Y,
  263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
  263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
  263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
  265   return cast_convert_val<X, Y*,
  265   return cast_convert_val<X, Y*,
  266                           typename simplify_type<Y*>::SimpleType>::doit(Val);
  331                             typename cast_retty<X, const Y>::ret_type>::type
  337 LLVM_NODISCARD inline typename cast_retty<X, Y>::ret_type dyn_cast(Y &Val) {
  338   return isa<X>(Val) ? cast<X>(Val) : nullptr;
  338   return isa<X>(Val) ? cast<X>(Val) : nullptr;
  342 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type dyn_cast(Y *Val) {
  342 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type dyn_cast(Y *Val) {
  342 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type dyn_cast(Y *Val) {
  343   return isa<X>(Val) ? cast<X>(Val) : nullptr;
  343   return isa<X>(Val) ? cast<X>(Val) : nullptr;
  352                             typename cast_retty<X, const Y>::ret_type>::type
  360                             typename cast_retty<X, Y>::ret_type>::type
  362   return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
  362   return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
  366 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type
  368   return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
  368   return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
include/llvm/Transforms/Utils/VNCoercion.h
   71                                      MemIntrinsic *DepMI, const DataLayout &DL);
   98 Value *getMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
  103 Constant *getConstantMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
lib/Analysis/CaptureTracking.cpp
  278       if (auto *MI = dyn_cast<MemIntrinsic>(Call))
  278       if (auto *MI = dyn_cast<MemIntrinsic>(Call))
lib/Analysis/LazyValueInfo.cpp
  678   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
  678   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
lib/Analysis/MemoryLocation.cpp
  106 MemoryLocation MemoryLocation::getForDest(const MemIntrinsic *MI) {
lib/Analysis/StackSafetyAnalysis.cpp
  213   ConstantRange getMemIntrinsicAccessRange(const MemIntrinsic *MI, const Use &U,
  263     const MemIntrinsic *MI, const Use &U, const Value *AllocaPtr) {
  328         if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
  328         if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
lib/CodeGen/CodeGenPrepare.cpp
 1829     if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(CI)) {
 1829     if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(CI)) {
lib/CodeGen/SafeStack.cpp
  197   bool IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
  259 bool SafeStack::IsMemIntrinsicSafe(const MemIntrinsic *MI, const Use &U,
  329         if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
  329         if (const MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I)) {
lib/IR/AutoUpgrade.cpp
 3704     auto *MemCI = cast<MemIntrinsic>(NewCall);
 3704     auto *MemCI = cast<MemIntrinsic>(NewCall);
lib/IR/Verifier.cpp
 4351     const auto *MI = cast<MemIntrinsic>(&Call);
 4351     const auto *MI = cast<MemIntrinsic>(&Call);
lib/Target/ARM/ARMISelLowering.cpp
 1712   if (!isa<MemIntrinsic>(CI))
lib/Target/NVPTX/NVPTXLowerAggrCopies.cpp
   61   SmallVector<MemIntrinsic *, 4> MemCalls;
   84       } else if (MemIntrinsic *IntrCall = dyn_cast<MemIntrinsic>(II)) {
   84       } else if (MemIntrinsic *IntrCall = dyn_cast<MemIntrinsic>(II)) {
  126   for (MemIntrinsic *MemCall : MemCalls) {
lib/Transforms/IPO/Attributor.cpp
 1351       if (!cast<MemIntrinsic>(II)->isVolatile())
lib/Transforms/IPO/GlobalOpt.cpp
  342     } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U)) { // memset/cpy/mv
  342     } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U)) { // memset/cpy/mv
lib/Transforms/InstCombine/InstCombineCalls.cpp
 1818     if (auto *M = dyn_cast<MemIntrinsic>(MI))
 1818     if (auto *M = dyn_cast<MemIntrinsic>(MI))
lib/Transforms/InstCombine/InstructionCombining.cpp
 2318             MemIntrinsic *MI = cast<MemIntrinsic>(II);
 2318             MemIntrinsic *MI = cast<MemIntrinsic>(II);
lib/Transforms/Instrumentation/AddressSanitizer.cpp
  639   void instrumentMemIntrinsic(MemIntrinsic *MI);
 1301 void AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
 2680       } else if (isa<MemIntrinsic>(Inst)) {
 2718         instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
lib/Transforms/Instrumentation/HWAddressSanitizer.cpp
  212   void instrumentMemIntrinsic(MemIntrinsic *MI);
  701 void HWAddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  726   if (ClInstrumentMemIntrinsics && isa<MemIntrinsic>(I)) {
  727     instrumentMemIntrinsic(cast<MemIntrinsic>(I));
 1143       if (Addr || isa<MemIntrinsic>(Inst))
lib/Transforms/Instrumentation/PGOMemOPSizeOpt.cpp
  163   void visitMemIntrinsic(MemIntrinsic &MI) {
  177   std::vector<MemIntrinsic *> WorkList;
  184   bool perform(MemIntrinsic *MI);
  200 static const char *getMIName(const MemIntrinsic *MI) {
  231 bool MemOPSizeOpt::perform(MemIntrinsic *MI) {
  377     auto *MemI = cast<MemIntrinsic>(NewInst);
  377     auto *MemI = cast<MemIntrinsic>(NewInst);
lib/Transforms/Instrumentation/ThreadSanitizer.cpp
  463         if (isa<MemIntrinsic>(Inst))
lib/Transforms/Instrumentation/ValueProfilePlugins.inc
   38   void visitMemIntrinsic(MemIntrinsic &MI) {
lib/Transforms/Scalar/AlignmentFromAssumptions.cpp
  335     } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(J)) {
  335     } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(J)) {
lib/Transforms/Scalar/DeadStoreElimination.cpp
  253       return !cast<MemIntrinsic>(II)->isVolatile();
lib/Transforms/Scalar/GVN.cpp
  186   static AvailableValue getMI(MemIntrinsic *MI, unsigned Offset = 0) {
  225   MemIntrinsic *getMemIntrinValue() const {
  227     return cast<MemIntrinsic>(Val.getPointer());
  904     if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInst)) {
  904     if (MemIntrinsic *DepMI = dyn_cast<MemIntrinsic>(DepInst)) {
lib/Transforms/Scalar/InferAddressSpaces.cpp
  368     else if (auto *MI = dyn_cast<MemIntrinsic>(&I)) {
  368     else if (auto *MI = dyn_cast<MemIntrinsic>(&I)) {
  786 static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI, Value *OldV,
  944       if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {
  944       if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {
lib/Transforms/Scalar/LoopRerollPass.cpp
  734   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
  734   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
lib/Transforms/Scalar/NewGVN.cpp
 1451   } else if (auto *DepMI = dyn_cast<MemIntrinsic>(DepInst)) {
 1451   } else if (auto *DepMI = dyn_cast<MemIntrinsic>(DepInst)) {
lib/Transforms/Scalar/SROA.cpp
 1838   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U->getUser())) {
 1838   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U->getUser())) {
 2067   } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U->getUser())) {
 2067   } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(U->getUser())) {
lib/Transforms/Utils/AddDiscriminators.cpp
  113   return !isa<IntrinsicInst>(I) || isa<MemIntrinsic>(I);
lib/Transforms/Utils/VNCoercion.cpp
  284                                      MemIntrinsic *MI, const DataLayout &DL) {
  463 T *getMemInstValueForLoadHelper(MemIntrinsic *SrcInst, unsigned Offset,
  519 Value *getMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
  527 Constant *getConstantMemInstValueForLoad(MemIntrinsic *SrcInst, unsigned Offset,
lib/Transforms/Vectorize/SLPVectorizer.cpp
  480   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
  480   if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(I))
tools/polly/include/polly/ScopInfo.h
  783     return isa<MemIntrinsic>(getAccessInstruction());
tools/polly/include/polly/Support/ScopHelper.h
   84   /* implicit */ MemAccInst(llvm::MemIntrinsic *MI) : I(MI) {}
   91            llvm::isa<llvm::CallInst>(V) || llvm::isa<llvm::MemIntrinsic>(V);
   95            llvm::isa<llvm::CallInst>(V) || llvm::isa<llvm::MemIntrinsic>(V);
  143   MemAccInst &operator=(llvm::MemIntrinsic &MI) {
  147   MemAccInst &operator=(llvm::MemIntrinsic *MI) {
  262   bool isMemIntrinsic() const { return I && llvm::isa<llvm::MemIntrinsic>(I); }
  271   llvm::MemIntrinsic *asMemIntrinsic() const {
  272     return llvm::cast<llvm::MemIntrinsic>(I);
tools/polly/lib/Analysis/ScopBuilder.cpp
 1725   auto *MemIntr = dyn_cast_or_null<MemIntrinsic>(Inst);
 1725   auto *MemIntr = dyn_cast_or_null<MemIntrinsic>(Inst);
tools/polly/lib/Analysis/ScopDetection.cpp
  765     AF = SE.getSCEVAtScope(cast<MemIntrinsic>(II).getDest(), L);
  774     if (!isAffine(SE.getSCEVAtScope(cast<MemIntrinsic>(II).getLength(), L), L,
 1077   if (!isa<MemIntrinsic>(Inst)) {
 1105   if (isa<MemIntrinsic>(Inst) && !IsAffine) {
tools/polly/lib/Analysis/ScopInfo.cpp
  731   if (isa<MemIntrinsic>(MAI))
  850   if (getAccessInstruction() && isa<MemIntrinsic>(getAccessInstruction())) {
usr/include/c++/7.4.0/type_traits
 1983     { typedef _Up     type; };