reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

Declarations

include/llvm/Transforms/Scalar/MemCpyOptimizer.h
   34 class MemSetInst;
include/llvm/Transforms/Utils/LowerMemIntrinsics.h
   23 class MemSetInst;

References

include/llvm/IR/InstVisitor.h
  210   RetTy visitMemSetInst(MemSetInst &I)            { DELEGATE(MemIntrinsic); }
include/llvm/Support/Casting.h
   58     return To::classof(&Val);
   77     return isa_impl<To, From>::doit(Val);
   92     return isa_impl<To, From>::doit(*Val);
  106     return isa_impl<To, From>::doit(*Val);
  122     return isa_impl_wrap<To, SimpleFrom,
  132     return isa_impl_cl<To,FromTy>::doit(Val);
  142   return isa_impl_wrap<X, const Y,
  168   using ret_type = const To &; // Normal case, return Ty&
  172   using ret_type = To *;       // Pointer arg case, return Ty*
  176   using ret_type = const To *; // Constant pointer arg case, return const Ty*
  198   using ret_type = typename cast_retty<To, SimpleFrom>::ret_type;
  204   using ret_type = typename cast_retty_impl<To,FromTy>::ret_type;
  210       To, From, typename simplify_type<From>::SimpleType>::ret_type;
  218   static typename cast_retty<To, From>::ret_type doit(From &Val) {
  219     return cast_convert_val<To, SimpleFrom,
  227   static typename cast_retty<To, FromTy>::ret_type doit(const FromTy &Val) {
  228     typename cast_retty<To, FromTy>::ret_type Res2
  248                                typename cast_retty<X, const Y>::ret_type>::type
  256 inline typename cast_retty<X, Y>::ret_type cast(Y &Val) {
  258   return cast_convert_val<X, Y,
  263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
  265   return cast_convert_val<X, Y*,
  337 LLVM_NODISCARD inline typename cast_retty<X, Y>::ret_type dyn_cast(Y &Val) {
  342 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type dyn_cast(Y *Val) {
  343   return isa<X>(Val) ? cast<X>(Val) : nullptr;
  343   return isa<X>(Val) ? cast<X>(Val) : nullptr;
  366 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type
  368   return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
  368   return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
include/llvm/Transforms/Scalar/MemCpyOptimizer.h
   61   bool processMemSet(MemSetInst *SI, BasicBlock::iterator &BBI);
   67   bool processMemSetMemCpyDependence(MemCpyInst *M, MemSetInst *MDep);
   68   bool performMemCpyToMemSetOptzn(MemCpyInst *M, MemSetInst *MDep);
include/llvm/Transforms/Utils/LowerMemIntrinsics.h
   51 void expandMemSetAsLoop(MemSetInst *MemSet);
lib/Analysis/Lint.cpp
  357       MemSetInst *MSI = cast<MemSetInst>(&I);
  357       MemSetInst *MSI = cast<MemSetInst>(&I);
lib/CodeGen/GlobalISel/IRTranslator.cpp
 1143     auto *MSI = cast<MemSetInst>(&CI);
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
 5721     const auto &MSI = cast<MemSetInst>(I);
 5721     const auto &MSI = cast<MemSetInst>(I);
lib/IR/IRBuilder.cpp
  112     cast<MemSetInst>(CI)->setDestAlignment(Align);
lib/Target/AArch64/AArch64FastISel.cpp
 3553     const MemSetInst *MSI = cast<MemSetInst>(II);
 3553     const MemSetInst *MSI = cast<MemSetInst>(II);
lib/Target/AArch64/AArch64StackTagging.cpp
  116   bool addMemSet(uint64_t Offset, MemSetInst *MSI) {
  348     if (!isa<StoreInst>(BI) && !isa<MemSetInst>(BI)) {
  371       MemSetInst *MSI = cast<MemSetInst>(BI);
  371       MemSetInst *MSI = cast<MemSetInst>(BI);
lib/Target/AMDGPU/AMDGPULowerIntrinsics.cpp
   96       auto *Memset = cast<MemSetInst>(Inst);
   96       auto *Memset = cast<MemSetInst>(Inst);
lib/Target/AMDGPU/AMDGPUPromoteAlloca.cpp
  900       MemSetInst *MemSet = cast<MemSetInst>(Intr);
  900       MemSetInst *MemSet = cast<MemSetInst>(Intr);
lib/Target/ARM/ARMFastISel.cpp
 2555     const MemSetInst &MSI = cast<MemSetInst>(I);
 2555     const MemSetInst &MSI = cast<MemSetInst>(I);
lib/Target/Mips/MipsFastISel.cpp
 1669     const MemSetInst *MSI = cast<MemSetInst>(II);
 1669     const MemSetInst *MSI = cast<MemSetInst>(II);
lib/Target/NVPTX/NVPTXLowerAggrCopies.cpp
  131     } else if (MemSetInst *Memset = dyn_cast<MemSetInst>(MemCall)) {
  131     } else if (MemSetInst *Memset = dyn_cast<MemSetInst>(MemCall)) {
lib/Target/X86/X86FastISel.cpp
 2753     const MemSetInst *MSI = cast<MemSetInst>(II);
 2753     const MemSetInst *MSI = cast<MemSetInst>(II);
lib/Transforms/IPO/GlobalOpt.cpp
  219     } else if (MemSetInst *MSI = dyn_cast<MemSetInst>(U)) {
  219     } else if (MemSetInst *MSI = dyn_cast<MemSetInst>(U)) {
lib/Transforms/Instrumentation/AddressSanitizer.cpp
 1309   } else if (isa<MemSetInst>(MI)) {
lib/Transforms/Instrumentation/DataFlowSanitizer.cpp
  457   void visitMemSetInst(MemSetInst &I);
 1481 void DFSanVisitor::visitMemSetInst(MemSetInst &I) {
lib/Transforms/Instrumentation/HWAddressSanitizer.cpp
  709   } else if (isa<MemSetInst>(MI)) {
lib/Transforms/Instrumentation/MemorySanitizer.cpp
 2430   void visitMemSetInst(MemSetInst &I) {
lib/Transforms/Instrumentation/ThreadSanitizer.cpp
  602   if (MemSetInst *M = dyn_cast<MemSetInst>(I)) {
  602   if (MemSetInst *M = dyn_cast<MemSetInst>(I)) {
lib/Transforms/Scalar/InferAddressSpaces.cpp
  793   if (auto *MSI = dyn_cast<MemSetInst>(MI)) {
  793   if (auto *MSI = dyn_cast<MemSetInst>(MI)) {
lib/Transforms/Scalar/LoopIdiomRecognize.cpp
  201   bool processLoopMemSet(MemSetInst *MSI, const SCEV *BECount);
  645     if (MemSetInst *MSI = dyn_cast<MemSetInst>(Inst)) {
  645     if (MemSetInst *MSI = dyn_cast<MemSetInst>(Inst)) {
  806 bool LoopIdiomRecognize::processLoopMemSet(MemSetInst *MSI,
lib/Transforms/Scalar/MemCpyOptimizer.cpp
  169       addMemSet(OffsetFromFirst, cast<MemSetInst>(Inst));
  179   void addMemSet(int64_t OffsetFromFirst, MemSetInst *MSI) {
  317     if (!isa<StoreInst>(BI) && !isa<MemSetInst>(BI)) {
  345       MemSetInst *MSI = cast<MemSetInst>(BI);
  345       MemSetInst *MSI = cast<MemSetInst>(BI);
  707 bool MemCpyOptPass::processMemSet(MemSetInst *MSI, BasicBlock::iterator &BBI) {
 1015                                                   MemSetInst *MemSet) {
 1097                                                MemSetInst *MemSet) {
 1168     if (MemSetInst *MDep = dyn_cast<MemSetInst>(DepInfo.getInst()))
 1168     if (MemSetInst *MDep = dyn_cast<MemSetInst>(DepInfo.getInst()))
 1215     if (MemSetInst *MDep = dyn_cast<MemSetInst>(SrcDepInfo.getInst()))
 1215     if (MemSetInst *MDep = dyn_cast<MemSetInst>(SrcDepInfo.getInst()))
 1355       else if (MemSetInst *M = dyn_cast<MemSetInst>(I))
 1355       else if (MemSetInst *M = dyn_cast<MemSetInst>(I))
lib/Transforms/Scalar/SROA.cpp
  828   void visitMemSetInst(MemSetInst &II) {
 2755   bool visitMemSetInst(MemSetInst &II) {
lib/Transforms/Utils/Evaluator.cpp
  495         if (MemSetInst *MSI = dyn_cast<MemSetInst>(II)) {
  495         if (MemSetInst *MSI = dyn_cast<MemSetInst>(II)) {
lib/Transforms/Utils/GlobalStatus.cpp
  162       } else if (const MemSetInst *MSI = dyn_cast<MemSetInst>(I)) {
  162       } else if (const MemSetInst *MSI = dyn_cast<MemSetInst>(I)) {
lib/Transforms/Utils/LowerMemIntrinsics.cpp
  444 void llvm::expandMemSetAsLoop(MemSetInst *Memset) {
lib/Transforms/Utils/VNCoercion.cpp
  295       auto *CI = dyn_cast<ConstantInt>(cast<MemSetInst>(MI)->getValue());
  471   if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
  471   if (MemSetInst *MSI = dyn_cast<MemSetInst>(SrcInst)) {
  531   if (auto *MSI = dyn_cast<MemSetInst>(SrcInst))
  531   if (auto *MSI = dyn_cast<MemSetInst>(SrcInst))
tools/polly/include/polly/Support/ScopHelper.h
  263   bool isMemSetInst() const { return I && llvm::isa<llvm::MemSetInst>(I); }
  274   llvm::MemSetInst *asMemSetInst() const {
  275     return llvm::cast<llvm::MemSetInst>(I);
tools/polly/lib/CodeGen/IRBuilder.cpp
  207   if (isa<CallInst>(Inst) && !isa<MemSetInst>(Inst))
tools/polly/lib/Support/ScopHelper.cpp
  438       if (isa<MemSetInst>(CI) || isa<MemTransferInst>(CI))
tools/polly/lib/Transform/ZoneAlgo.cpp
  434   if (auto *Memset = dyn_cast<MemSetInst>(AccInst)) {
  434   if (auto *Memset = dyn_cast<MemSetInst>(AccInst)) {