|
reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced
|
Declarations
include/llvm/Analysis/AliasSetTracker.h 38 class LoadInst;
include/llvm/Analysis/MemoryBuiltins.h 47 class LoadInst;
include/llvm/Analysis/MemoryDependenceAnalysis.h 42 class LoadInst;
include/llvm/Analysis/MemoryLocation.h 25 class LoadInst;
include/llvm/Analysis/TargetTransformInfo.h 47 class LoadInst;
include/llvm/CodeGen/FastISel.h 43 class LoadInst;
include/llvm/CodeGen/SelectionDAGISel.h 41 class LoadInst;
include/llvm/IR/Instruction.def 172 HANDLE_MEMORY_INST(32, Load , LoadInst ) // Memory manipulation instrs
include/llvm/Transforms/Scalar/GVN.h 47 class LoadInst;
include/llvm/Transforms/Scalar/JumpThreading.h 42 class LoadInst;
include/llvm/Transforms/Utils/Local.h 51 class LoadInst;
include/llvm/Transforms/Utils/SSAUpdater.h 24 class LoadInst;
include/llvm/Transforms/Utils/VNCoercion.h 28 class LoadInst;
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h 74 class LoadInst;
References
examples/BrainF/BrainF.cpp 227 LoadInst *tape_0 = builder->CreateLoad(curhead, tapereg);
279 LoadInst *tape_0 = builder->CreateLoad(curhead, tapereg);
443 LoadInst *tape_0 = new LoadInst(head_0, tapereg, testbb);
443 LoadInst *tape_0 = new LoadInst(head_0, tapereg, testbb);
include/llvm/ADT/DenseMapInfo.h 39 static inline T* getEmptyKey() {
41 Val <<= PointerLikeTypeTraits<T*>::NumLowBitsAvailable;
45 static inline T* getTombstoneKey() {
47 Val <<= PointerLikeTypeTraits<T*>::NumLowBitsAvailable;
51 static unsigned getHashValue(const T *PtrVal) {
56 static bool isEqual(const T *LHS, const T *RHS) { return LHS == RHS; }
56 static bool isEqual(const T *LHS, const T *RHS) { return LHS == RHS; }
include/llvm/Analysis/AliasAnalysis.h 534 ModRefInfo getModRefInfo(const LoadInst *L, const MemoryLocation &Loc);
537 ModRefInfo getModRefInfo(const LoadInst *L, const Value *P,
705 ModRefInfo getModRefInfo(const LoadInst *L, const MemoryLocation &Loc,
include/llvm/Analysis/AliasSetTracker.h 378 void add(LoadInst *LI);
include/llvm/Analysis/Loads.h 74 bool isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
119 Value *FindAvailableLoadedValue(LoadInst *Load,
include/llvm/Analysis/LoopAccessAnalysis.h 193 void addAccess(LoadInst *LI) {
include/llvm/Analysis/LoopUnrollAnalyzer.h 88 bool visitLoad(LoadInst &I);
include/llvm/Analysis/MemoryBuiltins.h 270 SizeOffsetType visitLoadInst(LoadInst &I);
337 SizeOffsetEvalType visitLoadInst(LoadInst &I);
include/llvm/Analysis/MemoryDependenceAnalysis.h 469 MemDepResult getInvariantGroupPointerDependency(LoadInst *LI, BasicBlock *BB);
481 const LoadInst *LI);
include/llvm/Analysis/MemoryLocation.h 199 static MemoryLocation get(const LoadInst *LI);
210 return get(cast<LoadInst>(Inst));
include/llvm/Analysis/ObjCARCAnalysisUtils.h 188 if (const LoadInst *LI = dyn_cast<LoadInst>(Op))
188 if (const LoadInst *LI = dyn_cast<LoadInst>(Op))
221 if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
221 if (const LoadInst *LI = dyn_cast<LoadInst>(V)) {
include/llvm/Analysis/ScalarEvolution.h 1587 ExitLimit computeLoadConstantCompareExitLimit(LoadInst *LI, Constant *RHS,
include/llvm/Analysis/TargetTransformInfo.h 1089 bool isLegalToVectorizeLoad(LoadInst *LI) const;
1358 virtual bool isLegalToVectorizeLoad(LoadInst *LI) const = 0;
1810 bool isLegalToVectorizeLoad(LoadInst *LI) const override {
include/llvm/Analysis/TargetTransformInfoImpl.h 562 bool isLegalToVectorizeLoad(LoadInst *LI) const { return true; }
893 if (isa<LoadInst>(I))
include/llvm/Analysis/ValueTracking.h 392 bool mustSuppressSpeculation(const LoadInst &LI);
include/llvm/CodeGen/BasicTTIImpl.h 299 if (const LoadInst *LI = dyn_cast<LoadInst>(Src))
299 if (const LoadInst *LI = dyn_cast<LoadInst>(Src))
512 if (isa<LoadInst>(I))
720 I && isa<LoadInst>(I->getOperand(0))) {
include/llvm/CodeGen/FastISel.h 296 bool tryToFoldLoad(const LoadInst *LI, const Instruction *FoldInst);
305 const LoadInst * /*LI*/) {
include/llvm/CodeGen/TargetLowering.h 1845 virtual AtomicExpansionKind shouldExpandAtomicLoadInIR(LoadInst *LI) const {
1874 virtual LoadInst *
2374 bool isExtLoad(const LoadInst *Load, const Instruction *Ext,
2471 virtual bool lowerInterleavedLoad(LoadInst *LI,
3772 virtual bool lowerAtomicLoadAsLoadSDNode(const LoadInst &LI) const {
include/llvm/IR/IRBuilder.h 845 InstTy *Insert(InstTy *I, const Twine &Name = "") const {
845 InstTy *Insert(InstTy *I, const Twine &Name = "") const {
1591 LoadInst *CreateLoad(Type *Ty, Value *Ptr, const char *Name) {
1592 return Insert(new LoadInst(Ty, Ptr), Name);
1595 LoadInst *CreateLoad(Type *Ty, Value *Ptr, const Twine &Name = "") {
1596 return Insert(new LoadInst(Ty, Ptr), Name);
1599 LoadInst *CreateLoad(Type *Ty, Value *Ptr, bool isVolatile,
1601 return Insert(new LoadInst(Ty, Ptr, Twine(), isVolatile), Name);
1605 LoadInst *CreateLoad(Value *Ptr, const char *Name) {
1610 LoadInst *CreateLoad(Value *Ptr, const Twine &Name = "") {
1615 LoadInst *CreateLoad(Value *Ptr, bool isVolatile, const Twine &Name = "") {
1627 LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr, unsigned Align,
1629 LoadInst *LI = CreateLoad(Ty, Ptr, Name);
1633 LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr, unsigned Align,
1635 LoadInst *LI = CreateLoad(Ty, Ptr, Name);
1639 LoadInst *CreateAlignedLoad(Type *Ty, Value *Ptr, unsigned Align,
1641 LoadInst *LI = CreateLoad(Ty, Ptr, isVolatile, Name);
1647 LoadInst *CreateAlignedLoad(Value *Ptr, unsigned Align, const char *Name) {
1652 LoadInst *CreateAlignedLoad(Value *Ptr, unsigned Align,
1658 LoadInst *CreateAlignedLoad(Value *Ptr, unsigned Align, bool isVolatile,
include/llvm/IR/InstVisitor.h 171 RetTy visitLoadInst(LoadInst &I) { DELEGATE(UnaryInstruction);}
include/llvm/IR/Instructions.h 176 LoadInst *cloneImpl() const;
5259 if (auto *Load = dyn_cast<LoadInst>(V))
5287 if (auto *LI = dyn_cast<LoadInst>(I))
5287 if (auto *LI = dyn_cast<LoadInst>(I))
5297 if (auto *LI = dyn_cast<LoadInst>(I))
5297 if (auto *LI = dyn_cast<LoadInst>(I))
include/llvm/IR/ValueHandle.h 256 ValueTy *getValPtr() const { return static_cast<ValueTy *>(getRawValPtr()); }
257 void setValPtr(ValueTy *P) { setRawValPtr(GetAsValue(P)); }
262 AssertingVH(ValueTy *P) : ValueHandleBase(Assert, GetAsValue(P)) {}
269 operator ValueTy*() const {
273 ValueTy *operator=(ValueTy *RHS) {
273 ValueTy *operator=(ValueTy *RHS) {
277 ValueTy *operator=(const AssertingVH<ValueTy> &RHS) {
277 ValueTy *operator=(const AssertingVH<ValueTy> &RHS) {
282 ValueTy *operator->() const { return getValPtr(); }
283 ValueTy &operator*() const { return *getValPtr(); }
289 static inline AssertingVH<T> getEmptyKey() {
290 AssertingVH<T> Res;
295 static inline AssertingVH<T> getTombstoneKey() {
296 AssertingVH<T> Res;
301 static unsigned getHashValue(const AssertingVH<T> &Val) {
305 static bool isEqual(const AssertingVH<T> &LHS, const AssertingVH<T> &RHS) {
305 static bool isEqual(const AssertingVH<T> &LHS, const AssertingVH<T> &RHS) {
include/llvm/Support/Casting.h 58 return To::classof(&Val);
66 static inline bool doit(const From &) { return true; }
77 return isa_impl<To, From>::doit(Val);
92 return isa_impl<To, From>::doit(*Val);
104 static inline bool doit(const From *Val) {
106 return isa_impl<To, From>::doit(*Val);
106 return isa_impl<To, From>::doit(*Val);
122 return isa_impl_wrap<To, SimpleFrom,
132 return isa_impl_cl<To,FromTy>::doit(Val);
142 return isa_impl_wrap<X, const Y,
165 using ret_type = To &; // Normal case, return Ty&
168 using ret_type = const To &; // Normal case, return Ty&
172 using ret_type = To *; // Pointer arg case, return Ty*
176 using ret_type = const To *; // Constant pointer arg case, return const Ty*
198 using ret_type = typename cast_retty<To, SimpleFrom>::ret_type;
204 using ret_type = typename cast_retty_impl<To,FromTy>::ret_type;
210 To, From, typename simplify_type<From>::SimpleType>::ret_type;
218 static typename cast_retty<To, From>::ret_type doit(From &Val) {
219 return cast_convert_val<To, SimpleFrom,
227 static typename cast_retty<To, FromTy>::ret_type doit(const FromTy &Val) {
228 typename cast_retty<To, FromTy>::ret_type Res2
248 typename cast_retty<X, const Y>::ret_type>::type
256 inline typename cast_retty<X, Y>::ret_type cast(Y &Val) {
258 return cast_convert_val<X, Y,
263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
263 inline typename cast_retty<X, Y *>::ret_type cast(Y *Val) {
265 return cast_convert_val<X, Y*,
265 return cast_convert_val<X, Y*,
266 typename simplify_type<Y*>::SimpleType>::doit(Val);
331 typename cast_retty<X, const Y>::ret_type>::type
337 LLVM_NODISCARD inline typename cast_retty<X, Y>::ret_type dyn_cast(Y &Val) {
338 return isa<X>(Val) ? cast<X>(Val) : nullptr;
338 return isa<X>(Val) ? cast<X>(Val) : nullptr;
342 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type dyn_cast(Y *Val) {
343 return isa<X>(Val) ? cast<X>(Val) : nullptr;
343 return isa<X>(Val) ? cast<X>(Val) : nullptr;
366 LLVM_NODISCARD inline typename cast_retty<X, Y *>::ret_type
368 return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
368 return (Val && isa<X>(Val)) ? cast<X>(Val) : nullptr;
include/llvm/Support/PointerLikeTypeTraits.h 56 static inline void *getAsVoidPointer(T *P) { return P; }
57 static inline T *getFromVoidPointer(void *P) { return static_cast<T *>(P); }
59 enum { NumLowBitsAvailable = detail::ConstantLog2<alignof(T)>::value };
91 typedef PointerLikeTypeTraits<T *> NonConst;
93 static inline const void *getAsVoidPointer(const T *P) {
96 static inline const T *getFromVoidPointer(const void *P) {
include/llvm/Transforms/Scalar/GVN.h 251 bool processLoad(LoadInst *L);
252 bool processNonLocalLoad(LoadInst *L);
258 bool AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
264 void AnalyzeLoadAvailability(LoadInst *LI, LoadDepVect &Deps,
268 bool PerformLoadPRE(LoadInst *LI, AvailValInBlkVect &ValuesPerBlock,
include/llvm/Transforms/Scalar/GVNExpression.h 325 LoadInst *Load;
329 LoadExpression(unsigned NumOperands, LoadInst *L,
333 LoadExpression(enum ExpressionType EType, unsigned NumOperands, LoadInst *L,
349 LoadInst *getLoadInst() const { return Load; }
350 void setLoadInst(LoadInst *L) { Load = L; }
include/llvm/Transforms/Scalar/JumpThreading.h 140 bool SimplifyPartiallyRedundantLoad(LoadInst *LI);
include/llvm/Transforms/Utils/Local.h 295 LoadInst *LI, DIBuilder &Builder);
437 void copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source);
437 void copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source);
471 void copyNonnullMetadata(const LoadInst &OldLI, MDNode *N, LoadInst &NewLI);
471 void copyNonnullMetadata(const LoadInst &OldLI, MDNode *N, LoadInst &NewLI);
477 void copyRangeMetadata(const DataLayout &DL, const LoadInst &OldLI, MDNode *N,
478 LoadInst &NewLI);
include/llvm/Transforms/Utils/SSAUpdater.h 165 virtual void replaceLoadWithValue(LoadInst *LI, Value *V) const {}
include/llvm/Transforms/Utils/VNCoercion.h 62 int analyzeLoadFromClobberingLoad(Type *LoadTy, Value *LoadPtr, LoadInst *DepLI,
87 Value *getLoadValueForLoad(LoadInst *SrcVal, unsigned Offset, Type *LoadTy,
lib/Analysis/AliasAnalysis.cpp 432 ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
437 ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
lib/Analysis/AliasAnalysisEvaluator.cpp 110 if (EvalAAMD && isa<LoadInst>(&*I))
179 AliasResult AR = AA.alias(MemoryLocation::get(cast<LoadInst>(Load)),
lib/Analysis/AliasSetTracker.cpp 408 void AliasSetTracker::add(LoadInst *LI) {
462 if (LoadInst *LI = dyn_cast<LoadInst>(I))
462 if (LoadInst *LI = dyn_cast<LoadInst>(I))
lib/Analysis/BasicAliasAnalysis.cpp 173 if (isa<LoadInst>(V))
lib/Analysis/CFLGraph.h 346 void visitLoadInst(LoadInst &Inst) {
348 auto *Val = &Inst;
lib/Analysis/CaptureTracking.cpp 302 if (cast<LoadInst>(I)->isVolatile())
371 auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
371 auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
lib/Analysis/ConstantFolding.cpp 702 Constant *ConstantFoldLoadInst(const LoadInst *LI, const DataLayout &DL) {
1170 if (const auto *LI = dyn_cast<LoadInst>(I))
1170 if (const auto *LI = dyn_cast<LoadInst>(I))
lib/Analysis/Delinearization.cpp 77 if (!isa<StoreInst>(Inst) && !isa<LoadInst>(Inst) &&
lib/Analysis/DependenceAnalysis.cpp 176 if (isa<StoreInst>(*SrcI) || isa<LoadInst>(*SrcI)) {
179 if (isa<StoreInst>(*DstI) || isa<LoadInst>(*DstI)) {
678 if (const LoadInst *LI = dyn_cast<LoadInst>(I))
678 if (const LoadInst *LI = dyn_cast<LoadInst>(I))
lib/Analysis/GlobalsModRef.cpp 352 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
352 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
416 if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
416 if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
655 if (auto *LI = dyn_cast<LoadInst>(Input)) {
766 if (auto *LI = dyn_cast<LoadInst>(Input)) {
858 if (const LoadInst *LI = dyn_cast<LoadInst>(UV1))
858 if (const LoadInst *LI = dyn_cast<LoadInst>(UV1))
862 if (const LoadInst *LI = dyn_cast<LoadInst>(UV2))
862 if (const LoadInst *LI = dyn_cast<LoadInst>(UV2))
lib/Analysis/InlineCost.cpp 277 bool visitLoad(LoadInst &I);
1125 bool CallAnalyzer::visitLoad(LoadInst &I) {
lib/Analysis/InstructionPrecedenceTracking.cpp 141 if (isa<LoadInst>(Insn)) {
lib/Analysis/InstructionSimplify.cpp 3751 if (LoadInst *LI = dyn_cast<LoadInst>(I))
3751 if (LoadInst *LI = dyn_cast<LoadInst>(I))
lib/Analysis/LazyValueInfo.cpp 668 if (LoadInst *L = dyn_cast<LoadInst>(I)) {
668 if (LoadInst *L = dyn_cast<LoadInst>(I)) {
lib/Analysis/Lint.cpp 105 void visitLoadInst(LoadInst &I);
505 void Lint::visitLoadInst(LoadInst &I) {
674 if (LoadInst *L = dyn_cast<LoadInst>(V)) {
674 if (LoadInst *L = dyn_cast<LoadInst>(V)) {
lib/Analysis/Loads.cpp 197 bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
305 if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
305 if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
361 Value *llvm::FindAvailableLoadedValue(LoadInst *Load,
413 if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
413 if (LoadInst *LI = dyn_cast<LoadInst>(Inst))
lib/Analysis/LoopAccessAnalysis.cpp 1163 if (LoadInst *L = dyn_cast<LoadInst>(I))
1163 if (LoadInst *L = dyn_cast<LoadInst>(I))
1795 SmallVector<LoadInst *, 16> Loads;
1850 auto *Ld = dyn_cast<LoadInst>(&I);
1850 auto *Ld = dyn_cast<LoadInst>(&I);
1959 for (LoadInst *LD : Loads) {
2283 if (LoadInst *LI = dyn_cast<LoadInst>(MemAccess))
2283 if (LoadInst *LI = dyn_cast<LoadInst>(MemAccess))
lib/Analysis/LoopCacheAnalysis.cpp 520 if (!isa<StoreInst>(I) && !isa<LoadInst>(I))
lib/Analysis/LoopUnrollAnalyzer.cpp 94 bool UnrolledInstAnalyzer::visitLoad(LoadInst &I) {
lib/Analysis/MemDerefPrinter.cpp 54 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
54 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
lib/Analysis/MemoryBuiltins.cpp 775 SizeOffsetType ObjectSizeOffsetVisitor::visitLoadInst(LoadInst&) {
985 SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitLoadInst(LoadInst&) {
lib/Analysis/MemoryDependenceAnalysis.cpp 118 if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
118 if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
242 const LoadInst *LI) {
318 if (auto *LI = dyn_cast<LoadInst>(Inst))
318 if (auto *LI = dyn_cast<LoadInst>(Inst))
333 if (auto *LI = dyn_cast<LoadInst>(QueryInst)) {
333 if (auto *LI = dyn_cast<LoadInst>(QueryInst)) {
356 MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI,
419 if ((isa<LoadInst>(U) || isa<StoreInst>(U)) &&
483 LoadInst *LI = dyn_cast<LoadInst>(QueryInst);
483 LoadInst *LI = dyn_cast<LoadInst>(QueryInst);
502 if (auto *LI = dyn_cast<LoadInst>(I))
502 if (auto *LI = dyn_cast<LoadInst>(I))
512 return !isa<LoadInst>(I) && !isa<StoreInst>(I) && I->mayReadOrWriteMemory();
549 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
549 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
917 bool isLoad = isa<LoadInst>(QueryInst);
944 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
944 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
lib/Analysis/MemoryLocation.cpp 34 MemoryLocation MemoryLocation::get(const LoadInst *LI) {
lib/Analysis/MemorySSA.cpp 225 static bool areLoadsReorderable(const LoadInst *Use,
226 const LoadInst *MayClobber) {
304 if (auto *DefLoad = dyn_cast<LoadInst>(DefInst))
304 if (auto *DefLoad = dyn_cast<LoadInst>(DefInst))
305 if (auto *UseLoad = dyn_cast<LoadInst>(UseInst))
378 return isa<LoadInst>(I) && (I->hasMetadata(LLVMContext::MD_invariant_load) ||
380 cast<LoadInst>(I)->getPointerOperand())));
1721 } else if (auto *LI = dyn_cast<LoadInst>(I)) {
lib/Analysis/ModuleSummaryAnalysis.cpp 228 if (const auto *LI = dyn_cast<LoadInst>(I))
228 if (const auto *LI = dyn_cast<LoadInst>(I))
lib/Analysis/ScalarEvolution.cpp 7370 if (LoadInst *LI = dyn_cast<LoadInst>(ExitCond->getOperand(0)))
7370 if (LoadInst *LI = dyn_cast<LoadInst>(ExitCond->getOperand(0)))
7492 LoadInst *LI,
7714 isa<LoadInst>(I) || isa<ExtractValueInst>(I))
7839 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
7839 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
8251 else if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {
8251 else if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {
11138 else if (LoadInst *Load = dyn_cast<LoadInst>(Inst))
11138 else if (LoadInst *Load = dyn_cast<LoadInst>(Inst))
lib/Analysis/TargetTransformInfo.cpp 798 bool TargetTransformInfo::isLegalToVectorizeLoad(LoadInst *LI) const {
1208 const LoadInst *LI = cast<LoadInst>(I);
1208 const LoadInst *LI = cast<LoadInst>(I);
lib/Analysis/TypeMetadataUtils.cpp 58 } else if (isa<LoadInst>(User)) {
lib/Analysis/ValueLatticeUtils.cpp 35 } else if (auto *Load = dyn_cast<LoadInst>(U)) {
35 } else if (auto *Load = dyn_cast<LoadInst>(U)) {
lib/Analysis/ValueTracking.cpp 987 Q.IIQ.getMetadata(cast<LoadInst>(I), LLVMContext::MD_range))
2069 if (const LoadInst *LI = dyn_cast<LoadInst>(V))
2069 if (const LoadInst *LI = dyn_cast<LoadInst>(V))
3708 if (auto *Load = dyn_cast<LoadInst>(PrevValue))
3708 if (auto *Load = dyn_cast<LoadInst>(PrevValue))
3882 bool llvm::mustSuppressSpeculation(const LoadInst &LI) {
3937 const LoadInst *LI = cast<LoadInst>(Inst);
3937 const LoadInst *LI = cast<LoadInst>(Inst);
4359 return cast<LoadInst>(I)->getPointerOperand();
lib/Analysis/VectorUtils.cpp 434 if (isa<SExtInst>(I) || isa<ZExtInst>(I) || isa<LoadInst>(I) ||
818 auto *LI = dyn_cast<LoadInst>(&I);
818 auto *LI = dyn_cast<LoadInst>(&I);
lib/AsmParser/LLParser.cpp 6973 Inst = new LoadInst(Ty, Val, "", isVolatile, Alignment, Ordering, SSID);
lib/Bitcode/Reader/BitcodeReader.cpp 4795 I = new LoadInst(Ty, Op, "", Record[OpNum + 1], Align);
4832 I = new LoadInst(Ty, Op, "", Record[OpNum + 1], Align, Ordering, SSID);
lib/Bitcode/Writer/BitcodeWriter.cpp 2928 if (cast<LoadInst>(I).isAtomic()) {
2937 Vals.push_back(Log2_32(cast<LoadInst>(I).getAlignment())+1);
2938 Vals.push_back(cast<LoadInst>(I).isVolatile());
2939 if (cast<LoadInst>(I).isAtomic()) {
2940 Vals.push_back(getEncodedOrdering(cast<LoadInst>(I).getOrdering()));
2941 Vals.push_back(getEncodedSyncScopeID(cast<LoadInst>(I).getSyncScopeID()));
lib/CodeGen/AtomicExpandPass.cpp 73 LoadInst *convertAtomicLoadToIntegerType(LoadInst *LI);
73 LoadInst *convertAtomicLoadToIntegerType(LoadInst *LI);
74 bool tryExpandAtomicLoad(LoadInst *LI);
75 bool expandAtomicLoadToLL(LoadInst *LI);
76 bool expandAtomicLoadToCmpXchg(LoadInst *LI);
112 void expandAtomicLoadToLibcall(LoadInst *LI);
134 static unsigned getAtomicOpSize(LoadInst *LI) {
155 static unsigned getAtomicOpAlign(LoadInst *LI) {
191 static bool atomicSizeSupported(const TargetLowering *TLI, Inst *I) {
219 auto LI = dyn_cast<LoadInst>(I);
372 LoadInst *AtomicExpand::convertAtomicLoadToIntegerType(LoadInst *LI) {
372 LoadInst *AtomicExpand::convertAtomicLoadToIntegerType(LoadInst *LI) {
384 auto *NewLI = Builder.CreateLoad(NewTy, NewAddr);
396 bool AtomicExpand::tryExpandAtomicLoad(LoadInst *LI) {
414 bool AtomicExpand::expandAtomicLoadToLL(LoadInst *LI) {
430 bool AtomicExpand::expandAtomicLoadToCmpXchg(LoadInst *LI) {
878 LoadInst *InitLoaded = Builder.CreateLoad(PMV.WordType, PMV.AlignedAddr);
1377 LoadInst *InitLoaded = Builder.CreateLoad(ResultTy, Addr);
1466 void AtomicExpand::expandAtomicLoadToLibcall(LoadInst *I) {
lib/CodeGen/CodeGenPrepare.cpp 366 bool optimizeLoadExt(LoadInst *Load);
375 LoadInst *&LI, Instruction *&Inst, bool HasPromoted);
4442 if (LoadInst *LI = dyn_cast<LoadInst>(UserI)) {
4442 if (LoadInst *LI = dyn_cast<LoadInst>(UserI)) {
5120 if (isa<LoadInst>(I->getOperand(0))) {
5184 if (isa<LoadInst>(ExtOperand) &&
5396 const SmallVectorImpl<Instruction *> &MovedExts, LoadInst *&LI,
5399 if (isa<LoadInst>(MovedExtInst->getOperand(0))) {
5400 LI = cast<LoadInst>(MovedExtInst->getOperand(0));
5475 LoadInst *LI = nullptr;
5616 if (isa<PHINode>(UI) || isa<LoadInst>(UI) || isa<StoreInst>(UI))
5701 bool CodeGenPrepare::optimizeLoadExt(LoadInst *Load) {
6975 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
6975 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
lib/CodeGen/DwarfEHPrepare.cpp 106 LoadInst *SelLoad = nullptr;
116 SelLoad = dyn_cast<LoadInst>(SelIVI->getOperand(1));
lib/CodeGen/GCRootLowering.cpp 130 isa<LoadInst>(I))
216 Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);
lib/CodeGen/GlobalISel/IRTranslator.cpp 249 } else if (const LoadInst *LI = dyn_cast<LoadInst>(&I)) {
249 } else if (const LoadInst *LI = dyn_cast<LoadInst>(&I)) {
860 const LoadInst &LI = cast<LoadInst>(U);
860 const LoadInst &LI = cast<LoadInst>(U);
lib/CodeGen/InterleavedAccessPass.cpp 107 bool lowerInterleavedLoad(LoadInst *LI,
281 LoadInst *LI, SmallVector<Instruction *, 32> &DeadInsts) {
461 if (LoadInst *LI = dyn_cast<LoadInst>(&I))
461 if (LoadInst *LI = dyn_cast<LoadInst>(&I))
lib/CodeGen/InterleavedLoadCombinePass.cpp 93 LoadInst *findFirstLoad(const std::set<LoadInst *> &LIs);
93 LoadInst *findFirstLoad(const std::set<LoadInst *> &LIs);
650 LoadInst *LI;
652 ElementInfo(Polynomial Offset = Polynomial(), LoadInst *LI = nullptr)
663 std::set<LoadInst *> LIs;
715 LoadInst *LI = dyn_cast<LoadInst>(V);
715 LoadInst *LI = dyn_cast<LoadInst>(V);
867 static bool computeFromLI(LoadInst *LI, VectorInfo &Result,
1100 LoadInst *
1101 InterleavedLoadCombineImpl::findFirstLoad(const std::set<LoadInst *> &LIs) {
1112 return cast<LoadInst>(FLI);
1122 LoadInst *InsertionPoint = InterleavedLoad.front().EI[0].LI;
1128 std::set<LoadInst *> LIs;
1178 LoadInst *First = findFirstLoad(LIs);
lib/CodeGen/ScalarizeMaskedMemIntrin.cpp 168 LoadInst *Load = Builder.CreateAlignedLoad(EltTy, Gep, AlignVal);
212 LoadInst *Load = Builder.CreateAlignedLoad(EltTy, Gep, AlignVal);
416 LoadInst *Load =
461 LoadInst *Load =
626 LoadInst *Load =
672 LoadInst *Load = Builder.CreateAlignedLoad(EltTy, Ptr, 1);
lib/CodeGen/SelectionDAG/FastISel.cpp 2316 bool FastISel::tryToFoldLoad(const LoadInst *LI, const Instruction *FoldInst) {
2396 if (const auto *LI = dyn_cast<LoadInst>(I)) {
2396 if (const auto *LI = dyn_cast<LoadInst>(I)) {
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp 3999 void SelectionDAGBuilder::visitLoad(const LoadInst &I) {
4149 void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
4648 void SelectionDAGBuilder::visitAtomicLoad(const LoadInst &I) {
lib/CodeGen/SelectionDAG/SelectionDAGBuilder.h 721 void visitLoad(const LoadInst &I);
741 void visitAtomicLoad(const LoadInst &I);
743 void visitLoadFromSwiftError(const LoadInst &I);
lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp 1479 if (BeforeInst != Inst && isa<LoadInst>(BeforeInst) &&
1481 FastIS->tryToFoldLoad(cast<LoadInst>(BeforeInst), Inst)) {
lib/CodeGen/StackProtector.cpp 448 LoadInst *Guard = B.CreateLoad(B.getInt8PtrTy(), AI, true, "Guard");
503 LoadInst *LI2 = B.CreateLoad(B.getInt8PtrTy(), AI, true);
lib/CodeGen/SwiftErrorValueTracking.cpp 288 } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
288 } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
lib/CodeGen/WinEHPrepare.cpp 1082 Value *V = new LoadInst(PN->getType(), SpillSlot,
1225 Load = new LoadInst(V->getType(), SpillSlot,
1232 auto *Load = new LoadInst(V->getType(), SpillSlot,
1232 auto *Load = new LoadInst(V->getType(), SpillSlot,
lib/ExecutionEngine/Interpreter/Execution.cpp 1091 void Interpreter::visitLoadInst(LoadInst &I) {
lib/ExecutionEngine/Interpreter/Interpreter.h 132 void visitLoadInst(LoadInst &I);
lib/ExecutionEngine/Orc/IndirectionUtils.cpp 243 LoadInst *ImplAddr = Builder.CreateLoad(F.getType(), &ImplPointer);
lib/FuzzMutate/RandomIRBuilder.cpp 56 auto *NewLoad = new LoadInst(
56 auto *NewLoad = new LoadInst(
lib/IR/AsmWriter.cpp 3690 if ((isa<LoadInst>(I) && cast<LoadInst>(I).isAtomic()) ||
3690 if ((isa<LoadInst>(I) && cast<LoadInst>(I).isAtomic()) ||
3698 if ((isa<LoadInst>(I) && cast<LoadInst>(I).isVolatile()) ||
3698 if ((isa<LoadInst>(I) && cast<LoadInst>(I).isVolatile()) ||
4016 } else if (const auto *LI = dyn_cast<LoadInst>(&I)) {
4016 } else if (const auto *LI = dyn_cast<LoadInst>(&I)) {
4057 if (const LoadInst *LI = dyn_cast<LoadInst>(&I)) {
4057 if (const LoadInst *LI = dyn_cast<LoadInst>(&I)) {
lib/IR/AutoUpgrade.cpp 3033 LoadInst *LI = Builder.CreateAlignedLoad(VTy, BC, VTy->getBitWidth() / 8);
lib/IR/Core.cpp 1999 if (LoadInst *LI = dyn_cast<LoadInst>(P))
1999 if (LoadInst *LI = dyn_cast<LoadInst>(P))
2014 else if (LoadInst *LI = dyn_cast<LoadInst>(P))
2014 else if (LoadInst *LI = dyn_cast<LoadInst>(P))
3645 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3645 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3656 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3656 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3676 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3676 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3689 if (LoadInst *LI = dyn_cast<LoadInst>(P))
3689 if (LoadInst *LI = dyn_cast<LoadInst>(P))
lib/IR/Instruction.cpp 388 if (const LoadInst *LI = dyn_cast<LoadInst>(I1))
388 if (const LoadInst *LI = dyn_cast<LoadInst>(I1))
389 return LI->isVolatile() == cast<LoadInst>(I2)->isVolatile() &&
390 (LI->getAlignment() == cast<LoadInst>(I2)->getAlignment() ||
392 LI->getOrdering() == cast<LoadInst>(I2)->getOrdering() &&
393 LI->getSyncScopeID() == cast<LoadInst>(I2)->getSyncScopeID();
549 return !cast<LoadInst>(this)->isUnordered();
562 return cast<LoadInst>(this)->getOrdering() != AtomicOrdering::NotAtomic;
lib/IR/Instructions.cpp 4138 LoadInst *LoadInst::cloneImpl() const {
4139 return new LoadInst(getType(), getOperand(0), Twine(), isVolatile(),
lib/IR/Value.cpp 632 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
632 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
726 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
726 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
lib/IR/Verifier.cpp 463 void visitLoadInst(LoadInst &LI);
3376 void Verifier::visitLoadInst(LoadInst &LI) {
lib/Target/AArch64/AArch64FalkorHWPFFix.cpp 155 LoadInst *LoadI = dyn_cast<LoadInst>(&I);
155 LoadInst *LoadI = dyn_cast<LoadInst>(&I);
lib/Target/AArch64/AArch64FastISel.cpp 315 if (const auto *LI = dyn_cast<LoadInst>(I->getOperand(0)))
315 if (const auto *LI = dyn_cast<LoadInst>(I->getOperand(0)))
1983 cast<LoadInst>(I)->isAtomic())
4526 const auto *LI = dyn_cast<LoadInst>(I->getOperand(0));
4526 const auto *LI = dyn_cast<LoadInst>(I->getOperand(0));
lib/Target/AArch64/AArch64ISelLowering.cpp 8784 LoadInst *LI, ArrayRef<ShuffleVectorInst *> Shuffles,
12134 AArch64TargetLowering::shouldExpandAtomicLoadInIR(LoadInst *LI) const {
lib/Target/AArch64/AArch64ISelLowering.h 362 bool lowerInterleavedLoad(LoadInst *LI,
425 shouldExpandAtomicLoadInIR(LoadInst *LI) const override;
lib/Target/AArch64/AArch64PromoteConstant.cpp 276 if (isa<const LoadInst>(Instr) && OpIdx > 0)
496 LoadInst *LoadedCst =
lib/Target/AArch64/AArch64TargetTransformInfo.cpp 730 LoadInst *LMemI = dyn_cast<LoadInst>(&I);
730 LoadInst *LMemI = dyn_cast<LoadInst>(&I);
lib/Target/AMDGPU/AMDGPUAnnotateUniformValues.cpp 56 void visitLoadInst(LoadInst &I);
57 bool isClobberedInFunction(LoadInst * Load);
85 bool AMDGPUAnnotateUniformValues::isClobberedInFunction(LoadInst * Load) {
122 void AMDGPUAnnotateUniformValues::visitLoadInst(LoadInst &I) {
lib/Target/AMDGPU/AMDGPUCodeGenPrepare.cpp 170 bool canWidenScalarExtLoad(LoadInst &I) const;
181 bool visitLoadInst(LoadInst &I);
273 bool AMDGPUCodeGenPrepare::canWidenScalarExtLoad(LoadInst &I) const {
930 bool AMDGPUCodeGenPrepare::visitLoadInst(LoadInst &I) {
943 LoadInst *WidenLoad = Builder.CreateLoad(I32Ty, BitCast);
lib/Target/AMDGPU/AMDGPULibCalls.cpp 1297 LoadInst *LI = dyn_cast<LoadInst>(CArgVal);
1297 LoadInst *LI = dyn_cast<LoadInst>(CArgVal);
lib/Target/AMDGPU/AMDGPULowerKernelArguments.cpp 162 LoadInst *Load =
lib/Target/AMDGPU/AMDGPULowerKernelAttributes.cpp 108 auto *Load = dyn_cast<LoadInst>(*BCI->user_begin());
108 auto *Load = dyn_cast<LoadInst>(*BCI->user_begin());
lib/Target/AMDGPU/AMDGPUPerfHintAnalysis.cpp 125 if (auto LI = dyn_cast<LoadInst>(Inst)) {
160 if (auto LD = dyn_cast<LoadInst>(V)) {
lib/Target/AMDGPU/AMDGPUPrintfRuntimeBinding.cpp 163 if (auto LI = dyn_cast<LoadInst>(Op)) {
lib/Target/AMDGPU/AMDGPUPromoteAlloca.cpp 252 LoadInst *LoadXY = Builder.CreateAlignedLoad(I32Ty, GEPXY, 4);
255 LoadInst *LoadZU = Builder.CreateAlignedLoad(I32Ty, GEPZU, 4);
333 LoadInst *LI = cast<LoadInst>(Inst);
333 LoadInst *LI = cast<LoadInst>(Inst);
429 Value *Ptr = cast<LoadInst>(Inst)->getPointerOperand();
542 if (LoadInst *LI = dyn_cast<LoadInst>(UseInst)) {
542 if (LoadInst *LI = dyn_cast<LoadInst>(UseInst)) {
lib/Target/AMDGPU/AMDGPUTargetTransformInfo.cpp 557 if (const LoadInst *Load = dyn_cast<LoadInst>(V))
557 if (const LoadInst *Load = dyn_cast<LoadInst>(V))
lib/Target/ARM/ARMCodeGenPrepare.cpp 217 if (auto *Ld = dyn_cast<LoadInst>(V))
217 if (auto *Ld = dyn_cast<LoadInst>(V))
241 else if (isa<LoadInst>(V))
lib/Target/ARM/ARMFastISel.cpp 160 const LoadInst *LI) override;
1021 if (cast<LoadInst>(I)->isAtomic())
1049 if (!ARMEmitLoad(VT, ResultReg, Addr, cast<LoadInst>(I)->getAlignment()))
2910 const LoadInst *LI) {
lib/Target/ARM/ARMISelLowering.cpp 16491 ARMTargetLowering::shouldExpandAtomicLoadInIR(LoadInst *LI) const {
16739 LoadInst *LI, ArrayRef<ShuffleVectorInst *> Shuffles,
lib/Target/ARM/ARMISelLowering.h 553 bool lowerInterleavedLoad(LoadInst *LI,
562 shouldExpandAtomicLoadInIR(LoadInst *LI) const override;
lib/Target/ARM/ARMParallelDSP.cpp 55 using MemInstList = SmallVectorImpl<LoadInst*>;
67 SmallVector<LoadInst*, 2> VecLd; // Container for loads to widen.
73 return isa<LoadInst>(LHS) && isa<LoadInst>(RHS);
73 return isa<LoadInst>(LHS) && isa<LoadInst>(RHS);
76 LoadInst *getBaseLoad() const {
197 LoadInst *NewLd = nullptr;
198 SmallVector<LoadInst*, 4> Loads;
201 WidenedLoad(SmallVectorImpl<LoadInst*> &Lds, LoadInst *Wide)
201 WidenedLoad(SmallVectorImpl<LoadInst*> &Lds, LoadInst *Wide)
203 for (auto *I : Lds)
206 LoadInst *getLoad() {
218 std::map<LoadInst*, LoadInst*> LoadPairs;
218 std::map<LoadInst*, LoadInst*> LoadPairs;
219 SmallPtrSet<LoadInst*, 4> OffsetLoads;
220 std::map<LoadInst*, std::unique_ptr<WidenedLoad>> WideLoads;
227 bool AreSequentialLoads(LoadInst *Ld0, LoadInst *Ld1, MemInstList &VecMem);
227 bool AreSequentialLoads(LoadInst *Ld0, LoadInst *Ld1, MemInstList &VecMem);
228 LoadInst* CreateWideLoad(MemInstList &Loads, IntegerType *LoadTy);
302 static bool AreSequentialAccesses(MemInst *MemOp0, MemInst *MemOp1,
302 static bool AreSequentialAccesses(MemInst *MemOp0, MemInst *MemOp1,
309 bool ARMParallelDSP::AreSequentialLoads(LoadInst *Ld0, LoadInst *Ld1,
309 bool ARMParallelDSP::AreSequentialLoads(LoadInst *Ld0, LoadInst *Ld1,
339 if (auto *Ld = dyn_cast<LoadInst>(SExt->getOperand(0))) {
339 if (auto *Ld = dyn_cast<LoadInst>(SExt->getOperand(0))) {
350 SmallVector<LoadInst*, 8> Loads;
362 auto *Ld = dyn_cast<LoadInst>(&I);
362 auto *Ld = dyn_cast<LoadInst>(&I);
394 LoadInst *Dominator = OrderedBB.dominates(Base, Offset) ? Base : Offset;
395 LoadInst *Dominated = OrderedBB.dominates(Base, Offset) ? Offset : Base;
411 for (auto *Base : Loads) {
412 for (auto *Offset : Loads) {
416 if (AreSequentialAccesses<LoadInst>(Base, Offset, *DL, *SE) &&
718 LoadInst *BaseLHS = LHSMul->getBaseLoad();
719 LoadInst *BaseRHS = RHSMul->getBaseLoad();
720 LoadInst *WideLHS = WideLoads.count(BaseLHS) ?
722 LoadInst *WideRHS = WideLoads.count(BaseRHS) ?
732 LoadInst* ARMParallelDSP::CreateWideLoad(MemInstList &Loads,
736 LoadInst *Base = Loads[0];
737 LoadInst *Offset = Loads[1];
764 LoadInst *DomLoad = DT->dominates(Base, Offset) ? Base : Offset;
774 LoadInst *WideLoad = IRB.CreateAlignedLoad(LoadTy, VecPtr,
lib/Target/ARM/ARMTargetTransformInfo.cpp 175 if (I && isa<LoadInst>(I->getOperand(0))) {
lib/Target/BPF/BPFAbstractMemberAccess.cpp 870 auto *LDInst = new LoadInst(Type::getInt32Ty(BB->getContext()), GV);
870 auto *LDInst = new LoadInst(Type::getInt32Ty(BB->getContext()), GV);
888 auto *LDInst = new LoadInst(Type::getInt64Ty(BB->getContext()), GV);
888 auto *LDInst = new LoadInst(Type::getInt64Ty(BB->getContext()), GV);
lib/Target/Hexagon/HexagonCommonGEP.cpp 1027 if (LoadInst *Ld = dyn_cast<LoadInst>(R)) {
1027 if (LoadInst *Ld = dyn_cast<LoadInst>(R)) {
1028 unsigned PtrX = LoadInst::getPointerOperandIndex();
lib/Target/Hexagon/HexagonISelLowering.cpp 3286 HexagonTargetLowering::shouldExpandAtomicLoadInIR(LoadInst *LI) const {
lib/Target/Hexagon/HexagonISelLowering.h 323 AtomicExpansionKind shouldExpandAtomicLoadInIR(LoadInst *LI) const override;
lib/Target/Hexagon/HexagonLoopIdiomRecognition.cpp 1943 LoadInst *LI = dyn_cast<LoadInst>(SI->getValueOperand());
1943 LoadInst *LI = dyn_cast<LoadInst>(SI->getValueOperand());
2023 auto *LI = cast<LoadInst>(SI->getValueOperand());
2023 auto *LI = cast<LoadInst>(SI->getValueOperand());
lib/Target/Hexagon/HexagonTargetTransformInfo.cpp 307 const LoadInst *LI = dyn_cast<const LoadInst>(CI->getOperand(0));
307 const LoadInst *LI = dyn_cast<const LoadInst>(CI->getOperand(0));
lib/Target/Mips/MipsFastISel.cpp 894 if (cast<LoadInst>(I)->isAtomic())
908 if (!emitLoad(VT, ResultReg, Addr, cast<LoadInst>(I)->getAlignment()))
lib/Target/NVPTX/NVPTXLowerAggrCopies.cpp 60 SmallVector<LoadInst *, 4> AggrLoads;
72 if (LoadInst *LI = dyn_cast<LoadInst>(II)) {
72 if (LoadInst *LI = dyn_cast<LoadInst>(II)) {
105 for (LoadInst *LI : AggrLoads) {
lib/Target/NVPTX/NVPTXLowerAlloca.cpp 91 auto LI = dyn_cast<LoadInst>(AllocaUse.getUser());
lib/Target/NVPTX/NVPTXLowerArgs.cpp 173 LoadInst *LI =
174 new LoadInst(StructType, ArgInParam, Arg->getName(), FirstInst);
213 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
213 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
lib/Target/NVPTX/NVPTXTargetTransformInfo.cpp 79 if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {
79 if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {
lib/Target/PowerPC/PPCFastISel.cpp 111 const LoadInst *LI) override;
595 if (cast<LoadInst>(I)->isAtomic())
2294 const LoadInst *LI) {
lib/Target/PowerPC/PPCISelLowering.cpp10285 if (isa<LoadInst>(Inst) && Subtarget.isPPC64())
lib/Target/PowerPC/PPCLoopPreIncPrep.cpp 184 if (LoadInst *LMemI = dyn_cast<LoadInst>(MemI)) {
184 if (LoadInst *LMemI = dyn_cast<LoadInst>(MemI)) {
249 if (LoadInst *LMemI = dyn_cast<LoadInst>(&J)) {
249 if (LoadInst *LMemI = dyn_cast<LoadInst>(&J)) {
lib/Target/RISCV/RISCVISelLowering.cpp 2724 if (isa<LoadInst>(Inst) && Ord == AtomicOrdering::SequentiallyConsistent)
2734 if (isa<LoadInst>(Inst) && isAcquireOrStronger(Ord))
lib/Target/RISCV/RISCVISelLowering.h 119 return isa<LoadInst>(I) || isa<StoreInst>(I);
lib/Target/SystemZ/SystemZISelLowering.cpp 849 if (isa<LoadInst>(I) && I->hasOneUse()) {
863 if (auto *LoadI = dyn_cast<LoadInst>(StoreI->getValueOperand()))
863 if (auto *LoadI = dyn_cast<LoadInst>(StoreI->getValueOperand()))
869 if (HasVector && (isa<LoadInst>(I) || isa<StoreInst>(I))) {
877 Type *MemAccessTy = (isa<LoadInst>(I) ? I->getType() :
892 if (!IsVectorAccess && isa<LoadInst>(I) && I->hasOneUse()) {
3848 if (auto *LI = dyn_cast<LoadInst>(&I))
lib/Target/SystemZ/SystemZTargetTransformInfo.cpp 768 (I != nullptr && isa<LoadInst>(I->getOperand(0))))
802 if (!isa<LoadInst>(Op) && !isa<ConstantInt>(Op))
867 if (LoadInst *Ld = dyn_cast<LoadInst>(I->getOperand(0)))
867 if (LoadInst *Ld = dyn_cast<LoadInst>(I->getOperand(0)))
910 isFoldableLoad(const LoadInst *Ld, const Instruction *&FoldedValue) {
1005 if (isFoldableLoad(cast<LoadInst>(I), FoldedValue)) {
1016 LoadInst *OtherLoad = dyn_cast<LoadInst>(OtherOp);
1016 LoadInst *OtherLoad = dyn_cast<LoadInst>(OtherOp);
1020 OtherLoad = dyn_cast<LoadInst>(OtherOp->getOperand(0));
lib/Target/SystemZ/SystemZTargetTransformInfo.h 89 bool isFoldableLoad(const LoadInst *Ld, const Instruction *&FoldedValue);
lib/Target/WebAssembly/WebAssemblyFastISel.cpp 1157 const auto *Load = cast<LoadInst>(I);
1157 const auto *Load = cast<LoadInst>(I);
lib/Target/WebAssembly/WebAssemblyLowerEmscriptenEHSjLj.cpp 982 LoadInst *ThrewLI = nullptr;
986 if (auto *LI = dyn_cast<LoadInst>(I))
986 if (auto *LI = dyn_cast<LoadInst>(I))
lib/Target/X86/X86FastISel.cpp 75 const LoadInst *LI) override;
1293 const LoadInst *LI = cast<LoadInst>(I);
1293 const LoadInst *LI = cast<LoadInst>(I);
3925 const LoadInst *LI) {
lib/Target/X86/X86ISelLowering.cpp26464 X86TargetLowering::shouldExpandAtomicLoadInIR(LoadInst *LI) const {
26522 LoadInst *
26580 LoadInst *Loaded =
26594 bool X86TargetLowering::lowerAtomicLoadAsLoadSDNode(const LoadInst &LI) const {
lib/Target/X86/X86ISelLowering.h 1239 bool lowerInterleavedLoad(LoadInst *LI,
1408 shouldExpandAtomicLoadInIR(LoadInst *SI) const override;
1413 LoadInst *
1417 bool lowerAtomicLoadAsLoadSDNode(const LoadInst &LI) const override;
lib/Target/X86/X86InterleavedAccess.cpp 143 if (isa<LoadInst>(Inst)) {
145 if (cast<LoadInst>(Inst)->getPointerAddressSpace())
195 LoadInst *LI = cast<LoadInst>(VecInst);
195 LoadInst *LI = cast<LoadInst>(VecInst);
726 if (isa<LoadInst>(Inst)) {
807 LoadInst *LI, ArrayRef<ShuffleVectorInst *> Shuffles,
lib/Target/XCore/XCoreISelLowering.cpp 1006 if (auto *LI = dyn_cast<LoadInst>(&I))
lib/Transforms/CFGuard/CFGuard.cpp 169 LoadInst *GuardCheckLoad = B.CreateLoad(GuardFnPtrType, GuardFnGlobal);
199 LoadInst *GuardDispatchLoad = B.CreateLoad(CalledOperandType, GuardFnGlobal);
lib/Transforms/Coroutines/CoroCleanup.cpp 52 auto *Load = Builder.CreateLoad(FrameTy->getElementType(Index), Gep);
lib/Transforms/Coroutines/CoroEarly.cpp 101 auto *Load = Builder.CreateLoad(BCI);
lib/Transforms/Coroutines/CoroFrame.cpp 508 void visitLoadInst(LoadInst &) {} // Good. Nothing to do.
787 auto *Value = Builder.CreateLoad(A);
1237 if (isa<LoadInst>(User) || isa<StoreInst>(User))
lib/Transforms/Coroutines/CoroSplit.cpp 285 auto *Index = Builder.CreateLoad(Shape.getIndexType(), GepIndex, "index");
385 auto *Load = Builder.CreateLoad(Shape.getSwitchResumePointerType(),
lib/Transforms/IPO/ArgumentPromotion.cpp 129 std::map<std::pair<Argument *, IndicesVector>, LoadInst *> OriginalLoads;
170 if (LoadInst *L = dyn_cast<LoadInst>(UI))
170 if (LoadInst *L = dyn_cast<LoadInst>(UI))
186 LoadInst *OrigLoad;
187 if (LoadInst *L = dyn_cast<LoadInst>(UI))
187 if (LoadInst *L = dyn_cast<LoadInst>(UI))
191 OrigLoad = cast<LoadInst>(UI->user_back());
281 LoadInst *OrigLoad =
305 LoadInst *newLoad =
429 if (LoadInst *LI = dyn_cast<LoadInst>(I->user_back())) {
429 if (LoadInst *LI = dyn_cast<LoadInst>(I->user_back())) {
468 LoadInst *L = cast<LoadInst>(GEP->user_back());
468 LoadInst *L = cast<LoadInst>(GEP->user_back());
628 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
628 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
664 SmallVector<LoadInst *, 16> Loads;
669 if (LoadInst *LI = dyn_cast<LoadInst>(UR)) {
669 if (LoadInst *LI = dyn_cast<LoadInst>(UR)) {
703 if (LoadInst *LI = dyn_cast<LoadInst>(GEPU)) {
703 if (LoadInst *LI = dyn_cast<LoadInst>(GEPU)) {
751 for (LoadInst *Load : Loads) {
833 } else if (!isa<LoadInst>(V)) {
lib/Transforms/IPO/Attributor.cpp 286 if (auto *LI = dyn_cast<LoadInst>(I))
1303 Ordering = cast<LoadInst>(I)->getOrdering();
1371 return cast<LoadInst>(I)->isVolatile();
2712 } else if (auto *LI = dyn_cast<LoadInst>(U.getUser())) {
2712 } else if (auto *LI = dyn_cast<LoadInst>(U.getUser())) {
3649 if (isa<LoadInst>(UserI))
4133 if (isa<LoadInst>(UserI))
4844 if (isa<LoadInst>(I))
4846 IRPosition::value(*cast<LoadInst>(I).getPointerOperand()));
lib/Transforms/IPO/CalledValuePropagation.cpp 178 return visitLoad(*cast<LoadInst>(&I), ChangedValues, SS);
314 void visitLoad(LoadInst &I,
lib/Transforms/IPO/FunctionAttrs.cpp 185 } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
185 } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
553 if (cast<LoadInst>(I)->isVolatile())
lib/Transforms/IPO/GlobalOpt.cpp 165 if (isa<LoadInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V) ||
291 if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
291 if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
407 if (isa<LoadInst>(I)) return true;
623 if (isa<LoadInst>(U)) {
665 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
665 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
687 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
687 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
762 if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) {
762 if (LoadInst *LI = dyn_cast<LoadInst>(GlobalUser)) {
900 LoadInst *LI = cast<LoadInst>(GV->user_back());
900 LoadInst *LI = cast<LoadInst>(GV->user_back());
911 Value *LV = new LoadInst(InitBool->getValueType(), InitBool,
971 if (isa<LoadInst>(Inst) || isa<CmpInst>(Inst)) {
1048 new LoadInst(GV->getValueType(), GV, GV->getName() + ".val", InsertPt);
1112 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
1112 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
1139 if (const LoadInst *LI = dyn_cast<LoadInst>(InVal))
1139 if (const LoadInst *LI = dyn_cast<LoadInst>(InVal))
1168 if (LoadInst *LI = dyn_cast<LoadInst>(V)) {
1168 if (LoadInst *LI = dyn_cast<LoadInst>(V)) {
1173 Result = new LoadInst(V->getType()->getPointerElementType(), V,
1260 static void RewriteUsesOfLoadForHeapSRoA(LoadInst *Load,
1365 new LoadInst(cast<GlobalVariable>(FieldGlobals[i])->getValueType(),
1403 if (LoadInst *LI = dyn_cast<LoadInst>(User)) {
1403 if (LoadInst *LI = dyn_cast<LoadInst>(User)) {
1446 else if (LoadInst *LI = dyn_cast<LoadInst>(I->first))
1446 else if (LoadInst *LI = dyn_cast<LoadInst>(I->first))
1456 else if (LoadInst *LI = dyn_cast<LoadInst>(I->first))
1456 else if (LoadInst *LI = dyn_cast<LoadInst>(I->first))
1622 if (!isa<LoadInst>(U) && !isa<StoreInst>(U))
1715 if (LoadInst *LI = dyn_cast<LoadInst>(StoredVal)) {
1715 if (LoadInst *LI = dyn_cast<LoadInst>(StoredVal)) {
1718 StoreVal = new LoadInst(NewGV->getValueType(), NewGV,
1734 LoadInst *LI = cast<LoadInst>(UI);
1734 LoadInst *LI = cast<LoadInst>(UI);
1735 LoadInst *NLI = new LoadInst(NewGV->getValueType(), NewGV,
1735 LoadInst *NLI = new LoadInst(NewGV->getValueType(), NewGV,
1799 SmallVector<LoadInst *, 4> Loads;
1804 if (auto *LI = dyn_cast<LoadInst>(UU))
1804 if (auto *LI = dyn_cast<LoadInst>(UU))
1819 if (auto *LI = dyn_cast<LoadInst>(I))
1819 if (auto *LI = dyn_cast<LoadInst>(I))
1846 for (auto *L : Loads) {
lib/Transforms/InstCombine/InstCombineAtomicRMW.cpp 155 LoadInst *Load = new LoadInst(RMWI.getType(), RMWI.getPointerOperand());
155 LoadInst *Load = new LoadInst(RMWI.getType(), RMWI.getPointerOperand());
lib/Transforms/InstCombine/InstCombineCalls.cpp 186 LoadInst *L = Builder.CreateLoad(IntType, Src);
2338 return new LoadInst(II->getType(), Ptr);
2346 return new LoadInst(II->getType(), Ptr, Twine(""), false, Align::None());
2384 return new LoadInst(II->getType(), Ptr);
lib/Transforms/InstCombine/InstCombineCasts.cpp 2186 if (auto *LI = dyn_cast<LoadInst>(IncValue)) {
2186 if (auto *LI = dyn_cast<LoadInst>(IncValue)) {
2192 if (Addr == &CI || isa<LoadInst>(Addr))
2236 } else if (auto *LI = dyn_cast<LoadInst>(V)) {
2236 } else if (auto *LI = dyn_cast<LoadInst>(V)) {
lib/Transforms/InstCombine/InstCombineCompares.cpp 1062 } else if (isa<LoadInst>(V)) {
1836 if (auto *LI = dyn_cast<LoadInst>(X))
1836 if (auto *LI = dyn_cast<LoadInst>(X))
3255 !cast<LoadInst>(LHSI)->isVolatile())
6062 !cast<LoadInst>(LHSI)->isVolatile())
lib/Transforms/InstCombine/InstCombineInternal.h 434 Instruction *visitLoadInst(LoadInst &LI);
lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp 73 if (auto *LI = dyn_cast<LoadInst>(I)) {
73 if (auto *LI = dyn_cast<LoadInst>(I)) {
273 if (isa<LoadInst>(Inst)) {
298 if (auto *LT = dyn_cast<LoadInst>(I)) {
298 if (auto *LT = dyn_cast<LoadInst>(I)) {
301 auto *NewI = new LoadInst(I->getType(), V);
301 auto *NewI = new LoadInst(I->getType(), V);
452 static LoadInst *combineLoadToNewType(InstCombiner &IC, LoadInst &LI, Type *NewTy,
452 static LoadInst *combineLoadToNewType(InstCombiner &IC, LoadInst &LI, Type *NewTy,
465 LoadInst *NewLoad = IC.Builder.CreateAlignedLoad(
566 static Instruction *combineLoadToOperationType(InstCombiner &IC, LoadInst &LI) {
599 LoadInst *NewLoad = combineLoadToNewType(
623 LoadInst *NewLoad = combineLoadToNewType(IC, LI, CI->getDestTy());
634 static Instruction *unpackLoadToAggregate(InstCombiner &IC, LoadInst &LI) {
651 LoadInst *NewLoad = combineLoadToNewType(IC, LI, ST->getTypeAtIndex(0U),
684 auto *L = IC.Builder.CreateAlignedLoad(ST->getElementType(i), Ptr,
701 LoadInst *NewLoad = combineLoadToNewType(IC, LI, ET, ".unpack");
735 auto *L = IC.Builder.CreateAlignedLoad(
908 T &MemI) {
935 static bool canSimplifyNullLoadOrGEP(LoadInst &LI, Value *Op) {
949 Instruction *InstCombiner::visitLoadInst(LoadInst &LI) {
985 combineMetadataForCSE(cast<LoadInst>(AvailableVal), &LI, false);
1028 LoadInst *V1 =
1031 LoadInst *V2 =
1322 auto *LI = cast<LoadInst>(SI.getValueOperand());
1322 auto *LI = cast<LoadInst>(SI.getValueOperand());
1337 LoadInst *NewLI = combineLoadToNewType(
1436 if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
1436 if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
lib/Transforms/InstCombine/InstCombinePHI.cpp 109 if (LoadInst *LoadI = dyn_cast<LoadInst>(U)) {
109 if (LoadInst *LoadI = dyn_cast<LoadInst>(U)) {
164 auto *LoadI = dyn_cast<LoadInst>(Arg);
164 auto *LoadI = dyn_cast<LoadInst>(Arg);
244 LoadInst *LoadI = dyn_cast<LoadInst>(IncomingVal);
244 LoadInst *LoadI = dyn_cast<LoadInst>(IncomingVal);
493 static bool isSafeAndProfitableToSinkLoad(LoadInst *L) {
505 if (isa<LoadInst>(U)) continue;
532 LoadInst *FirstLI = cast<LoadInst>(PN.getIncomingValue(0));
532 LoadInst *FirstLI = cast<LoadInst>(PN.getIncomingValue(0));
563 LoadInst *LI = dyn_cast<LoadInst>(PN.getIncomingValue(i));
563 LoadInst *LI = dyn_cast<LoadInst>(PN.getIncomingValue(i));
598 LoadInst *NewLI =
599 new LoadInst(FirstLI->getType(), NewPN, "", isVolatile, LoadAlignment);
619 LoadInst *LI = cast<LoadInst>(PN.getIncomingValue(i));
619 LoadInst *LI = cast<LoadInst>(PN.getIncomingValue(i));
641 cast<LoadInst>(IncValue)->setVolatile(false);
735 if (isa<LoadInst>(FirstInst))
lib/Transforms/InstCombine/InstructionCombining.cpp 2262 if (auto *LI = dyn_cast<LoadInst>(V))
2262 if (auto *LI = dyn_cast<LoadInst>(V))
2729 if (LoadInst *L = dyn_cast<LoadInst>(Agg))
2729 if (LoadInst *L = dyn_cast<LoadInst>(Agg))
lib/Transforms/Instrumentation/AddressSanitizer.cpp 1357 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1357 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
lib/Transforms/Instrumentation/BoundsChecking.cpp 155 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
155 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
lib/Transforms/Instrumentation/DataFlowSanitizer.cpp 445 void visitLoadInst(LoadInst &LI);
1214 LoadInst *LI = new LoadInst(DFS.ShadowTy, ShadowAddr, "", Pos);
1214 LoadInst *LI = new LoadInst(DFS.ShadowTy, ShadowAddr, "", Pos);
1299 void DFSanVisitor::visitLoadInst(LoadInst &LI) {
1441 if (isa<LoadInst>(U))
1659 LoadInst *LabelLoad =
1698 LoadInst *LI = NextIRB.CreateLoad(DFSF.DFS.ShadowTy, DFSF.getRetvalTLS());
lib/Transforms/Instrumentation/GCOVProfiling.cpp 1115 auto *NumCounters =
1118 auto *EmitFunctionCallArgsArray =
1121 auto *EmitArcsCallArgsArray =
lib/Transforms/Instrumentation/HWAddressSanitizer.cpp 520 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
520 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
565 if (LoadInst *LI = dyn_cast<LoadInst>(I))
565 if (LoadInst *LI = dyn_cast<LoadInst>(I))
951 LoadInst *ReloadThreadLong = IRB.CreateLoad(IntptrTy, SlotPtr);
lib/Transforms/Instrumentation/InstrOrderFile.cpp 138 LoadInst *loadBitMap = entryB.CreateLoad(Int8Ty, MapAddr, "");
lib/Transforms/Instrumentation/InstrProfiling.cpp 211 LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
1008 auto *Load = IRB.CreateLoad(Int32Ty, Var);
lib/Transforms/Instrumentation/MemorySanitizer.cpp 1779 void visitLoadInst(LoadInst &I) {
lib/Transforms/Instrumentation/ThreadSanitizer.cpp 349 } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {
349 } else if (LoadInst *L = dyn_cast<LoadInst>(Addr)) {
383 LoadInst *Load = cast<LoadInst>(I);
383 LoadInst *Load = cast<LoadInst>(I);
399 : cast<LoadInst>(I)->getPointerOperand();
415 if (LoadInst *LI = dyn_cast<LoadInst>(I))
415 if (LoadInst *LI = dyn_cast<LoadInst>(I))
458 else if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
524 : cast<LoadInst>(I)->getPointerOperand();
561 : cast<LoadInst>(I)->getAlignment();
630 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
630 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
lib/Transforms/ObjCARC/ObjCARCContract.cpp 206 static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
374 auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
374 auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
lib/Transforms/ObjCARC/ObjCARCOpts.cpp 1849 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
1849 if (const LoadInst *LI = dyn_cast<LoadInst>(Arg))
lib/Transforms/ObjCARC/ProvenanceAnalysis.cpp 134 if (isa<LoadInst>(B))
138 if (isa<LoadInst>(A))
145 if (isa<LoadInst>(A))
lib/Transforms/Scalar/AlignmentFromAssumptions.cpp 319 if (LoadInst *LI = dyn_cast<LoadInst>(J)) {
319 if (LoadInst *LI = dyn_cast<LoadInst>(J)) {
lib/Transforms/Scalar/ConstantHoisting.cpp 665 if (LoadInst *LI = dyn_cast<LoadInst>(UI)) {
665 if (LoadInst *LI = dyn_cast<LoadInst>(UI)) {
lib/Transforms/Scalar/CorrelatedValuePropagation.cpp 283 if (LoadInst *L = dyn_cast<LoadInst>(I))
283 if (LoadInst *L = dyn_cast<LoadInst>(I))
lib/Transforms/Scalar/DeadStoreElimination.cpp 876 if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
876 if (LoadInst *L = dyn_cast<LoadInst>(BBI)) {
1039 if (LoadInst *DepLoad = dyn_cast<LoadInst>(SI->getValueOperand())) {
1039 if (LoadInst *DepLoad = dyn_cast<LoadInst>(SI->getValueOperand())) {
lib/Transforms/Scalar/EarlyCSE.cpp 613 return isa<LoadInst>(Inst);
631 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
631 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
644 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
644 if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
654 if (auto *LI = dyn_cast<LoadInst>(Inst))
654 if (auto *LI = dyn_cast<LoadInst>(Inst))
702 if (auto *LI = dyn_cast<LoadInst>(Inst))
702 if (auto *LI = dyn_cast<LoadInst>(Inst))
793 if (auto *LI = dyn_cast<LoadInst>(I))
793 if (auto *LI = dyn_cast<LoadInst>(I))
lib/Transforms/Scalar/GVN.cpp 194 static AvailableValue getLoad(LoadInst *LI, unsigned Offset = 0) {
220 LoadInst *getCoercedLoadValue() const {
222 return cast<LoadInst>(Val.getPointer());
232 Value *MaterializeAdjustedValue(LoadInst *LI, Instruction *InsertPt,
263 Value *MaterializeAdjustedValue(LoadInst *LI, GVN &gvn) const {
730 static Value *ConstructSSAForLoadSet(LoadInst *LI,
770 Value *AvailableValue::MaterializeAdjustedValue(LoadInst *LI,
787 LoadInst *Load = getCoercedLoadValue();
827 static void reportMayClobberedLoad(LoadInst *LI, MemDepResult DepInfo,
839 if (U != LI && (isa<LoadInst>(U) || isa<StoreInst>(U)) &&
858 bool GVN::AnalyzeLoadAvailability(LoadInst *LI, MemDepResult DepInfo,
887 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
887 if (LoadInst *DepLI = dyn_cast<LoadInst>(DepInst)) {
956 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
956 if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {
979 void GVN::AnalyzeLoadAvailability(LoadInst *LI, LoadDepVect &Deps,
1024 bool GVN::PerformLoadPRE(LoadInst *LI, AvailValInBlkVect &ValuesPerBlock,
1244 auto *NewLoad = new LoadInst(
1244 auto *NewLoad = new LoadInst(
1294 static void reportLoadElim(LoadInst *LI, Value *AvailableValue,
1308 bool GVN::processNonLocalLoad(LoadInst *LI) {
1516 bool GVN::processLoad(LoadInst *L) {
1972 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1972 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
lib/Transforms/Scalar/GVNHoist.cpp 186 void insert(LoadInst *Load, GVN::ValueTable &VN) {
878 if (auto *OtherLd = dyn_cast<LoadInst>(OtherInst))
891 if (auto *ReplacementLoad = dyn_cast<LoadInst>(Repl)) {
891 if (auto *ReplacementLoad = dyn_cast<LoadInst>(Repl)) {
893 ReplacementLoad->getAlignment(), cast<LoadInst>(I)->getAlignment())));
979 if (auto *Ld = dyn_cast<LoadInst>(Repl)) {
979 if (auto *Ld = dyn_cast<LoadInst>(Repl)) {
1060 if (isa<LoadInst>(Repl))
1103 if (auto *Load = dyn_cast<LoadInst>(&I1))
1103 if (auto *Load = dyn_cast<LoadInst>(&I1))
lib/Transforms/Scalar/GVNSink.cpp 101 return isa<LoadInst>(I) || isa<StoreInst>(I) ||
410 template <class Inst> InstructionUseExpr *createMemoryExpr(Inst *I) {
437 exp = createMemoryExpr(cast<LoadInst>(I));
543 if (isa<LoadInst>(&*I))
lib/Transforms/Scalar/InferAddressSpaces.cpp 360 } else if (auto *LI = dyn_cast<LoadInst>(&I))
360 } else if (auto *LI = dyn_cast<LoadInst>(&I))
764 if (auto *LI = dyn_cast<LoadInst>(Inst))
764 if (auto *LI = dyn_cast<LoadInst>(Inst))
765 return OpNo == LoadInst::getPointerOperandIndex() &&
lib/Transforms/Scalar/JumpThreading.cpp 1208 if (LoadInst *LoadI = dyn_cast<LoadInst>(SimplifyValue))
1208 if (LoadInst *LoadI = dyn_cast<LoadInst>(SimplifyValue))
1293 bool JumpThreadingPass::SimplifyPartiallyRedundantLoad(LoadInst *LoadI) {
1326 LoadInst *NLoadI = cast<LoadInst>(AvailableVal);
1326 LoadInst *NLoadI = cast<LoadInst>(AvailableVal);
1359 SmallVector<LoadInst*, 8> CSELoads;
1403 CSELoads.push_back(cast<LoadInst>(PredAvailable));
1472 LoadInst *NewVal = new LoadInst(
1472 LoadInst *NewVal = new LoadInst(
1517 for (LoadInst *PredLoadI : CSELoads) {
lib/Transforms/Scalar/LICM.cpp 981 static bool isLoadInvariantInLoop(LoadInst *LI, DominatorTree *DT,
1035 return (isa<LoadInst>(I) || isa<StoreInst>(I) || isa<CallInst>(I) ||
1095 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
1095 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
1252 if (auto *LI = dyn_cast<LoadInst>(MD->getMemoryInst())) {
1252 if (auto *LI = dyn_cast<LoadInst>(MD->getMemoryInst())) {
1312 (!isa<StoreInst>(UI) && !isa<LoadInst>(UI))))
1571 if (isa<LoadInst>(I))
1696 if (isa<LoadInst>(I))
1719 auto *LI = dyn_cast<LoadInst>(&Inst);
1719 auto *LI = dyn_cast<LoadInst>(&Inst);
1782 if (LoadInst *LI = dyn_cast<LoadInst>(I))
1782 if (LoadInst *LI = dyn_cast<LoadInst>(I))
1825 void replaceLoadWithValue(LoadInst *LI, Value *V) const override {
1968 if (LoadInst *Load = dyn_cast<LoadInst>(UI)) {
1968 if (LoadInst *Load = dyn_cast<LoadInst>(UI)) {
2117 LoadInst *PreheaderLoad = new LoadInst(
2117 LoadInst *PreheaderLoad = new LoadInst(
lib/Transforms/Scalar/LoopDataPrefetch.cpp 256 if (LoadInst *LMemI = dyn_cast<LoadInst>(&I)) {
256 if (LoadInst *LMemI = dyn_cast<LoadInst>(&I)) {
lib/Transforms/Scalar/LoopDistribute.cpp 369 if (isa<LoadInst>(Inst)) {
lib/Transforms/Scalar/LoopFuse.cpp 199 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
199 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
lib/Transforms/Scalar/LoopIdiomRecognize.cpp 237 LoadInst *&LoadA, LoadInst *&LoadB, const SCEV *&SrcA,
237 LoadInst *&LoadA, LoadInst *&LoadB, const SCEV *&SrcA,
241 LoadInst *LoadA, LoadInst *LoadB, const SCEV *SrcA,
241 LoadInst *LoadA, LoadInst *LoadB, const SCEV *SrcA,
547 LoadInst *LI = dyn_cast<LoadInst>(SI->getValueOperand());
547 LoadInst *LI = dyn_cast<LoadInst>(SI->getValueOperand());
1066 LoadInst *LI = cast<LoadInst>(SI->getValueOperand());
1066 LoadInst *LI = cast<LoadInst>(SI->getValueOperand());
2127 LoadInst *&LoadA, LoadInst *&LoadB,
2127 LoadInst *&LoadA, LoadInst *&LoadB,
2152 LoadA = cast<LoadInst>(CmpOfLoads.LoadA); // these cast with
2153 LoadB = cast<LoadInst>(CmpOfLoads.LoadB); // m_Value() matcher?
2593 LoadInst *LoadA, LoadInst *LoadB,
2593 LoadInst *LoadA, LoadInst *LoadB,
2685 LoadInst *LoadA, *LoadB;
lib/Transforms/Scalar/LoopInterchange.cpp 97 if (auto *Ld = dyn_cast<LoadInst>(&I)) {
97 if (auto *Ld = dyn_cast<LoadInst>(&I)) {
122 if (isa<LoadInst>(Src) && isa<LoadInst>(Dst))
122 if (isa<LoadInst>(Src) && isa<LoadInst>(Dst))
lib/Transforms/Scalar/LoopLoadElimination.cpp 89 LoadInst *Load;
92 StoreToLoadForwardingCandidate(LoadInst *Load, StoreInst *Store)
156 static bool isLoadConditional(LoadInst *Load, Loop *L) {
194 if (isa<LoadInst>(Source))
196 if (isa<LoadInst>(Destination))
212 auto *Load = dyn_cast<LoadInst>(Destination);
212 auto *Load = dyn_cast<LoadInst>(Destination);
262 DenseMap<LoadInst *, const StoreToLoadForwardingCandidate *>;
343 LoadInst *LastLoad =
436 Value *Initial = new LoadInst(
lib/Transforms/Scalar/LoopPredication.cpp 543 if (const auto *LI = dyn_cast<LoadInst>(U->getValue()))
543 if (const auto *LI = dyn_cast<LoadInst>(U->getValue()))
lib/Transforms/Scalar/LoopRerollPass.cpp 730 if (LoadInst *LI = dyn_cast<LoadInst>(I))
730 if (LoadInst *LI = dyn_cast<LoadInst>(I))
lib/Transforms/Scalar/LoopStrengthReduce.cpp 799 bool isAddress = isa<LoadInst>(Inst);
843 } else if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
843 } else if (const LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
lib/Transforms/Scalar/LoopUnrollAndJamPass.cpp 259 if (auto *Ld = dyn_cast<LoadInst>(&I)) {
259 if (auto *Ld = dyn_cast<LoadInst>(&I)) {
lib/Transforms/Scalar/LoopVersioningLICM.cpp 352 LoadInst *Ld = dyn_cast<LoadInst>(I);
352 LoadInst *Ld = dyn_cast<LoadInst>(I);
lib/Transforms/Scalar/LowerAtomic.cpp 29 LoadInst *Orig = Builder.CreateLoad(Val->getType(), Ptr);
47 LoadInst *Orig = Builder.CreateLoad(Val->getType(), Ptr);
107 static bool LowerLoadInst(LoadInst *LI) {
127 else if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
127 else if (LoadInst *LI = dyn_cast<LoadInst>(Inst)) {
lib/Transforms/Scalar/MemCpyOptimizer.cpp 426 static unsigned findLoadAlignment(const DataLayout &DL, const LoadInst *LI) {
434 const LoadInst *LI) {
445 const LoadInst *LI) {
502 } else if (isa<LoadInst>(C) || isa<StoreInst>(C) || isa<VAArgInst>(C)) {
549 if (LoadInst *LI = dyn_cast<LoadInst>(SI->getOperand(0))) {
549 if (LoadInst *LI = dyn_cast<LoadInst>(SI->getOperand(0))) {
lib/Transforms/Scalar/MergeICmps.cpp 70 if (const LoadInst *LI = dyn_cast<LoadInst>(I))
70 if (const LoadInst *LI = dyn_cast<LoadInst>(I))
82 BCEAtom(GetElementPtrInst *GEP, LoadInst *LoadI, int BaseId, APInt Offset)
114 LoadInst *LoadI = nullptr;
142 auto *const LoadI = dyn_cast<LoadInst>(Val);
142 auto *const LoadI = dyn_cast<LoadInst>(Val);
lib/Transforms/Scalar/NewGVN.cpp 686 LoadExpression *createLoadExpression(Type *, Value *, LoadInst *,
745 const Expression *performSymbolicLoadCoercion(Type *, Value *, LoadInst *,
1326 LoadInst *LI,
1397 if (auto *LI = dyn_cast<LoadInst>(LastStore->getStoredValue()))
1397 if (auto *LI = dyn_cast<LoadInst>(LastStore->getStoredValue()))
1416 LoadInst *LI, Instruction *DepInst,
1436 } else if (auto *DepLI = dyn_cast<LoadInst>(DepInst)) {
1436 } else if (auto *DepLI = dyn_cast<LoadInst>(DepInst)) {
1491 auto *LI = cast<LoadInst>(I);
1491 auto *LI = cast<LoadInst>(I);
2579 isa<LoadInst>(I);
3654 if (!isa<LoadInst>(D) && !isa<StoreInst>(D))
4116 if (isa<LoadInst>(Member))
lib/Transforms/Scalar/RewriteStatepointsForGC.cpp 448 if (isa<LoadInst>(I))
533 if (isa<LoadInst>(I))
1829 LoadInst *Load =
1830 new LoadInst(Alloca->getAllocatedType(), Alloca, "",
1836 LoadInst *Load =
1837 new LoadInst(Alloca->getAllocatedType(), Alloca, "", Use);
2415 if (!isa<LoadInst>(I) && !isa<StoreInst>(I))
lib/Transforms/Scalar/SCCP.cpp 632 void visitLoadInst (LoadInst &I);
1149 void SCCPSolver::visitLoadInst(LoadInst &I) {
lib/Transforms/Scalar/SROA.cpp 779 void visitLoadInst(LoadInst &LI) {
961 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
961 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
1144 if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {
1144 if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {
1207 LoadInst *LI = dyn_cast<LoadInst>(U);
1207 LoadInst *LI = dyn_cast<LoadInst>(U);
1265 LoadInst *SomeLoad = cast<LoadInst>(PN.user_back());
1265 LoadInst *SomeLoad = cast<LoadInst>(PN.user_back());
1279 LoadInst *LI = cast<LoadInst>(PN.user_back());
1279 LoadInst *LI = cast<LoadInst>(PN.user_back());
1302 LoadInst *Load = PredBuilder.CreateLoad(
1336 LoadInst *LI = dyn_cast<LoadInst>(U);
1336 LoadInst *LI = dyn_cast<LoadInst>(U);
1362 LoadInst *LI = cast<LoadInst>(SI.user_back());
1362 LoadInst *LI = cast<LoadInst>(SI.user_back());
1366 LoadInst *TL = IRB.CreateLoad(LI->getType(), TV,
1368 LoadInst *FL = IRB.CreateLoad(LI->getType(), FV,
1687 if (auto *LI = dyn_cast<LoadInst>(I)) {
1687 if (auto *LI = dyn_cast<LoadInst>(I)) {
1849 } else if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {
1849 } else if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {
1912 if (auto *LI = dyn_cast<LoadInst>(S.getUse()->getUser()))
1912 if (auto *LI = dyn_cast<LoadInst>(S.getUse()->getUser()))
2018 if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {
2018 if (LoadInst *LI = dyn_cast<LoadInst>(U->getUser())) {
2469 Value *rewriteIntegerLoad(LoadInst &LI) {
2493 bool visitLoadInst(LoadInst &LI) {
2517 LoadInst *NewLI = IRB.CreateAlignedLoad(NewAI.getAllocatedType(), &NewAI,
2554 LoadInst *NewLI = IRB.CreateAlignedLoad(
2581 Value *Placeholder = new LoadInst(
3042 LoadInst *Load = IRB.CreateAlignedLoad(OtherTy, SrcPtr, SrcAlign,
3119 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
3119 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
3363 LoadInst *Load = IRB.CreateAlignedLoad(Ty, GEP, Align, Name + ".load");
3371 bool visitLoadInst(LoadInst &LI) {
3635 SmallVector<LoadInst *, 4> Loads;
3659 SmallPtrSet<LoadInst *, 8> UnsplittableLoads;
3669 if (auto *LI = dyn_cast<LoadInst>(I))
3669 if (auto *LI = dyn_cast<LoadInst>(I))
3672 if (auto *LI = dyn_cast<LoadInst>(SI->getValueOperand()))
3672 if (auto *LI = dyn_cast<LoadInst>(SI->getValueOperand()))
3680 if (auto *LI = dyn_cast<LoadInst>(I)) {
3680 if (auto *LI = dyn_cast<LoadInst>(I)) {
3704 auto *StoredLoad = dyn_cast<LoadInst>(SI->getValueOperand());
3704 auto *StoredLoad = dyn_cast<LoadInst>(SI->getValueOperand());
3756 auto *LI = cast<LoadInst>(SI->getValueOperand());
3756 auto *LI = cast<LoadInst>(SI->getValueOperand());
3796 auto *LI =
3797 cast<LoadInst>(SI->getValueOperand());
3833 SmallDenseMap<LoadInst *, std::vector<LoadInst *>, 1> SplitLoadsMap;
3833 SmallDenseMap<LoadInst *, std::vector<LoadInst *>, 1> SplitLoadsMap;
3834 std::vector<LoadInst *> SplitLoads;
3836 for (LoadInst *LI : Loads) {
3861 LoadInst *PLoad = IRB.CreateAlignedLoad(
3913 LoadInst *PLoad = SplitLoads[Idx];
3961 auto *LI = cast<LoadInst>(SI->getValueOperand());
3961 auto *LI = cast<LoadInst>(SI->getValueOperand());
3980 std::vector<LoadInst *> *SplitLoads = nullptr;
3997 LoadInst *PLoad;
4293 if (isa<LoadInst>(S.getUse()->getUser()) ||
4310 if (isa<LoadInst>(S.getUse()->getUser()) ||
lib/Transforms/Scalar/Scalarizer.cpp 195 bool visitLoadInst(LoadInst &LI);
766 bool ScalarizerVisitor::visitLoadInst(LoadInst &LI) {
lib/Transforms/Scalar/Sink.cpp 67 if (LoadInst *L = dyn_cast<LoadInst>(Inst)) {
67 if (LoadInst *L = dyn_cast<LoadInst>(Inst)) {
lib/Transforms/Scalar/TailRecursionElimination.cpp 334 if (LoadInst *L = dyn_cast<LoadInst>(I)) {
334 if (LoadInst *L = dyn_cast<LoadInst>(I)) {
lib/Transforms/Utils/CodeExtractor.cpp 329 LoadInst *LI = cast<LoadInst>(&II);
329 LoadInst *LI = cast<LoadInst>(&II);
939 RewriteVal = new LoadInst(StructTy->getElementType(i), GEP,
1166 LoadInst *load = new LoadInst(outputs[i]->getType(), Output,
1166 LoadInst *load = new LoadInst(outputs[i]->getType(), Output,
lib/Transforms/Utils/DemoteRegToStack.cpp 75 V = new LoadInst(I.getType(), Slot, I.getName() + ".reload",
84 Value *V = new LoadInst(I.getType(), Slot, I.getName() + ".reload",
147 new LoadInst(P->getType(), Slot, P->getName() + ".reload", &*InsertPt);
lib/Transforms/Utils/Evaluator.cpp 443 } else if (LoadInst *LI = dyn_cast<LoadInst>(CurInst)) {
443 } else if (LoadInst *LI = dyn_cast<LoadInst>(CurInst)) {
lib/Transforms/Utils/FunctionComparator.cpp 542 if (const LoadInst *LI = dyn_cast<LoadInst>(L)) {
542 if (const LoadInst *LI = dyn_cast<LoadInst>(L)) {
543 if (int Res = cmpNumbers(LI->isVolatile(), cast<LoadInst>(R)->isVolatile()))
546 cmpNumbers(LI->getAlignment(), cast<LoadInst>(R)->getAlignment()))
549 cmpOrderings(LI->getOrdering(), cast<LoadInst>(R)->getOrdering()))
552 cast<LoadInst>(R)->getSyncScopeID()))
555 cast<LoadInst>(R)->getMetadata(LLVMContext::MD_range));
lib/Transforms/Utils/GlobalStatus.cpp 88 if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {
88 if (const LoadInst *LI = dyn_cast<LoadInst>(I)) {
123 } else if (isa<LoadInst>(StoredVal) &&
124 cast<LoadInst>(StoredVal)->getOperand(0) == GV) {
lib/Transforms/Utils/InlineFunction.cpp 985 if (const LoadInst *LI = dyn_cast<LoadInst>(I))
985 if (const LoadInst *LI = dyn_cast<LoadInst>(I))
lib/Transforms/Utils/Local.cpp 1318 LoadInst *LI, DIBuilder &Builder) {
1416 if (LoadInst *LI = dyn_cast<LoadInst>(U))
1416 if (LoadInst *LI = dyn_cast<LoadInst>(U))
1429 } else if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
1429 } else if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
2359 if (isa<LoadInst>(K) || isa<StoreInst>(K))
2376 void llvm::copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source) {
2376 void llvm::copyMetadataForLoad(LoadInst &Dest, const LoadInst &Source) {
2438 if (!isa<LoadInst>(I))
2541 void llvm::copyNonnullMetadata(const LoadInst &OldLI, MDNode *N,
2542 LoadInst &NewLI) {
2566 void llvm::copyRangeMetadata(const DataLayout &DL, const LoadInst &OldLI,
2567 MDNode *N, LoadInst &NewLI) {
lib/Transforms/Utils/LoopUnrollAndJam.cpp 595 if (auto *Ld = dyn_cast<LoadInst>(&I)) {
595 if (auto *Ld = dyn_cast<LoadInst>(&I)) {
624 if (isa<LoadInst>(Src) && isa<LoadInst>(Dst))
624 if (isa<LoadInst>(Src) && isa<LoadInst>(Dst))
lib/Transforms/Utils/LoopVersioning.cpp 228 const Value *Ptr = isa<LoadInst>(OrigInst)
229 ? cast<LoadInst>(OrigInst)->getPointerOperand()
lib/Transforms/Utils/PromoteMemoryToRegister.cpp 71 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
71 if (const LoadInst *LI = dyn_cast<LoadInst>(U)) {
143 LoadInst *LI = cast<LoadInst>(User);
143 LoadInst *LI = cast<LoadInst>(User);
192 return (isa<LoadInst>(I) && isa<AllocaInst>(I->getOperand(0))) ||
304 static void addAssumeNonNull(AssumptionCache *AC, LoadInst *LI) {
322 if (isa<LoadInst>(I) || isa<StoreInst>(I))
362 LoadInst *LI = cast<LoadInst>(UserInst);
362 LoadInst *LI = cast<LoadInst>(UserInst);
469 LoadInst *LI = dyn_cast<LoadInst>(*UI++);
469 LoadInst *LI = dyn_cast<LoadInst>(*UI++);
807 if (LoadInst *LI = dyn_cast<LoadInst>(I))
807 if (LoadInst *LI = dyn_cast<LoadInst>(I))
935 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
935 if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
lib/Transforms/Utils/SSAUpdater.cpp 342 if (const LoadInst *LI = dyn_cast<LoadInst>(Insts[0]))
342 if (const LoadInst *LI = dyn_cast<LoadInst>(Insts[0]))
364 SmallVector<LoadInst *, 32> LiveInLoads;
383 LiveInLoads.push_back(cast<LoadInst>(User));
402 LiveInLoads.push_back(cast<LoadInst>(I));
414 if (LoadInst *L = dyn_cast<LoadInst>(&I)) {
414 if (LoadInst *L = dyn_cast<LoadInst>(&I)) {
448 for (LoadInst *ALoad : LiveInLoads) {
481 replaceLoadWithValue(cast<LoadInst>(User), NewVal);
lib/Transforms/Utils/SimplifyCFG.cpp 1507 if (isa<LoadInst>(I0) && any_of(Insts, [](const Instruction *I) {
4182 } else if (auto *LI = dyn_cast<LoadInst>(BBI)) {
4182 } else if (auto *LI = dyn_cast<LoadInst>(BBI)) {
5995 if (LoadInst *LI = dyn_cast<LoadInst>(Use))
5995 if (LoadInst *LI = dyn_cast<LoadInst>(Use))
lib/Transforms/Utils/SimplifyLibCalls.cpp 2316 LoadInst *LI = dyn_cast<LoadInst>(CI->getArgOperand(StreamArg));
2316 LoadInst *LI = dyn_cast<LoadInst>(CI->getArgOperand(StreamArg));
lib/Transforms/Utils/VNCoercion.cpp 246 int analyzeLoadFromClobberingLoad(Type *LoadTy, Value *LoadPtr, LoadInst *DepLI,
407 Value *getLoadValueForLoad(LoadInst *SrcVal, unsigned Offset, Type *LoadTy,
432 LoadInst *NewLoad = Builder.CreateLoad(DestTy, PtrVal);
lib/Transforms/Vectorize/LoadStoreVectorizer.cpp 131 unsigned getAlignment(LoadInst *LI) const {
304 if (LoadInst *L = dyn_cast<LoadInst>(I))
304 if (LoadInst *L = dyn_cast<LoadInst>(I))
606 bool IsLoadChain = isa<LoadInst>(Chain[0]);
619 if (isa<LoadInst>(I) || isa<StoreInst>(I)) {
660 auto *MemLoad = dyn_cast<LoadInst>(MemInstr);
660 auto *MemLoad = dyn_cast<LoadInst>(MemInstr);
661 auto *ChainLoad = dyn_cast<LoadInst>(ChainInstr);
661 auto *ChainLoad = dyn_cast<LoadInst>(ChainInstr);
749 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
749 if (LoadInst *LI = dyn_cast<LoadInst>(&I)) {
927 if (isa<LoadInst>(*Operands.begin()))
1092 LoadInst *L0 = cast<LoadInst>(Chain[0]);
1092 LoadInst *L0 = cast<LoadInst>(Chain[0]);
1097 LoadTy = cast<LoadInst>(V)->getType();
1196 LoadInst *LI = Builder.CreateAlignedLoad(VecTy, Bitcast, Alignment);
lib/Transforms/Vectorize/LoopVectorizationLegality.cpp 755 } else if (auto *LD = dyn_cast<LoadInst>(&I)) {
755 } else if (auto *LD = dyn_cast<LoadInst>(&I)) {
890 auto *LI = dyn_cast<LoadInst>(&I);
890 auto *LI = dyn_cast<LoadInst>(&I);
956 LoadInst *LI = dyn_cast<LoadInst>(&I);
956 LoadInst *LI = dyn_cast<LoadInst>(&I);
lib/Transforms/Vectorize/LoopVectorize.cpp 310 if (auto *LI = dyn_cast<LoadInst>(I))
310 if (auto *LI = dyn_cast<LoadInst>(I))
904 if (LVer && (isa<LoadInst>(Orig) || isa<StoreInst>(Orig)))
1221 bool LI = isa<LoadInst>(V);
1244 if (isa<LoadInst>(I) || isa<StoreInst>(I))
2236 if (isa<LoadInst>(Instr)) {
2348 LoadInst *LI = dyn_cast<LoadInst>(Instr);
2348 LoadInst *LI = dyn_cast<LoadInst>(Instr);
3366 } else if (isa<LoadInst>(I) || isa<PHINode>(I)) {
4417 return isa<LoadInst>(U) || isa<StoreInst>(U);
4441 if (auto *Load = dyn_cast<LoadInst>(&I)) {
4441 if (auto *Load = dyn_cast<LoadInst>(&I)) {
4493 ((isa<LoadInst>(J) || isa<StoreInst>(J)) &&
4566 return isa<LoadInst>(I) ?
4612 return isa<LoadInst>(I) ? TTI.isLegalMaskedLoad(Ty, Alignment)
4619 LoadInst *LI = dyn_cast<LoadInst>(I);
4619 LoadInst *LI = dyn_cast<LoadInst>(I);
5085 if (!isa<LoadInst>(I) && !isa<StoreInst>(I) && !isa<PHINode>(I))
5505 return isa<LoadInst>(I) ||
5800 if (isa<LoadInst>(I)) {
5844 if (isa<LoadInst>(I)) {
5917 (!isa<LoadInst>(I) || !TTI.supportsEfficientVectorElementLoadStore()))
5921 if (isa<LoadInst>(I) && !TTI.prefersVectorizedAddressing())
6065 if (isa<LoadInst>(I)) {
6747 if (!isa<LoadInst>(I) && !isa<StoreInst>(I))
6914 if (isa<LoadInst>(I) || isa<StoreInst>(I)) {
lib/Transforms/Vectorize/SLPVectorizer.cpp 444 LoadInst *LI = cast<LoadInst>(UserInst);
444 LoadInst *LI = cast<LoadInst>(UserInst);
469 if (LoadInst *LI = dyn_cast<LoadInst>(I))
469 if (LoadInst *LI = dyn_cast<LoadInst>(I))
476 if (LoadInst *LI = dyn_cast<LoadInst>(I))
476 if (LoadInst *LI = dyn_cast<LoadInst>(I))
797 if (isa<LoadInst>(Op)) {
803 if (isConsecutiveAccess(cast<LoadInst>(OpLeft),
804 cast<LoadInst>(OpRight), DL, SE))
1011 if (isa<LoadInst>(OpLane0))
2421 auto *L = cast<LoadInst>(V);
2421 auto *L = cast<LoadInst>(V);
2853 LoadInst *LI = dyn_cast<LoadInst>(Vec);
2853 LoadInst *LI = dyn_cast<LoadInst>(Vec);
3165 MaybeAlign alignment(cast<LoadInst>(VL0)->getAlignment());
3827 LoadInst *LI = cast<LoadInst>(E->getSingleOperand(0));
3827 LoadInst *LI = cast<LoadInst>(E->getSingleOperand(0));
3831 LoadInst *V = Builder.CreateAlignedLoad(VecTy, Ptr, LI->getAlignment());
4017 LoadInst *LI = cast<LoadInst>(VL0);
4017 LoadInst *LI = cast<LoadInst>(VL0);
4954 else if (isa<LoadInst>(I))
lib/Transforms/Vectorize/VPlanHCFGTransforms.cpp 58 if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
lib/Transforms/Vectorize/VPlanSLP.cpp 140 return cast<LoadInst>(cast<VPInstruction>(Op)->getUnderlyingInstr())
tools/bugpoint/Miscompilation.cpp 883 new LoadInst(F->getType(), Cache, "fpcache", EntryBB);
tools/clang/lib/CodeGen/Address.h 108 template <class U> inline U cast(CodeGen::Address addr) {
tools/clang/lib/CodeGen/CGAtomic.cpp 548 llvm::LoadInst *Load = CGF.Builder.CreateLoad(Ptr);
1446 llvm::LoadInst *Load = CGF.Builder.CreateLoad(Addr, "atomic-load");
1750 auto *OldVal = CGF.Builder.CreateLoad(ExpectedAddr);
1789 auto *DesiredVal = CGF.Builder.CreateLoad(NewAtomicIntAddr);
1836 auto *OldVal = CGF.Builder.CreateLoad(ExpectedAddr);
1869 auto *DesiredVal = CGF.Builder.CreateLoad(NewAtomicIntAddr);
tools/clang/lib/CodeGen/CGBuilder.h 69 llvm::LoadInst *CreateLoad(Address Addr, const llvm::Twine &Name = "") {
74 llvm::LoadInst *CreateLoad(Address Addr, const char *Name) {
81 llvm::LoadInst *CreateLoad(Address Addr, bool IsVolatile,
90 llvm::LoadInst *CreateAlignedLoad(llvm::Value *Addr, CharUnits Align,
94 llvm::LoadInst *CreateAlignedLoad(llvm::Value *Addr, CharUnits Align,
98 llvm::LoadInst *CreateAlignedLoad(llvm::Type *Ty, llvm::Value *Addr,
128 llvm::LoadInst *CreateFlagLoad(llvm::Value *Addr,
tools/clang/lib/CodeGen/CGBuiltin.cpp 329 llvm::LoadInst *Load = CGF.Builder.CreateAlignedLoad(Ptr, LoadSize);
5525 LoadInst *Ld = Builder.CreateLoad(PtrOp0);
12331 LoadInst *Load = Builder.CreateAlignedLoad(
12343 LoadInst *Load = Builder.CreateAlignedLoad(
14553 LoadInst *QLd = Builder.CreateLoad(Dest);
14575 LoadInst *QLd = Builder.CreateLoad(Dest);
tools/clang/lib/CodeGen/CGCall.cpp 2692 llvm::LoadInst *load =
2693 dyn_cast<llvm::LoadInst>(retainedValue->stripPointerCasts());
tools/clang/lib/CodeGen/CGCleanup.cpp 310 static llvm::LoadInst *createLoadInstBefore(Address addr, const Twine &name,
312 auto load = new llvm::LoadInst(addr.getPointer(), name, beforeInst);
625 llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
625 llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());
862 llvm::LoadInst *Load =
tools/clang/lib/CodeGen/CGExpr.cpp 1672 llvm::LoadInst *Load = Builder.CreateLoad(Addr, Volatile);
1801 llvm::LoadInst *Load = Builder.CreateLoad(LV.getVectorAddress(),
2323 llvm::LoadInst *Load = Builder.CreateLoad(RefLVal.getAddress(),
tools/clang/lib/CodeGen/CGObjC.cpp 135 cast<llvm::LoadInst>(Ptr)->setMetadata(
1067 llvm::LoadInst *load = Builder.CreateLoad(ivarAddr, "load");
tools/clang/lib/CodeGen/CGObjCMac.cpp 2181 if (auto LI = dyn_cast<llvm::LoadInst>(Arg0->stripPointerCasts())) {
7075 cast<llvm::LoadInst>(IvarOffsetValue)
7479 llvm::LoadInst* LI = CGF.Builder.CreateLoad(Addr);
tools/clang/lib/CodeGen/ItaniumCXXABI.cpp 1794 auto *VFuncLoad =
2274 llvm::LoadInst *LI =
tools/clang/lib/CodeGen/MicrosoftCXXABI.cpp 2409 llvm::LoadInst *LI = Builder.CreateLoad(Guard);
2525 llvm::LoadInst *LI = Builder.CreateLoad(GuardAddr);
2558 llvm::LoadInst *FirstGuardLoad = Builder.CreateLoad(GuardAddr);
2560 llvm::LoadInst *InitThreadEpoch =
2574 llvm::LoadInst *SecondGuardLoad = Builder.CreateLoad(GuardAddr);
tools/lldb/source/Expression/IRInterpreter.cpp 1268 const LoadInst *load_inst = dyn_cast<LoadInst>(inst);
1268 const LoadInst *load_inst = dyn_cast<LoadInst>(inst);
tools/lldb/source/Plugins/ExpressionParser/Clang/IRDynamicChecks.cpp 332 if (llvm::LoadInst *li = dyn_cast<llvm::LoadInst>(inst))
332 if (llvm::LoadInst *li = dyn_cast<llvm::LoadInst>(inst))
358 if (dyn_cast<llvm::LoadInst>(&i) || dyn_cast<llvm::StoreInst>(&i))
tools/lldb/source/Plugins/ExpressionParser/Clang/IRForTarget.cpp 770 LoadInst *load = dyn_cast<LoadInst>(selector_load);
770 LoadInst *load = dyn_cast<LoadInst>(selector_load);
899 if (LoadInst *load = dyn_cast<LoadInst>(&inst))
899 if (LoadInst *load = dyn_cast<LoadInst>(&inst))
931 LoadInst *load = dyn_cast<LoadInst>(class_load);
931 LoadInst *load = dyn_cast<LoadInst>(class_load);
1050 if (LoadInst *load = dyn_cast<LoadInst>(&inst))
1050 if (LoadInst *load = dyn_cast<LoadInst>(&inst))
1128 LoadInst *persistent_load = new LoadInst(persistent_global, "", alloc);
1128 LoadInst *persistent_load = new LoadInst(persistent_global, "", alloc);
1362 SmallVector<LoadInst *, 2> load_instructions;
1365 if (LoadInst *load_instruction = dyn_cast<LoadInst>(u))
1365 if (LoadInst *load_instruction = dyn_cast<LoadInst>(u))
1374 for (LoadInst *load_instruction : load_instructions) {
1517 if (LoadInst *load = dyn_cast<LoadInst>(&inst))
1517 if (LoadInst *load = dyn_cast<LoadInst>(&inst))
1802 LoadInst *load = new LoadInst(bit_cast, "", entry_instruction);
1802 LoadInst *load = new LoadInst(bit_cast, "", entry_instruction);
tools/lldb/source/Plugins/LanguageRuntime/RenderScript/RenderScriptRuntime/RenderScriptx86ABIFixups.cpp 191 llvm::LoadInst *new_func_addr_load =
192 new llvm::LoadInst(new_func_ptr, "load_func_pointer", call_inst);
199 llvm::LoadInst *lldb_save_result_address =
200 new llvm::LoadInst(return_value_alloc, "save_return_val", call_inst);
tools/llvm-stress/llvm-stress.cpp 346 Value *V = new LoadInst(Ptr, "L", BB->getTerminator());
tools/polly/include/polly/CodeGen/BlockGenerators.h 536 Value *generateArrayLoad(ScopStmt &Stmt, LoadInst *load, ValueMapT &BBMap,
685 Value *generateStrideOneLoad(ScopStmt &Stmt, LoadInst *Load,
703 Value *generateStrideZeroLoad(ScopStmt &Stmt, LoadInst *Load,
721 Value *generateUnknownStrideLoad(ScopStmt &Stmt, LoadInst *Load,
728 void generateLoad(ScopStmt &Stmt, LoadInst *Load, ValueMapT &VectorMap,
tools/polly/include/polly/ScopBuilder.h 697 bool isRequiredInvariantLoad(LoadInst *LI) const {
tools/polly/include/polly/ScopInfo.h 2469 void addRequiredInvariantLoad(LoadInst *LI) { DC.RequiredILS.insert(LI); }
tools/polly/include/polly/Support/ScopHelper.h 42 using InvariantLoadsSetTy = llvm::SetVector<llvm::AssertingVH<llvm::LoadInst>>;
80 /* implicit */ MemAccInst(llvm::LoadInst &LI) : I(&LI) {}
81 /* implicit */ MemAccInst(llvm::LoadInst *LI) : I(LI) {}
90 return llvm::isa<llvm::LoadInst>(V) || llvm::isa<llvm::StoreInst>(V) ||
94 return llvm::isa<llvm::LoadInst>(V) || llvm::isa<llvm::StoreInst>(V) ||
127 MemAccInst &operator=(llvm::LoadInst &LI) {
131 MemAccInst &operator=(llvm::LoadInst *LI) {
259 bool isLoad() const { return I && llvm::isa<llvm::LoadInst>(I); }
268 llvm::LoadInst *asLoad() const { return llvm::cast<llvm::LoadInst>(I); }
268 llvm::LoadInst *asLoad() const { return llvm::cast<llvm::LoadInst>(I); }
414 bool isHoistableLoad(llvm::LoadInst *LInst, llvm::Region &R, llvm::LoopInfo &LI,
tools/polly/lib/Analysis/ScopBuilder.cpp 434 if (auto Load = dyn_cast<LoadInst>(Condition)) {
806 DenseMap<std::pair<const SCEV *, Type *>, LoadInst *> EquivClasses;
809 for (LoadInst *LInst : RIL) {
813 LoadInst *&ClassRep = EquivClasses[std::make_pair(PointerSCEV, Ty)];
1387 if (!isa<LoadInst>(Inst) && !isa<StoreInst>(Inst))
1457 if (!isa<LoadInst>(Inst) && !isa<StoreInst>(Inst))
1462 LoadInst *MemLoad = nullptr;
1466 MemLoad = dyn_cast<LoadInst>(SlotGEP->getPointerOperand());
1469 MemLoad = dyn_cast<LoadInst>(Slot);
1609 isa<LoadInst>(Inst) ? MemoryAccess::READ : MemoryAccess::MUST_WRITE;
1654 for (LoadInst *LInst : AccessILS)
1682 isa<LoadInst>(Inst) ? MemoryAccess::READ : MemoryAccess::MUST_WRITE;
1741 for (LoadInst *LInst : AccessILS)
1847 isa<LoadInst>(Inst) ? MemoryAccess::READ : MemoryAccess::MUST_WRITE;
1875 for (LoadInst *LInst : AccessILS)
2254 if (isa<LoadInst>(Inst) && RIL.count(cast<LoadInst>(&Inst)))
2254 if (isa<LoadInst>(Inst) && RIL.count(cast<LoadInst>(&Inst)))
2730 const LoadInst *Load =
2731 dyn_cast<const LoadInst>(CandidatePair.first->getAccessInstruction());
2744 for (LoadInst *LI : RIL) {
2809 if (!isa<LoadInst>(BasePtrInst))
2878 auto *LI = cast<LoadInst>(Access->getAccessInstruction());
2878 auto *LI = cast<LoadInst>(Access->getAccessInstruction());
2935 LoadInst *LInst = cast<LoadInst>(MA->getAccessInstruction());
2935 LoadInst *LInst = cast<LoadInst>(MA->getAccessInstruction());
3019 LoadInst *LInst = cast<LoadInst>(MA->getAccessInstruction());
3019 LoadInst *LInst = cast<LoadInst>(MA->getAccessInstruction());
3119 auto *PossibleLoad0 = dyn_cast<LoadInst>(BinOp->getOperand(0));
3119 auto *PossibleLoad0 = dyn_cast<LoadInst>(BinOp->getOperand(0));
3120 auto *PossibleLoad1 = dyn_cast<LoadInst>(BinOp->getOperand(1));
3120 auto *PossibleLoad1 = dyn_cast<LoadInst>(BinOp->getOperand(1));
3474 cast<LoadInst>(BasePtrMA->getAccessInstruction()));
3613 LoadInst *Load = dyn_cast<LoadInst>(&Inst);
3613 LoadInst *Load = dyn_cast<LoadInst>(&Inst);
tools/polly/lib/Analysis/ScopDetection.cpp 458 for (LoadInst *Load : RequiredILS) {
580 if (auto Load = dyn_cast<LoadInst>(Condition))
802 if (auto LI = dyn_cast<LoadInst>(I)) {
925 if (auto *Load = dyn_cast<LoadInst>(V)) {
925 if (auto *Load = dyn_cast<LoadInst>(V)) {
1150 auto *Load = dyn_cast<LoadInst>(Inst);
1150 auto *Load = dyn_cast<LoadInst>(Inst);
1235 Context.hasLoads |= isa<LoadInst>(MemInst);
tools/polly/lib/Analysis/ScopInfo.cpp 201 LoadInst *BasePtrLI = dyn_cast<LoadInst>(BasePtr);
201 LoadInst *BasePtrLI = dyn_cast<LoadInst>(BasePtr);
1509 else if (LoadInst *LI = dyn_cast<LoadInst>(Val)) {
1509 else if (LoadInst *LI = dyn_cast<LoadInst>(Val)) {
1795 LoadInst *LInst = dyn_cast<LoadInst>(Val);
1795 LoadInst *LInst = dyn_cast<LoadInst>(Val);
1800 LInst = cast<LoadInst>(Rep);
tools/polly/lib/CodeGen/BlockGenerators.cpp 311 Value *BlockGenerator::generateArrayLoad(ScopStmt &Stmt, LoadInst *Load,
368 if (auto *Load = dyn_cast<LoadInst>(Inst)) {
368 if (auto *Load = dyn_cast<LoadInst>(Inst)) {
1050 ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps,
1061 LoadInst *VecLoad =
1080 ScopStmt &Stmt, LoadInst *Load, ValueMapT &BBMap,
1088 LoadInst *ScalarLoad =
1103 ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps,
1125 ScopStmt &Stmt, LoadInst *Load, ValueMapT &VectorMap,
1302 if (auto *Load = dyn_cast<LoadInst>(Inst)) {
1302 if (auto *Load = dyn_cast<LoadInst>(Inst)) {
1344 auto *Val = Builder.CreateLoad(VectorPtr, Address->getName() + ".reload");
tools/polly/lib/CodeGen/IslNodeBuilder.cpp 212 if (isa<LoadInst>(Inst))
1215 if (LoadInst *PreloadInst = dyn_cast<LoadInst>(PreloadVal))
1215 if (LoadInst *PreloadInst = dyn_cast<LoadInst>(PreloadVal))
1217 MaybeAlign(dyn_cast<LoadInst>(AccInst)->getAlignment()));
tools/polly/lib/CodeGen/PerfMonitor.cpp 280 LoadInst *CyclesStart = Builder.CreateLoad(CyclesInScopStartPtr, true);
tools/polly/lib/Exchange/JSONExporter.cpp 449 if (LoadInst *LoadI = dyn_cast<LoadInst>(MA->getAccessInstruction())) {
449 if (LoadInst *LoadI = dyn_cast<LoadInst>(MA->getAccessInstruction())) {
tools/polly/lib/Support/SCEVValidator.cpp 366 ILS->insert(cast<LoadInst>(I));
525 LoadInst *LI = dyn_cast<LoadInst>(Inst);
525 LoadInst *LI = dyn_cast<LoadInst>(Inst);
tools/polly/lib/Support/ScopHelper.cpp 549 bool polly::isHoistableLoad(LoadInst *LInst, Region &R, LoopInfo &LI,
564 if (auto *DecidingLoad =
565 dyn_cast<LoadInst>(GepInst->getPointerOperand())) {
tools/polly/lib/Support/VirtualInstruction.cpp 75 if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
174 if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
tools/polly/lib/Transform/ForwardOpTree.cpp 346 MemoryAccess *makeReadArrayAccess(ScopStmt *Stmt, LoadInst *LI,
400 LoadInst *LI = dyn_cast<LoadInst>(Inst);
400 LoadInst *LI = dyn_cast<LoadInst>(Inst);
tools/polly/lib/Transform/RewriteByReferenceParameters.cpp 72 auto *LoadedVal =
73 new LoadInst(Alloca, "polly_byref_load_" + InstName, &Inst);
tools/polly/lib/Transform/ZoneAlgo.cpp 399 if (LoadInst *Load = dyn_cast_or_null<LoadInst>(MA->getAccessInstruction())) {
399 if (LoadInst *Load = dyn_cast_or_null<LoadInst>(MA->getAccessInstruction())) {
888 return isa<StoreInst>(AccInst) || isa<LoadInst>(AccInst);
unittests/Analysis/AliasAnalysisTest.cpp 178 auto *Load1 = new LoadInst(IntType, Addr, "load", BB);
178 auto *Load1 = new LoadInst(IntType, Addr, "load", BB);
unittests/Analysis/MemorySSATest.cpp 95 LoadInst *LoadInst = B.CreateLoad(B.getInt8Ty(), PointerArg);
141 LoadInst *FirstLoad = B.CreateLoad(B.getInt8Ty(), PointerArg);
169 LoadInst *SecondLoad = B.CreateLoad(B.getInt8Ty(), PointerArg);
235 LoadInst *LoadInst = B.CreateLoad(B.getInt8Ty(), PointerArg);
265 LoadInst *LoadInst1 = B.CreateLoad(B.getInt8Ty(), PointerArg);
280 LoadInst *LoadInstClone = cast<LoadInst>(LoadInst1->clone());
280 LoadInst *LoadInstClone = cast<LoadInst>(LoadInst1->clone());
349 auto *MergeLoad = B.CreateLoad(B.getInt8Ty(), PointerArg);
395 auto *MergeLoad = B.CreateLoad(B.getInt8Ty(), PointerArg);
439 auto *MergeLoad = B.CreateLoad(B.getInt8Ty(), PointerArg);
493 LoadInst *LoadInst = B.CreateLoad(B.getInt8Ty(), PointerArg);
538 LoadInst *LoadInst = B.CreateLoad(B.getInt8Ty(), PointerArg);
837 LoadInst *LoadB = B.CreateLoad(Int8, B_);
841 LoadInst *LoadC = B.CreateLoad(Int8, C);
905 LoadInst *LoadInst = B.CreateLoad(B.getInt8Ty(), FirstArg);
1013 LoadInst *LA1 = B.CreateLoad(Int8, AllocaA, "");
1015 LoadInst *LA2 = B.CreateLoad(Int8, AllocaA, "");
1019 LoadInst *LA3 = B.CreateLoad(Int8, AllocaA, "");
1021 LoadInst *LA4 = B.CreateLoad(Int8, AllocaA, "");
1027 for (LoadInst *V : {LA1, LA2}) {
1035 for (LoadInst *V : {LA3, LA4}) {
1106 LoadInst *LA1 = B.CreateLoad(Int8, PointerA, "");
1108 LoadInst *LB1 = B.CreateLoad(Int8, PointerB, "");
1110 LoadInst *LA2 = B.CreateLoad(Int8, PointerA, "");
1112 LoadInst *LB2 = B.CreateLoad(Int8, PointerB, "");
1118 for (LoadInst *V : {LA1, LB1}) {
1126 for (LoadInst *V : {LA2, LB2}) {
unittests/Analysis/OrderedInstructionsTest.cpp 47 LoadInst *LoadInstX = B.CreateLoad(B.getInt8Ty(), PointerArg);
48 LoadInst *LoadInstY = B.CreateLoad(B.getInt8Ty(), PointerArg);
53 LoadInst *LoadInstZ = B.CreateLoad(B.getInt8Ty(), PointerArg);
unittests/Analysis/PhiValuesTest.cpp 40 Value *Val1 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val1", Entry);
41 Value *Val2 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val2", Entry);
42 Value *Val3 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val3", Entry);
43 Value *Val4 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val4", Entry);
113 Value *Val1 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val1", Entry);
114 Value *Val2 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val2", Entry);
115 Value *Val3 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val3", Entry);
116 Value *Val4 = new LoadInst(I32Ty, UndefValue::get(I32PtrTy), "val4", Entry);
unittests/Analysis/ScalarEvolutionTest.cpp 415 X = new LoadInst(IntPtrTy, new IntToPtrInst(X, IntPtrPtrTy, "", EntryBB),
418 Y = new LoadInst(IntPtrTy, new IntToPtrInst(Y, IntPtrPtrTy, "", EntryBB),
1659 LoadInst *Load = cast<LoadInst>(&Entry.front());
1659 LoadInst *Load = cast<LoadInst>(&Entry.front());
unittests/Analysis/SparsePropagation.cpp 384 LoadInst *Cond = Builder.CreateLoad(Type::getInt1Ty(Context), F->arg_begin());
424 LoadInst *Cond = Builder.CreateLoad(Type::getInt1Ty(Context), F->arg_begin());
unittests/Transforms/Scalar/LoopPassManagerTest.cpp 909 auto *Cond = new LoadInst(Type::getInt1Ty(Context), &Ptr, Name,
909 auto *Cond = new LoadInst(Type::getInt1Ty(Context), &Ptr, Name,
1112 auto *Cond = new LoadInst(Type::getInt1Ty(Context), &Ptr, Name,
1112 auto *Cond = new LoadInst(Type::getInt1Ty(Context), &Ptr, Name,
1506 auto *Cond =
1507 new LoadInst(Type::getInt1Ty(Context), &Ptr, "cond.0.3",
unittests/Transforms/Utils/FunctionComparatorTest.cpp 34 LoadInst *LoadInst = B.CreateLoad(T, PointerArg);
usr/include/c++/7.4.0/type_traits 1983 { typedef _Up type; };