reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References
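
Every code line indexed below appears to reference the sanitizer-internal uptr integer type (sptr is its signed counterpart), which the ASan runtime uses in place of size_t/uintptr_t. As rough orientation only (a paraphrase, not a verbatim copy), the type boils down to a pointer-sized unsigned integer, defined in sanitizer_common/sanitizer_internal_defs.h along these lines:

    // Sketch only: paraphrased; the real header selects the exact underlying
    // type per platform.
    #if defined(_WIN64)
    typedef unsigned long long uptr;  // LLP64: long is 32-bit on 64-bit Windows.
    typedef signed long long sptr;
    #else
    typedef unsigned long uptr;       // Pointer-sized on the other platforms.
    typedef signed long sptr;
    #endif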

projects/compiler-rt/lib/asan/asan_allocator.cpp
   70 static const uptr kAllocBegMagic = 0xCC6E96B9;
   98 static const uptr kChunkHeaderSize = sizeof(ChunkHeader);
   99 static const uptr kChunkHeader2Size = sizeof(ChunkBase) - kChunkHeaderSize;
  114   uptr Beg() { return reinterpret_cast<uptr>(this) + kChunkHeaderSize; }
  115   uptr UsedSize(bool locked_version = false) {
  130   bool AddrIsInside(uptr addr, bool locked_version = false) {
  151       uptr *alloc_magic = reinterpret_cast<uptr *>(p);
  167   void *Allocate(uptr size) {
  187 void AsanMapUnmapCallback::OnMap(uptr p, uptr size) const {
  187 void AsanMapUnmapCallback::OnMap(uptr p, uptr size) const {
  194 void AsanMapUnmapCallback::OnUnmap(uptr p, uptr size) const {
  194 void AsanMapUnmapCallback::OnUnmap(uptr p, uptr size) const {
  240   static const uptr kMaxAllowedMallocSize =
  293   void RePoisonChunk(uptr chunk) {
  297     uptr allocated_size = allocator.GetActuallyAllocatedSize((void *)ac);
  298     uptr beg = ac->Beg();
  299     uptr end = ac->Beg() + ac->UsedSize(true);
  300     uptr chunk_end = chunk + allocated_size;
  305       uptr end_aligned_down = RoundDownTo(end, SHADOW_GRANULARITY);
  345   uptr ComputeRZLog(uptr user_requested_size) {
  345   uptr ComputeRZLog(uptr user_requested_size) {
  359   static uptr ComputeUserRequestedAlignmentLog(uptr user_requested_alignment) {
  359   static uptr ComputeUserRequestedAlignmentLog(uptr user_requested_alignment) {
  367   static uptr ComputeUserAlignment(uptr user_requested_alignment_log) {
  367   static uptr ComputeUserAlignment(uptr user_requested_alignment_log) {
  374   AsanChunk *ChooseChunk(uptr addr, AsanChunk *left_chunk,
  398   void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack,
  398   void *Allocate(uptr size, uptr alignment, BufferedStackTrace *stack,
  409     const uptr min_alignment = SHADOW_GRANULARITY;
  410     const uptr user_requested_alignment_log =
  424     uptr rz_log = ComputeRZLog(size);
  425     uptr rz_size = RZLog2Size(rz_log);
  426     uptr rounded_size = RoundUpTo(Max(size, kChunkHeader2Size), alignment);
  427     uptr needed_size = rounded_size + rz_size;
  470       uptr allocated_size = allocator.GetActuallyAllocatedSize(allocated);
  474     uptr alloc_beg = reinterpret_cast<uptr>(allocated);
  475     uptr alloc_end = alloc_beg + needed_size;
  476     uptr beg_plus_redzone = alloc_beg + rz_size;
  477     uptr user_beg = beg_plus_redzone;
  480     uptr user_end = user_beg + size;
  482     uptr chunk_beg = user_beg - kChunkHeaderSize;
  503       uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(allocated));
  511     uptr size_rounded_down_to_granularity =
  534       uptr fill_size = Min(size, (uptr)fl.max_malloc_fill_size);
  578       uptr scribble_start = (uptr)m + kChunkHeaderSize + kChunkHeader2Size;
  580         uptr size_to_fill = m->UsedSize() - kChunkHeader2Size;
  609   void Deallocate(void *ptr, uptr delete_size, uptr delete_alignment,
  609   void Deallocate(void *ptr, uptr delete_size, uptr delete_alignment,
  611     uptr p = reinterpret_cast<uptr>(ptr);
  614     uptr chunk_beg = p - kChunkHeaderSize;
  650   void *Reallocate(void *old_ptr, uptr new_size, BufferedStackTrace *stack) {
  652     uptr p = reinterpret_cast<uptr>(old_ptr);
  653     uptr chunk_beg = p - kChunkHeaderSize;
  666       uptr memcpy_size = Min(new_size, m->UsedSize());
  675   void *Calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
  675   void *Calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
  708       uptr *meta = reinterpret_cast<uptr *>(allocator.GetMetaData(alloc_beg));
  712     uptr *alloc_magic = reinterpret_cast<uptr *>(alloc_beg);
  718   AsanChunk *GetAsanChunkByAddr(uptr p) {
  724   AsanChunk *GetAsanChunkByAddrFastLocked(uptr p) {
  730   uptr AllocationSize(uptr p) {
  730   uptr AllocationSize(uptr p) {
  738   AsanChunkView FindHeapChunkByAddress(uptr addr) {
  747       for (uptr l = 1; l < GetPageSizeCached(); l++) {
  807 uptr AsanChunkView::Beg() const { return chunk_->Beg(); }
  808 uptr AsanChunkView::End() const { return Beg() + UsedSize(); }
  809 uptr AsanChunkView::UsedSize() const { return chunk_->UsedSize(); }
  813 uptr AsanChunkView::AllocTid() const { return chunk_->alloc_tid; }
  814 uptr AsanChunkView::FreeTid() const { return chunk_->free_tid; }
  849 AsanChunkView FindHeapChunkByAddress(uptr addr) {
  852 AsanChunkView FindHeapChunkByAllocBeg(uptr addr) {
  869 void asan_delete(void *ptr, uptr size, uptr alignment,
  869 void asan_delete(void *ptr, uptr size, uptr alignment,
  874 void *asan_malloc(uptr size, BufferedStackTrace *stack) {
  878 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
  878 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack) {
  882 void *asan_reallocarray(void *p, uptr nmemb, uptr size,
  882 void *asan_reallocarray(void *p, uptr nmemb, uptr size,
  893 void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack) {
  907 void *asan_valloc(uptr size, BufferedStackTrace *stack) {
  912 void *asan_pvalloc(uptr size, BufferedStackTrace *stack) {
  913   uptr PageSize = GetPageSizeCached();
  926 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
  926 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
  938 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack) {
  938 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack) {
  949 int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
  949 int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
  965 uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp) {
  965 uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp) {
  965 uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp) {
  967   uptr usable_size = instance.AllocationSize(reinterpret_cast<uptr>(ptr));
  975 uptr asan_mz_size(const void *ptr) {
 1003 void GetAllocatorGlobalRange(uptr *begin, uptr *end) {
 1003 void GetAllocatorGlobalRange(uptr *begin, uptr *end) {
 1008 uptr PointsIntoChunk(void* p) {
 1009   uptr addr = reinterpret_cast<uptr>(p);
 1012   uptr chunk = m->Beg();
 1023 uptr GetUserBegin(uptr chunk) {
 1023 uptr GetUserBegin(uptr chunk) {
 1029 LsanMetadata::LsanMetadata(uptr chunk) {
 1048 uptr LsanMetadata::requested_size() const {
 1063   uptr addr = reinterpret_cast<uptr>(p);
 1082 uptr __sanitizer_get_estimated_allocated_size(uptr size) {
 1082 uptr __sanitizer_get_estimated_allocated_size(uptr size) {
 1087   uptr ptr = reinterpret_cast<uptr>(p);
 1091 uptr __sanitizer_get_allocated_size(const void *p) {
 1093   uptr ptr = reinterpret_cast<uptr>(p);
 1094   uptr allocated_size = instance.AllocationSize(ptr);
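
For orientation, the Allocate() arithmetic indexed above (asan_allocator.cpp, lines 424-482) lays a heap chunk out roughly as sketched below. This is an illustration reconstructed from those lines, assuming the common case where the requested alignment does not exceed SHADOW_GRANULARITY; the actual redzone size depends on ComputeRZLog() and the active flags.

    // Illustrative chunk layout (not verbatim source):
    //
    //   alloc_beg                       user_beg                    user_end       alloc_end
    //   |<---------- rz_size ---------->|<---------- size --------->|                 |
    //   [ left redzone ... ChunkHeader ][ user memory               ][ right redzone  ]
    //                    ^
    //                    chunk_beg = user_beg - kChunkHeaderSize
    //
    //   rz_size      = RZLog2Size(ComputeRZLog(size));
    //   rounded_size = RoundUpTo(Max(size, kChunkHeader2Size), alignment);
    //   needed_size  = rounded_size + rz_size;
    //   user_beg     = alloc_beg + rz_size;     // when alignment <= SHADOW_GRANULARITY
    //   user_end     = user_beg + size;
    //   alloc_end    = alloc_beg + needed_size;
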
projects/compiler-rt/lib/asan/asan_allocator.h
   57   uptr Beg() const;            // First byte of user memory.
   58   uptr End() const;            // Last byte of user memory.
   59   uptr UsedSize() const;       // Size requested by the user.
   61   uptr AllocTid() const;
   62   uptr FreeTid() const;
   69   bool AddrIsInside(uptr addr, uptr access_size, sptr *offset) const {
   69   bool AddrIsInside(uptr addr, uptr access_size, sptr *offset) const {
   76   bool AddrIsAtLeft(uptr addr, uptr access_size, sptr *offset) const {
   76   bool AddrIsAtLeft(uptr addr, uptr access_size, sptr *offset) const {
   84   bool AddrIsAtRight(uptr addr, uptr access_size, sptr *offset) const {
   84   bool AddrIsAtRight(uptr addr, uptr access_size, sptr *offset) const {
   96 AsanChunkView FindHeapChunkByAddress(uptr address);
   97 AsanChunkView FindHeapChunkByAllocBeg(uptr address);
  107   uptr size() { return size_; }
  113   uptr size_;
  117   void OnMap(uptr p, uptr size) const;
  117   void OnMap(uptr p, uptr size) const;
  118   void OnUnmap(uptr p, uptr size) const;
  118   void OnUnmap(uptr p, uptr size) const;
  150 const uptr kAllocatorSpace = 0x600000000000ULL;
  151 const uptr kAllocatorSize  =  0x40000000000ULL;  // 4T.
  156   static const uptr kSpaceBeg = kAllocatorSpace;
  157   static const uptr kSpaceSize = kAllocatorSize;
  158   static const uptr kMetadataSize = 0;
  161   static const uptr kFlags = 0;
  186 static const uptr kNumberOfSizeClasses = SizeClassMap::kNumClasses;
  195   uptr quarantine_cache[16];
  203 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
  203 void *asan_memalign(uptr alignment, uptr size, BufferedStackTrace *stack,
  206 void asan_delete(void *ptr, uptr size, uptr alignment,
  206 void asan_delete(void *ptr, uptr size, uptr alignment,
  209 void *asan_malloc(uptr size, BufferedStackTrace *stack);
  210 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
  210 void *asan_calloc(uptr nmemb, uptr size, BufferedStackTrace *stack);
  211 void *asan_realloc(void *p, uptr size, BufferedStackTrace *stack);
  212 void *asan_reallocarray(void *p, uptr nmemb, uptr size,
  212 void *asan_reallocarray(void *p, uptr nmemb, uptr size,
  214 void *asan_valloc(uptr size, BufferedStackTrace *stack);
  215 void *asan_pvalloc(uptr size, BufferedStackTrace *stack);
  217 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack);
  217 void *asan_aligned_alloc(uptr alignment, uptr size, BufferedStackTrace *stack);
  218 int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
  218 int asan_posix_memalign(void **memptr, uptr alignment, uptr size,
  220 uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp);
  220 uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp);
  220 uptr asan_malloc_usable_size(const void *ptr, uptr pc, uptr bp);
  222 uptr asan_mz_size(const void *ptr);
projects/compiler-rt/lib/asan/asan_debugging.cpp
   26 static void FindInfoForStackVar(uptr addr, const char *frame_descr, uptr offset,
   26 static void FindInfoForStackVar(uptr addr, const char *frame_descr, uptr offset,
   27                                 char *name, uptr name_size,
   28                                 uptr *region_address, uptr *region_size) {
   28                                 uptr *region_address, uptr *region_size) {
   35   for (uptr i = 0; i < vars.size(); i++) {
   49 uptr AsanGetStack(uptr addr, uptr *trace, u32 size, u32 *thread_id,
   49 uptr AsanGetStack(uptr addr, uptr *trace, u32 size, u32 *thread_id,
   49 uptr AsanGetStack(uptr addr, uptr *trace, u32 size, u32 *thread_id,
   67     for (uptr i = 0; i < size; i++)
   79 const char *__asan_locate_address(uptr addr, char *name, uptr name_size,
   79 const char *__asan_locate_address(uptr addr, char *name, uptr name_size,
   80                                   uptr *region_address_ptr,
   81                                   uptr *region_size_ptr) {
   83   uptr region_address = 0;
   84   uptr region_size = 0;
  131 uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  131 uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  131 uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  131 uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  136 uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  136 uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  136 uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  136 uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size, u32 *thread_id) {
  141 void __asan_get_shadow_mapping(uptr *shadow_scale, uptr *shadow_offset) {
  141 void __asan_get_shadow_mapping(uptr *shadow_scale, uptr *shadow_offset) {
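
The asan_debugging.cpp entry points above back the public debugging interface declared in <sanitizer/asan_interface.h>, where the declarations use void*/size_t rather than uptr. A minimal usage sketch, assuming the program is built with -fsanitize=address:

    #include <sanitizer/asan_interface.h>
    #include <cstdio>
    #include <cstdlib>

    int main() {
      char *p = static_cast<char *>(std::malloc(32));
      char name[128] = {};
      void *region = nullptr;
      size_t region_size = 0;
      // Classify the address and recover the enclosing region (a heap chunk here).
      const char *kind = __asan_locate_address(p + 4, name, sizeof(name),
                                               &region, &region_size);
      std::printf("%p is a '%s' address, region %p + %zu bytes\n",
                  static_cast<void *>(p + 4), kind, region, region_size);

      void *trace[16];
      int thread_id = 0;
      // Recover the allocation stack ASan recorded for the chunk containing p.
      size_t frames = __asan_get_alloc_stack(p, trace, 16, &thread_id);
      std::printf("allocated on thread T%d, %zu frame(s) recorded\n",
                  thread_id, frames);
      std::free(p);
      return 0;
    }
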
projects/compiler-rt/lib/asan/asan_descriptions.cpp
   71 static bool GetShadowKind(uptr addr, ShadowKind *shadow_kind) {
   86 bool DescribeAddressIfShadow(uptr addr) {
   93 bool GetShadowAddressInformation(uptr addr, ShadowAddressDescription *descr) {
  105                                             AsanChunkView chunk, uptr addr,
  106                                             uptr access_size) {
  127 static void PrintHeapChunkAccess(uptr addr, const ChunkAccess &descr) {
  156 bool GetHeapAddressInformation(uptr addr, uptr access_size,
  156 bool GetHeapAddressInformation(uptr addr, uptr access_size,
  181 bool DescribeAddressIfHeap(uptr addr, uptr access_size) {
  181 bool DescribeAddressIfHeap(uptr addr, uptr access_size) {
  194 bool GetStackAddressInformation(uptr addr, uptr access_size,
  194 bool GetStackAddressInformation(uptr addr, uptr access_size,
  224 static void PrintAccessAndVarIntersection(const StackVarDescr &var, uptr addr,
  225                                           uptr access_size, uptr prev_var_end,
  225                                           uptr access_size, uptr prev_var_end,
  226                                           uptr next_var_beg) {
  227   uptr var_end = var.beg + var.size;
  228   uptr addr_end = addr + access_size;
  250   for (uptr i = 0; i < var.name_len; ++i) {
  269 bool DescribeAddressIfStack(uptr addr, uptr access_size) {
  269 bool DescribeAddressIfStack(uptr addr, uptr access_size) {
  277 static void DescribeAddressRelativeToGlobal(uptr addr, uptr access_size,
  277 static void DescribeAddressRelativeToGlobal(uptr addr, uptr access_size,
  302 bool GetGlobalAddressInformation(uptr addr, uptr access_size,
  302 bool GetGlobalAddressInformation(uptr addr, uptr access_size,
  312 bool DescribeAddressIfGlobal(uptr addr, uptr access_size,
  312 bool DescribeAddressIfGlobal(uptr addr, uptr access_size,
  341   for (uptr i = 0; i < size; i++) {
  343     for (uptr j = 0; j < other.size; j++) {
  390   uptr n_objects = vars.size();
  395   for (uptr i = 0; i < n_objects; i++) {
  396     uptr prev_var_end = i ? vars[i - 1].beg + vars[i - 1].size : 0;
  397     uptr next_var_beg = i + 1 < n_objects ? vars[i + 1].beg : ~(0UL);
  439 AddressDescription::AddressDescription(uptr addr, uptr access_size,
  439 AddressDescription::AddressDescription(uptr addr, uptr access_size,
  470 void PrintAddressDescription(uptr addr, uptr access_size,
  470 void PrintAddressDescription(uptr addr, uptr access_size,
projects/compiler-rt/lib/asan/asan_descriptions.h
   93   uptr addr;
  100 bool GetShadowAddressInformation(uptr addr, ShadowAddressDescription *descr);
  101 bool DescribeAddressIfShadow(uptr addr);
  111   uptr bad_addr;
  113   uptr chunk_begin;
  114   uptr chunk_size;
  121   uptr addr;
  122   uptr alloc_tid;
  123   uptr free_tid;
  131 bool GetHeapAddressInformation(uptr addr, uptr access_size,
  131 bool GetHeapAddressInformation(uptr addr, uptr access_size,
  133 bool DescribeAddressIfHeap(uptr addr, uptr access_size = 1);
  133 bool DescribeAddressIfHeap(uptr addr, uptr access_size = 1);
  136   uptr addr;
  137   uptr tid;
  138   uptr offset;
  139   uptr frame_pc;
  140   uptr access_size;
  146 bool GetStackAddressInformation(uptr addr, uptr access_size,
  146 bool GetStackAddressInformation(uptr addr, uptr access_size,
  150   uptr addr;
  155   uptr access_size;
  165 bool GetGlobalAddressInformation(uptr addr, uptr access_size,
  165 bool GetGlobalAddressInformation(uptr addr, uptr access_size,
  167 bool DescribeAddressIfGlobal(uptr addr, uptr access_size, const char *bug_type);
  167 bool DescribeAddressIfGlobal(uptr addr, uptr access_size, const char *bug_type);
  177 void PrintAddressDescription(uptr addr, uptr access_size = 1,
  177 void PrintAddressDescription(uptr addr, uptr access_size = 1,
  196       uptr addr;
  206   explicit AddressDescription(uptr addr, bool shouldLockThreadRegistry = true)
  208   AddressDescription(uptr addr, uptr access_size,
  208   AddressDescription(uptr addr, uptr access_size,
  211   uptr Address() const {
projects/compiler-rt/lib/asan/asan_errors.cpp
   76   const uptr user_alignment =
  333   uptr granularity = SHADOW_GRANULARITY;
  386 ErrorGeneric::ErrorGeneric(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr addr,
  386 ErrorGeneric::ErrorGeneric(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr addr,
  386 ErrorGeneric::ErrorGeneric(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr addr,
  386 ErrorGeneric::ErrorGeneric(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr addr,
  387                            bool is_write_, uptr access_size_)
  539                              u8 *bytes, u8 *guilty, uptr n) {
  542   for (uptr i = 0; i < n; i++) {
  552 static void PrintShadowMemoryForAddress(uptr addr) {
  554   uptr shadow_addr = MemToShadow(addr);
  555   const uptr n_bytes_per_row = 16;
  556   uptr aligned_shadow = shadow_addr & ~(n_bytes_per_row - 1);
  560     uptr row_shadow_addr = aligned_shadow + i * n_bytes_per_row;
  576   uptr addr = addr_description.Address();
projects/compiler-rt/lib/asan/asan_errors.h
   72   ErrorDoubleFree(u32 tid, BufferedStackTrace *stack, uptr addr)
   84   uptr delete_size;
   85   uptr delete_alignment;
   88   ErrorNewDeleteTypeMismatch(u32 tid, BufferedStackTrace *stack, uptr addr,
   89                              uptr delete_size_, uptr delete_alignment_)
   89                              uptr delete_size_, uptr delete_alignment_)
  104   ErrorFreeNotMalloced(u32 tid, BufferedStackTrace *stack, uptr addr)
  117   ErrorAllocTypeMismatch(u32 tid, BufferedStackTrace *stack, uptr addr,
  132   ErrorMallocUsableSizeNotOwned(u32 tid, BufferedStackTrace *stack_, uptr addr)
  145                                          uptr addr)
  154   uptr count;
  155   uptr size;
  158   ErrorCallocOverflow(u32 tid, BufferedStackTrace *stack_, uptr count_,
  159                       uptr size_)
  169   uptr count;
  170   uptr size;
  173   ErrorReallocArrayOverflow(u32 tid, BufferedStackTrace *stack_, uptr count_,
  174                             uptr size_)
  184   uptr size;
  187   ErrorPvallocOverflow(u32 tid, BufferedStackTrace *stack_, uptr size_)
  196   uptr alignment;
  200                                   uptr alignment_)
  209   uptr size;
  210   uptr alignment;
  214                                     uptr size_, uptr alignment_)
  214                                     uptr size_, uptr alignment_)
  224   uptr alignment;
  228                                      uptr alignment_)
  237   uptr user_size;
  238   uptr total_size;
  239   uptr max_size;
  243                             uptr user_size_, uptr total_size_, uptr max_size_)
  243                             uptr user_size_, uptr total_size_, uptr max_size_)
  243                             uptr user_size_, uptr total_size_, uptr max_size_)
  264   uptr requested_size;
  267   ErrorOutOfMemory(u32 tid, BufferedStackTrace *stack_, uptr requested_size_)
  276   uptr length1, length2;
  283                                          uptr addr1, uptr length1_, uptr addr2,
  283                                          uptr addr1, uptr length1_, uptr addr2,
  283                                          uptr addr1, uptr length1_, uptr addr2,
  284                                          uptr length2_, const char *function_)
  303   uptr size;
  307                                   uptr addr, uptr size_)
  307                                   uptr addr, uptr size_)
  317   uptr beg, end, old_mid, new_mid;
  323                                               uptr beg_, uptr end_,
  323                                               uptr beg_, uptr end_,
  324                                               uptr old_mid_, uptr new_mid_)
  324                                               uptr old_mid_, uptr new_mid_)
  350   uptr pc, bp, sp;
  355   ErrorInvalidPointerPair(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr p1,
  355   ErrorInvalidPointerPair(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr p1,
  355   ErrorInvalidPointerPair(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr p1,
  355   ErrorInvalidPointerPair(u32 tid, uptr pc_, uptr bp_, uptr sp_, uptr p1,
  356                           uptr p2)
  368   uptr pc, bp, sp;
  369   uptr access_size;
  375   ErrorGeneric(u32 tid, uptr addr, uptr pc_, uptr bp_, uptr sp_, bool is_write_,
  375   ErrorGeneric(u32 tid, uptr addr, uptr pc_, uptr bp_, uptr sp_, bool is_write_,
  375   ErrorGeneric(u32 tid, uptr addr, uptr pc_, uptr bp_, uptr sp_, bool is_write_,
  375   ErrorGeneric(u32 tid, uptr addr, uptr pc_, uptr bp_, uptr sp_, bool is_write_,
  376                uptr access_size_);
projects/compiler-rt/lib/asan/asan_fake_stack.cpp
   29 ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   29 ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   29 ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   33     for (uptr i = 0; i < (((uptr)1) << class_id); i++) {
   44 FakeStack *FakeStack::Create(uptr stack_size_log) {
   45   static uptr kMinStackSizeLog = 16;
   46   static uptr kMaxStackSizeLog = FIRST_32_SECOND_64(24, 28);
   51   uptr size = RequiredSize(stack_size_log);
   69     for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++)
   74   uptr size = RequiredSize(stack_size_log_);
   87 FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
   87 FakeFrame *FakeStack::Allocate(uptr stack_size_log, uptr class_id,
   88                                uptr real_stack) {
   92   uptr &hint_position = hint_position_[class_id];
   96     uptr pos = ModuloNumberOfFrames(stack_size_log, class_id, hint_position++);
  115 uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  115 uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  115 uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  115 uptr FakeStack::AddrIsInFakeStack(uptr ptr, uptr *frame_beg, uptr *frame_end) {
  116   uptr stack_size_log = this->stack_size_log();
  117   uptr beg = reinterpret_cast<uptr>(GetFrame(stack_size_log, 0, 0));
  118   uptr end = reinterpret_cast<uptr>(this) + RequiredSize(stack_size_log);
  120   uptr class_id = (ptr - beg) >> stack_size_log;
  121   uptr base = beg + (class_id << stack_size_log);
  124   uptr pos = (ptr - base) >> (kMinStackFrameSizeLog + class_id);
  125   uptr res = base + pos * BytesInSizeClass(class_id);
  141 NOINLINE void FakeStack::GC(uptr real_stack) {
  142   uptr collected = 0;
  143   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
  145     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
  160   for (uptr class_id = 0; class_id < kNumberOfSizeClasses; class_id++) {
  162     for (uptr i = 0, n = NumberOfFrames(stack_size_log(), class_id); i < n;
  167       uptr begin = reinterpret_cast<uptr>(ff);
  201 ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  201 ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  201 ALWAYS_INLINE uptr OnMalloc(uptr class_id, uptr size) {
  204   uptr local_stack;
  205   uptr real_stack = reinterpret_cast<uptr>(&local_stack);
  208   uptr ptr = reinterpret_cast<uptr>(ff);
  213 ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  213 ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  213 ALWAYS_INLINE void OnFree(uptr ptr, uptr class_id, uptr size) {
  252   uptr frame_beg, frame_end;
  264 void __asan_alloca_poison(uptr addr, uptr size) {
  264 void __asan_alloca_poison(uptr addr, uptr size) {
  265   uptr LeftRedzoneAddr = addr - kAllocaRedzoneSize;
  266   uptr PartialRzAddr = addr + size;
  267   uptr RightRzAddr = (PartialRzAddr + kAllocaRedzoneMask) & ~kAllocaRedzoneMask;
  268   uptr PartialRzAligned = PartialRzAddr & ~(SHADOW_GRANULARITY - 1);
  277 void __asan_allocas_unpoison(uptr top, uptr bottom) {
  277 void __asan_allocas_unpoison(uptr top, uptr bottom) {
projects/compiler-rt/lib/asan/asan_fake_stack.h
   23   uptr magic;  // Modified by the instrumented code.
   24   uptr descr;  // Modified by the instrumented code.
   25   uptr pc;     // Modified by the instrumented code.
   26   uptr real_stack;
   57   static const uptr kMinStackFrameSizeLog = 6;  // Min frame is 64B.
   58   static const uptr kMaxStackFrameSizeLog = 16;  // Max stack frame is 64K.
   61   static const uptr kNumberOfSizeClasses =
   65   static FakeStack *Create(uptr stack_size_log);
   70   static uptr SizeRequiredForFlags(uptr stack_size_log) {
   70   static uptr SizeRequiredForFlags(uptr stack_size_log) {
   75   static uptr SizeRequiredForFrames(uptr stack_size_log) {
   75   static uptr SizeRequiredForFrames(uptr stack_size_log) {
   80   static uptr RequiredSize(uptr stack_size_log) {
   80   static uptr RequiredSize(uptr stack_size_log) {
   91   static uptr FlagsOffset(uptr stack_size_log, uptr class_id) {
   91   static uptr FlagsOffset(uptr stack_size_log, uptr class_id) {
   91   static uptr FlagsOffset(uptr stack_size_log, uptr class_id) {
   92     uptr t = kNumberOfSizeClasses - 1 - class_id;
   93     const uptr all_ones = (((uptr)1) << (kNumberOfSizeClasses - 1)) - 1;
   97   static uptr NumberOfFrames(uptr stack_size_log, uptr class_id) {
   97   static uptr NumberOfFrames(uptr stack_size_log, uptr class_id) {
   97   static uptr NumberOfFrames(uptr stack_size_log, uptr class_id) {
  102   static uptr ModuloNumberOfFrames(uptr stack_size_log, uptr class_id, uptr n) {
  102   static uptr ModuloNumberOfFrames(uptr stack_size_log, uptr class_id, uptr n) {
  102   static uptr ModuloNumberOfFrames(uptr stack_size_log, uptr class_id, uptr n) {
  102   static uptr ModuloNumberOfFrames(uptr stack_size_log, uptr class_id, uptr n) {
  107   u8 *GetFlags(uptr stack_size_log, uptr class_id) {
  107   u8 *GetFlags(uptr stack_size_log, uptr class_id) {
  113   u8 *GetFrame(uptr stack_size_log, uptr class_id, uptr pos) {
  113   u8 *GetFrame(uptr stack_size_log, uptr class_id, uptr pos) {
  113   u8 *GetFrame(uptr stack_size_log, uptr class_id, uptr pos) {
  121   FakeFrame *Allocate(uptr stack_size_log, uptr class_id, uptr real_stack);
  121   FakeFrame *Allocate(uptr stack_size_log, uptr class_id, uptr real_stack);
  121   FakeFrame *Allocate(uptr stack_size_log, uptr class_id, uptr real_stack);
  124   static void Deallocate(uptr x, uptr class_id) {
  124   static void Deallocate(uptr x, uptr class_id) {
  132   uptr AddrIsInFakeStack(uptr addr, uptr *frame_beg, uptr *frame_end);
  132   uptr AddrIsInFakeStack(uptr addr, uptr *frame_beg, uptr *frame_end);
  132   uptr AddrIsInFakeStack(uptr addr, uptr *frame_beg, uptr *frame_end);
  132   uptr AddrIsInFakeStack(uptr addr, uptr *frame_beg, uptr *frame_end);
  133   USED uptr AddrIsInFakeStack(uptr addr) {
  133   USED uptr AddrIsInFakeStack(uptr addr) {
  134     uptr t1, t2;
  139   static uptr BytesInSizeClass(uptr class_id) {
  139   static uptr BytesInSizeClass(uptr class_id) {
  146   static u8 **SavedFlagPtr(uptr x, uptr class_id) {
  146   static u8 **SavedFlagPtr(uptr x, uptr class_id) {
  150   uptr stack_size_log() const { return stack_size_log_; }
  153   void GC(uptr real_stack);
  159   static const uptr kFlagsOffset = 4096;  // This is were the flags begin.
  162   static const uptr kMaxStackMallocSize = ((uptr)1) << kMaxStackFrameSizeLog;
  164   uptr hint_position_[kNumberOfSizeClasses];
  165   uptr stack_size_log_;
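
Taking the constants listed above at face value (kMinStackFrameSizeLog = 6, kMaxStackFrameSizeLog = 16), the fake-stack size classes work out as follows. This is a worked example, not code from the file:

    // BytesInSizeClass(class_id) == 1 << (kMinStackFrameSizeLog + class_id):
    //   class_id 0  -> 1 << 6  =    64 bytes  (smallest fake frame)
    //   class_id 4  -> 1 << 10 =  1024 bytes
    //   class_id 10 -> 1 << 16 = 65536 bytes  (== kMaxStackMallocSize)
    // So the size classes cover 64 B .. 64 KB of per-frame storage in
    // power-of-two steps.
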
projects/compiler-rt/lib/asan/asan_globals.cpp
   64   uptr aligned_size = RoundUpTo(g.size, SHADOW_GRANULARITY);
   76 const uptr kMinimalDistanceFromAnotherGlobal = 64;
   78 static bool IsAddressNearGlobal(uptr addr, const __asan_global &g) {
  100   for (uptr i = 0, n = global_registration_site_vector->size(); i < n; i++) {
  108 int GetGlobalsForAddress(uptr addr, Global *globals, u32 *reg_sites,
  264   for (uptr i = 0, n = dynamic_init_globals->size(); i < n; ++i) {
  290   for (uptr p = g.beg; p < g.beg + g.size - 1; p++) {
  322 void __asan_register_image_globals(uptr *flag) {
  330 void __asan_unregister_image_globals(uptr *flag) {
  337 void __asan_register_elf_globals(uptr *flag, void *start, void *stop) {
  347 void __asan_unregister_elf_globals(uptr *flag, void *start, void *stop) {
  358 void __asan_register_globals(__asan_global *globals, uptr n) {
  374   for (uptr i = 0; i < n; i++) {
  399 void __asan_unregister_globals(__asan_global *globals, uptr n) {
  402   for (uptr i = 0; i < n; i++) {
  430   for (uptr i = 0, n = dynamic_init_globals->size(); i < n; ++i) {
  453   for (uptr i = 0, n = dynamic_init_globals->size(); i < n; ++i) {
projects/compiler-rt/lib/asan/asan_interceptors.cpp
   55 static inline uptr MaybeRealStrnlen(const char *s, uptr maxlen) {
   55 static inline uptr MaybeRealStrnlen(const char *s, uptr maxlen) {
  257 static void ClearShadowMemoryForContextStack(uptr stack, uptr ssize) {
  257 static void ClearShadowMemoryForContextStack(uptr stack, uptr ssize) {
  259   uptr PageSize = GetPageSizeCached();
  260   uptr bottom = stack & ~(PageSize - 1);
  263   static const uptr kMaxSaneContextStackSize = 1 << 22;  // 4 Mb
  279   uptr stack, ssize;
  386       uptr from_length = REAL(strlen)(from);
  388       uptr to_length = REAL(strlen)(to);
  407     uptr from_length = MaybeRealStrnlen(from, size);
  408     uptr copy_length = Min(size, from_length + 1);
  410     uptr to_length = REAL(strlen)(to);
  435     uptr from_size = REAL(strlen)(from) + 1;
  448   uptr length = REAL(strlen)(s);
  464   uptr length = REAL(strlen)(s);
  480     uptr from_size = Min(size, MaybeRealStrnlen(from, size) + 1);
projects/compiler-rt/lib/asan/asan_interceptors_memintrinsics.cpp
   21 void *__asan_memcpy(void *to, const void *from, uptr size) {
   25 void *__asan_memset(void *block, int c, uptr size) {
   29 void *__asan_memmove(void *to, const void *from, uptr size) {
projects/compiler-rt/lib/asan/asan_interceptors_memintrinsics.h
   28 static inline bool QuickCheckForUnpoisonedRegion(uptr beg, uptr size) {
   28 static inline bool QuickCheckForUnpoisonedRegion(uptr beg, uptr size) {
  131 static inline bool RangesOverlap(const char *offset1, uptr length1,
  132                                  const char *offset2, uptr length2) {
projects/compiler-rt/lib/asan/asan_interface_internal.h
   49     uptr beg;                // The address of the global.
   50     uptr size;               // The original size of the global.
   51     uptr size_with_redzone;  // The size with the redzone.
   55     uptr has_dynamic_init;   // Non-zero if the global has dynamic initializer.
   58     uptr odr_indicator;      // The address of the ODR indicator symbol.
   65   void __asan_register_image_globals(uptr *flag);
   67   void __asan_unregister_image_globals(uptr *flag);
   70   void __asan_register_elf_globals(uptr *flag, void *start, void *stop);
   72   void __asan_unregister_elf_globals(uptr *flag, void *start, void *stop);
   77   void __asan_register_globals(__asan_global *globals, uptr n);
   79   void __asan_unregister_globals(__asan_global *globals, uptr n);
   90   void __asan_set_shadow_00(uptr addr, uptr size);
   90   void __asan_set_shadow_00(uptr addr, uptr size);
   92   void __asan_set_shadow_f1(uptr addr, uptr size);
   92   void __asan_set_shadow_f1(uptr addr, uptr size);
   94   void __asan_set_shadow_f2(uptr addr, uptr size);
   94   void __asan_set_shadow_f2(uptr addr, uptr size);
   96   void __asan_set_shadow_f3(uptr addr, uptr size);
   96   void __asan_set_shadow_f3(uptr addr, uptr size);
   98   void __asan_set_shadow_f5(uptr addr, uptr size);
   98   void __asan_set_shadow_f5(uptr addr, uptr size);
  100   void __asan_set_shadow_f8(uptr addr, uptr size);
  100   void __asan_set_shadow_f8(uptr addr, uptr size);
  107   void __asan_poison_stack_memory(uptr addr, uptr size);
  107   void __asan_poison_stack_memory(uptr addr, uptr size);
  109   void __asan_unpoison_stack_memory(uptr addr, uptr size);
  109   void __asan_unpoison_stack_memory(uptr addr, uptr size);
  116   void __asan_poison_memory_region(void const volatile *addr, uptr size);
  118   void __asan_unpoison_memory_region(void const volatile *addr, uptr size);
  124   uptr __asan_region_is_poisoned(uptr beg, uptr size);
  124   uptr __asan_region_is_poisoned(uptr beg, uptr size);
  124   uptr __asan_region_is_poisoned(uptr beg, uptr size);
  127   void __asan_describe_address(uptr addr);
  133   uptr __asan_get_report_pc();
  135   uptr __asan_get_report_bp();
  137   uptr __asan_get_report_sp();
  139   uptr __asan_get_report_address();
  143   uptr __asan_get_report_access_size();
  148   const char * __asan_locate_address(uptr addr, char *name, uptr name_size,
  148   const char * __asan_locate_address(uptr addr, char *name, uptr name_size,
  149                                      uptr *region_address, uptr *region_size);
  149                                      uptr *region_address, uptr *region_size);
  152   uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size,
  152   uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size,
  152   uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size,
  152   uptr __asan_get_alloc_stack(uptr addr, uptr *trace, uptr size,
  156   uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size,
  156   uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size,
  156   uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size,
  156   uptr __asan_get_free_stack(uptr addr, uptr *trace, uptr size,
  160   void __asan_get_shadow_mapping(uptr *shadow_scale, uptr *shadow_offset);
  160   void __asan_get_shadow_mapping(uptr *shadow_scale, uptr *shadow_offset);
  163   void __asan_report_error(uptr pc, uptr bp, uptr sp,
  163   void __asan_report_error(uptr pc, uptr bp, uptr sp,
  163   void __asan_report_error(uptr pc, uptr bp, uptr sp,
  164                            uptr addr, int is_write, uptr access_size, u32 exp);
  164                            uptr addr, int is_write, uptr access_size, u32 exp);
  180   extern uptr __asan_shadow_memory_dynamic_address;
  187   extern uptr *__asan_test_only_reported_buggy_pointer;
  189   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load1(uptr p);
  190   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load2(uptr p);
  191   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load4(uptr p);
  192   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load8(uptr p);
  193   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load16(uptr p);
  194   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store1(uptr p);
  195   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store2(uptr p);
  196   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store4(uptr p);
  197   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store8(uptr p);
  198   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store16(uptr p);
  199   SANITIZER_INTERFACE_ATTRIBUTE void __asan_loadN(uptr p, uptr size);
  199   SANITIZER_INTERFACE_ATTRIBUTE void __asan_loadN(uptr p, uptr size);
  200   SANITIZER_INTERFACE_ATTRIBUTE void __asan_storeN(uptr p, uptr size);
  200   SANITIZER_INTERFACE_ATTRIBUTE void __asan_storeN(uptr p, uptr size);
  202   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load1_noabort(uptr p);
  203   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load2_noabort(uptr p);
  204   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load4_noabort(uptr p);
  205   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load8_noabort(uptr p);
  206   SANITIZER_INTERFACE_ATTRIBUTE void __asan_load16_noabort(uptr p);
  207   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store1_noabort(uptr p);
  208   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store2_noabort(uptr p);
  209   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store4_noabort(uptr p);
  210   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store8_noabort(uptr p);
  211   SANITIZER_INTERFACE_ATTRIBUTE void __asan_store16_noabort(uptr p);
  212   SANITIZER_INTERFACE_ATTRIBUTE void __asan_loadN_noabort(uptr p, uptr size);
  212   SANITIZER_INTERFACE_ATTRIBUTE void __asan_loadN_noabort(uptr p, uptr size);
  213   SANITIZER_INTERFACE_ATTRIBUTE void __asan_storeN_noabort(uptr p, uptr size);
  213   SANITIZER_INTERFACE_ATTRIBUTE void __asan_storeN_noabort(uptr p, uptr size);
  215   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_load1(uptr p, u32 exp);
  216   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_load2(uptr p, u32 exp);
  217   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_load4(uptr p, u32 exp);
  218   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_load8(uptr p, u32 exp);
  219   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_load16(uptr p, u32 exp);
  220   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_store1(uptr p, u32 exp);
  221   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_store2(uptr p, u32 exp);
  222   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_store4(uptr p, u32 exp);
  223   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_store8(uptr p, u32 exp);
  224   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_store16(uptr p, u32 exp);
  225   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_loadN(uptr p, uptr size,
  225   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_loadN(uptr p, uptr size,
  227   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_storeN(uptr p, uptr size,
  227   SANITIZER_INTERFACE_ATTRIBUTE void __asan_exp_storeN(uptr p, uptr size,
  231       void* __asan_memcpy(void *dst, const void *src, uptr size);
  233       void* __asan_memset(void *s, int c, uptr n);
  235       void* __asan_memmove(void* dest, const void* src, uptr n);
  238   void __asan_poison_cxx_array_cookie(uptr p);
  240   uptr __asan_load_cxx_array_cookie(uptr *p);
  240   uptr __asan_load_cxx_array_cookie(uptr *p);
  242   void __asan_poison_intra_object_redzone(uptr p, uptr size);
  242   void __asan_poison_intra_object_redzone(uptr p, uptr size);
  244   void __asan_unpoison_intra_object_redzone(uptr p, uptr size);
  244   void __asan_unpoison_intra_object_redzone(uptr p, uptr size);
  246   void __asan_alloca_poison(uptr addr, uptr size);
  246   void __asan_alloca_poison(uptr addr, uptr size);
  248   void __asan_allocas_unpoison(uptr top, uptr bottom);
  248   void __asan_allocas_unpoison(uptr top, uptr bottom);
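
Many of the __asan_* entry points above have public counterparts in <sanitizer/asan_interface.h>, where the poisoning functions take void const volatile* and size_t instead of uptr. A small manual-poisoning sketch, assuming an ASan-instrumented build:

    #include <sanitizer/asan_interface.h>
    #include <cassert>
    #include <cstdlib>

    int main() {
      char *buf = static_cast<char *>(std::malloc(64));
      // Poison the second half: ASan now flags any access to buf[32..63].
      __asan_poison_memory_region(buf + 32, 32);
      assert(__asan_address_is_poisoned(buf + 40));
      // Returns the first poisoned address in the range, or NULL if none.
      assert(__asan_region_is_poisoned(buf, 64) == buf + 32);
      // Unpoison before handing the memory back to the allocator.
      __asan_unpoison_memory_region(buf + 32, 32);
      std::free(buf);
      return 0;
    }
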
projects/compiler-rt/lib/asan/asan_internal.h
   68 bool IsSystemHeapAddress(uptr addr);
   81 uptr FindDynamicShadowStart();
   93 typedef void (*globals_op_fptr)(__asan_global *, uptr);
   98 void ReadContextStack(void *context, uptr *stack, uptr *ssize);
   98 void ReadContextStack(void *context, uptr *stack, uptr *ssize);
  111 void ReserveShadowMemoryRange(uptr beg, uptr end, const char *name);
  111 void ReserveShadowMemoryRange(uptr beg, uptr end, const char *name);
  158 static const uptr kCurrentStackFrameMagic = 0x41B58AB3;
  159 static const uptr kRetiredStackFrameMagic = 0x45E0360E;
projects/compiler-rt/lib/asan/asan_linux.cpp
   83 bool IsSystemHeapAddress (uptr addr) { return false; }
   90 static void UnmapFromTo(uptr from, uptr to) {
   90 static void UnmapFromTo(uptr from, uptr to) {
   93   uptr res = internal_munmap(reinterpret_cast<void *>(from), to - from);
  115 uptr FindDynamicShadowStart() {
  121   uptr granularity = GetMmapGranularity();
  122   uptr alignment = granularity * 8;
  123   uptr left_padding = granularity;
  124   uptr shadow_size = RoundUpTo(kHighShadowEnd, granularity);
  125   uptr map_size = shadow_size + left_padding + alignment;
  127   uptr map_start = (uptr)MmapNoAccess(map_size);
  130   uptr shadow_start = RoundUpTo(map_start + left_padding, alignment);
  235 void ReadContextStack(void *context, uptr *stack, uptr *ssize) {
  235 void ReadContextStack(void *context, uptr *stack, uptr *ssize) {
projects/compiler-rt/lib/asan/asan_malloc_linux.cpp
   32 static uptr allocated_for_dlsym;
   33 static uptr last_dlsym_alloc_size_in_words;
   34 static const uptr kDlsymAllocPoolSize = SANITIZER_RTEMS ? 4096 : 1024;
   35 static uptr alloc_memory_for_dlsym[kDlsymAllocPoolSize];
   38   uptr off = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
   42 static void *AllocateFromLocalPool(uptr size_in_bytes) {
   43   uptr size_in_words = RoundUpTo(size_in_bytes, kWordSize) / kWordSize;
   55   uptr prev_offset = allocated_for_dlsym - last_dlsym_alloc_size_in_words;
   64 static int PosixMemalignFromLocalPool(void **memptr, uptr alignment,
   65                                       uptr size_in_bytes) {
   71   uptr addr = (uptr)&alloc_memory_for_dlsym[allocated_for_dlsym];
   72   uptr aligned_addr = RoundUpTo(addr, alignment);
   73   uptr aligned_size = RoundUpTo(size_in_bytes, kWordSize);
   75   uptr *end_mem = (uptr*)(aligned_addr + aligned_size);
   76   uptr allocated = end_mem - alloc_memory_for_dlsym;
  107 static void *ReallocFromLocalPool(void *ptr, uptr size) {
  108   const uptr offset = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
  109   const uptr copy_size = Min(size, kDlsymAllocPoolSize - offset);
projects/compiler-rt/lib/asan/asan_mapping.h
  254 extern uptr AsanMappingProfile[];
  264 extern uptr kHighMemEnd, kMidMemBeg, kMidMemEnd;  // Initialized in __asan_init.
  307 static inline bool AddrIsInLowMem(uptr a) {
  312 static inline bool AddrIsInLowShadow(uptr a) {
  317 static inline bool AddrIsInMidMem(uptr a) {
  322 static inline bool AddrIsInMidShadow(uptr a) {
  327 static inline bool AddrIsInHighMem(uptr a) {
  332 static inline bool AddrIsInHighShadow(uptr a) {
  337 static inline bool AddrIsInShadowGap(uptr a) {
  358 static inline bool AddrIsInMem(uptr a) {
  364 static inline uptr MemToShadow(uptr p) {
  364 static inline uptr MemToShadow(uptr p) {
  370 static inline bool AddrIsInShadow(uptr a) {
  375 static inline bool AddrIsAlignedByGranularity(uptr a) {
  380 static inline bool AddressIsPoisoned(uptr a) {
  384   const uptr kAccessSize = 1;
  396 static const uptr kAsanMappingProfileSize = __LINE__;
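
The helpers indexed above implement the standard ASan shadow translation, Shadow = (Mem >> SHADOW_SCALE) + offset. The sketch below re-derives it for illustration only, assuming the usual x86_64 Linux parameters (scale 3, offset 0x7fff8000); other platforms configure both differently, the offset can also be chosen dynamically, and dereferencing the shadow like this is only meaningful inside an ASan process.

    #include <stdint.h>

    constexpr uintptr_t kShadowScale  = 3;           // 8 app bytes -> 1 shadow byte
    constexpr uintptr_t kShadowOffset = 0x7fff8000u; // typical x86_64 Linux value

    inline uint8_t *MemToShadowSketch(uintptr_t a) {
      return reinterpret_cast<uint8_t *>((a >> kShadowScale) + kShadowOffset);
    }

    // Mirrors the AddressIsPoisoned() logic above: shadow 0 means the whole
    // 8-byte granule is addressable, 1..7 means only the first k bytes are,
    // and negative values mark redzones, freed memory and manual poisoning.
    inline bool AddressIsPoisonedSketch(uintptr_t a) {
      int8_t shadow = static_cast<int8_t>(*MemToShadowSketch(a));
      if (shadow == 0) return false;
      return static_cast<int8_t>(a & 7) >= shadow;
    }
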
projects/compiler-rt/lib/asan/asan_memory_profile.cpp
   27   uptr total_size;
   28   uptr count;
   50   void Print(uptr top_percent, uptr max_number_of_contexts) {
   50   void Print(uptr top_percent, uptr max_number_of_contexts) {
   56     uptr total_shown = 0;
   65     for (uptr i = 0; i < Min(allocations_.size(), max_number_of_contexts);
   78   uptr total_allocated_user_size_ = 0;
   79   uptr total_allocated_count_ = 0;
   80   uptr total_quarantined_user_size_ = 0;
   81   uptr total_quarantined_count_ = 0;
   82   uptr total_other_count_ = 0;
   85   void Insert(u32 id, uptr size) {
   87     for (uptr i = 0; i < allocations_.size(); i++) {
   98 static void ChunkCallback(uptr chunk, void *arg) {
  107   uptr *Arg = reinterpret_cast<uptr*>(argument);
  120 void __sanitizer_print_memory_profile(uptr top_percent,
  121                                       uptr max_number_of_contexts) {
  123   uptr Arg[2];
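
The profiler above is reachable from user code as __sanitizer_print_memory_profile(), declared in <sanitizer/common_interface_defs.h>. A minimal usage sketch; it only prints something useful when the process runs under ASan:

    #include <sanitizer/common_interface_defs.h>
    #include <vector>

    int main() {
      std::vector<char> block(1 << 20, 'x');  // make the heap non-trivial
      // Print the allocation contexts covering the top 90% of live heap memory,
      // capped at 20 distinct stack traces.
      __sanitizer_print_memory_profile(90, 20);
      return 0;
    }
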
projects/compiler-rt/lib/asan/asan_poisoning.cpp
   33 void PoisonShadow(uptr addr, uptr size, u8 value) {
   33 void PoisonShadow(uptr addr, uptr size, u8 value) {
   43 void PoisonShadowPartialRightRedzone(uptr addr,
   44                                      uptr size,
   45                                      uptr redzone_size,
   58   explicit ShadowSegmentEndpoint(uptr address) {
   65 void FlushUnneededASanShadowMemory(uptr p, uptr size) {
   65 void FlushUnneededASanShadowMemory(uptr p, uptr size) {
   71 void AsanPoisonOrUnpoisonIntraObjectRedzone(uptr ptr, uptr size, bool poison) {
   71 void AsanPoisonOrUnpoisonIntraObjectRedzone(uptr ptr, uptr size, bool poison) {
   72   uptr end = ptr + size;
  107 void __asan_poison_memory_region(void const volatile *addr, uptr size) {
  109   uptr beg_addr = (uptr)addr;
  110   uptr end_addr = beg_addr + size;
  147 void __asan_unpoison_memory_region(void const volatile *addr, uptr size) {
  149   uptr beg_addr = (uptr)addr;
  150   uptr end_addr = beg_addr + size;
  181 uptr __asan_region_is_poisoned(uptr beg, uptr size) {
  181 uptr __asan_region_is_poisoned(uptr beg, uptr size) {
  181 uptr __asan_region_is_poisoned(uptr beg, uptr size) {
  183   uptr end = beg + size;
  194   uptr aligned_b = RoundUpTo(beg, SHADOW_GRANULARITY);
  195   uptr aligned_e = RoundDownTo(end, SHADOW_GRANULARITY);
  196   uptr shadow_beg = MemToShadow(aligned_b);
  197   uptr shadow_end = MemToShadow(aligned_e);
  266 void __asan_poison_cxx_array_cookie(uptr p) {
  269   uptr s = MEM_TO_SHADOW(p);
  274 uptr __asan_load_cxx_array_cookie(uptr *p) {
  274 uptr __asan_load_cxx_array_cookie(uptr *p) {
  277   uptr s = MEM_TO_SHADOW(reinterpret_cast<uptr>(p));
  297 static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) {
  297 static void PoisonAlignedStackMemory(uptr addr, uptr size, bool do_poison) {
  299   uptr aligned_size = size & ~(SHADOW_GRANULARITY - 1);
  320 void __asan_set_shadow_00(uptr addr, uptr size) {
  320 void __asan_set_shadow_00(uptr addr, uptr size) {
  324 void __asan_set_shadow_f1(uptr addr, uptr size) {
  324 void __asan_set_shadow_f1(uptr addr, uptr size) {
  328 void __asan_set_shadow_f2(uptr addr, uptr size) {
  328 void __asan_set_shadow_f2(uptr addr, uptr size) {
  332 void __asan_set_shadow_f3(uptr addr, uptr size) {
  332 void __asan_set_shadow_f3(uptr addr, uptr size) {
  336 void __asan_set_shadow_f5(uptr addr, uptr size) {
  336 void __asan_set_shadow_f5(uptr addr, uptr size) {
  340 void __asan_set_shadow_f8(uptr addr, uptr size) {
  340 void __asan_set_shadow_f8(uptr addr, uptr size) {
  344 void __asan_poison_stack_memory(uptr addr, uptr size) {
  344 void __asan_poison_stack_memory(uptr addr, uptr size) {
  349 void __asan_unpoison_stack_memory(uptr addr, uptr size) {
  349 void __asan_unpoison_stack_memory(uptr addr, uptr size) {
  361   uptr beg = reinterpret_cast<uptr>(beg_p);
  362   uptr end = reinterpret_cast<uptr>(end_p);
  363   uptr old_mid = reinterpret_cast<uptr>(old_mid_p);
  364   uptr new_mid = reinterpret_cast<uptr>(new_mid_p);
  365   uptr granularity = SHADOW_GRANULARITY;
  375   uptr a = RoundDownTo(Min(old_mid, new_mid), granularity);
  376   uptr c = RoundUpTo(Max(old_mid, new_mid), granularity);
  377   uptr d1 = RoundDownTo(old_mid, granularity);
  393   uptr b1 = RoundDownTo(new_mid, granularity);
  394   uptr b2 = RoundUpTo(new_mid, granularity);
  409   uptr beg = reinterpret_cast<uptr>(beg_p);
  410   uptr end = reinterpret_cast<uptr>(end_p);
  411   uptr mid = reinterpret_cast<uptr>(mid_p);
  416   uptr kMaxRangeToCheck = 32;
  417   uptr r1_beg = beg;
  418   uptr r1_end = Min(beg + kMaxRangeToCheck, mid);
  419   uptr r2_beg = Max(beg, mid - kMaxRangeToCheck);
  420   uptr r2_end = Min(end, mid + kMaxRangeToCheck);
  421   uptr r3_beg = Max(end - kMaxRangeToCheck, mid);
  422   uptr r3_end = end;
  423   for (uptr i = r1_beg; i < r1_end; i++)
  426   for (uptr i = r2_beg; i < mid; i++)
  429   for (uptr i = mid; i < r2_end; i++)
  432   for (uptr i = r3_beg; i < r3_end; i++)
  446 void __asan_poison_intra_object_redzone(uptr ptr, uptr size) {
  446 void __asan_poison_intra_object_redzone(uptr ptr, uptr size) {
  451 void __asan_unpoison_intra_object_redzone(uptr ptr, uptr size) {
  451 void __asan_unpoison_intra_object_redzone(uptr ptr, uptr size) {
  457 bool WordIsPoisoned(uptr addr) {
  458   return (__asan_region_is_poisoned(addr, sizeof(uptr)) != 0);
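
The beg/end/old_mid/new_mid bookkeeping above belongs to the contiguous-container annotation support, which user code reaches via __sanitizer_annotate_contiguous_container() and __sanitizer_verify_contiguous_container() from <sanitizer/common_interface_defs.h>. A usage sketch, assuming an 8-byte-aligned heap buffer and the default detect_container_overflow setting:

    #include <sanitizer/common_interface_defs.h>
    #include <cassert>
    #include <cstdlib>

    int main() {
      const size_t capacity = 64;
      char *storage = static_cast<char *>(std::malloc(capacity));
      char *beg = storage;
      char *end = storage + capacity;

      // Freshly malloc'ed memory is fully addressable; mark the container as
      // holding zero elements by poisoning the whole spare capacity.
      __sanitizer_annotate_contiguous_container(beg, end, /*old_mid=*/end,
                                                /*new_mid=*/beg);
      // "push_back" 16 bytes: grow the addressable prefix to beg + 16.
      __sanitizer_annotate_contiguous_container(beg, end, /*old_mid=*/beg,
                                                /*new_mid=*/beg + 16);
      assert(__sanitizer_verify_contiguous_container(beg, beg + 16, end));

      // Unpoison everything before returning the buffer to the allocator.
      __sanitizer_annotate_contiguous_container(beg, end, beg + 16, end);
      std::free(storage);
      return 0;
    }
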
projects/compiler-rt/lib/asan/asan_poisoning.h
   27 void PoisonShadow(uptr addr, uptr size, u8 value);
   27 void PoisonShadow(uptr addr, uptr size, u8 value);
   31 void PoisonShadowPartialRightRedzone(uptr addr,
   32                                      uptr size,
   33                                      uptr redzone_size,
   39 ALWAYS_INLINE void FastPoisonShadow(uptr aligned_beg, uptr aligned_size,
   39 ALWAYS_INLINE void FastPoisonShadow(uptr aligned_beg, uptr aligned_size,
   46   uptr shadow_beg = MEM_TO_SHADOW(aligned_beg);
   47   uptr shadow_end = MEM_TO_SHADOW(
   60     uptr page_size = GetPageSizeCached();
   61     uptr page_beg = RoundUpTo(shadow_beg, page_size);
   62     uptr page_end = RoundDownTo(shadow_end, page_size);
   80     uptr aligned_addr, uptr size, uptr redzone_size, u8 value) {
   80     uptr aligned_addr, uptr size, uptr redzone_size, u8 value) {
   80     uptr aligned_addr, uptr size, uptr redzone_size, u8 value) {
   84   for (uptr i = 0; i < redzone_size; i += SHADOW_GRANULARITY, shadow++) {
   98 void FlushUnneededASanShadowMemory(uptr p, uptr size);
   98 void FlushUnneededASanShadowMemory(uptr p, uptr size);
projects/compiler-rt/lib/asan/asan_report.cpp
   34 static uptr error_message_buffer_pos = 0;
   46   uptr length = internal_strlen(buffer);
   48   uptr remaining = kErrorMessageBufferSize - error_message_buffer_pos;
   66 static void PrintZoneForPointer(uptr ptr, uptr zone_ptr,
   66 static void PrintZoneForPointer(uptr ptr, uptr zone_ptr,
   91   uptr n_objects = (uptr)internal_simple_strtoll(frame_descr, &p, 10);
   95   for (uptr i = 0; i < n_objects; i++) {
   96     uptr beg  = (uptr)internal_simple_strtoll(p, &p, 10);
   97     uptr size = (uptr)internal_simple_strtoll(p, &p, 10);
   98     uptr len  = (uptr)internal_simple_strtoll(p, &p, 10);
  104     uptr line = 0;
  105     uptr name_len = len;
  216 void ReportDoubleFree(uptr addr, BufferedStackTrace *free_stack) {
  222 void ReportNewDeleteTypeMismatch(uptr addr, uptr delete_size,
  222 void ReportNewDeleteTypeMismatch(uptr addr, uptr delete_size,
  223                                  uptr delete_alignment,
  231 void ReportFreeNotMalloced(uptr addr, BufferedStackTrace *free_stack) {
  237 void ReportAllocTypeMismatch(uptr addr, BufferedStackTrace *free_stack,
  246 void ReportMallocUsableSizeNotOwned(uptr addr, BufferedStackTrace *stack) {
  252 void ReportSanitizerGetAllocatedSizeNotOwned(uptr addr,
  260 void ReportCallocOverflow(uptr count, uptr size, BufferedStackTrace *stack) {
  260 void ReportCallocOverflow(uptr count, uptr size, BufferedStackTrace *stack) {
  266 void ReportReallocArrayOverflow(uptr count, uptr size,
  266 void ReportReallocArrayOverflow(uptr count, uptr size,
  273 void ReportPvallocOverflow(uptr size, BufferedStackTrace *stack) {
  279 void ReportInvalidAllocationAlignment(uptr alignment,
  287 void ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
  287 void ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
  295 void ReportInvalidPosixMemalignAlignment(uptr alignment,
  303 void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
  303 void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
  303 void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
  317 void ReportOutOfMemory(uptr requested_size, BufferedStackTrace *stack) {
  324                                              const char *offset1, uptr length1,
  325                                              const char *offset2, uptr length2,
  334 void ReportStringFunctionSizeOverflow(uptr offset, uptr size,
  334 void ReportStringFunctionSizeOverflow(uptr offset, uptr size,
  342 void ReportBadParamsToAnnotateContiguousContainer(uptr beg, uptr end,
  342 void ReportBadParamsToAnnotateContiguousContainer(uptr beg, uptr end,
  343                                                   uptr old_mid, uptr new_mid,
  343                                                   uptr old_mid, uptr new_mid,
  360 static NOINLINE void ReportInvalidPointerPair(uptr pc, uptr bp, uptr sp,
  360 static NOINLINE void ReportInvalidPointerPair(uptr pc, uptr bp, uptr sp,
  360 static NOINLINE void ReportInvalidPointerPair(uptr pc, uptr bp, uptr sp,
  361                                               uptr a1, uptr a2) {
  361                                               uptr a1, uptr a2) {
  367 static bool IsInvalidPointerPair(uptr a1, uptr a2) {
  367 static bool IsInvalidPointerPair(uptr a1, uptr a2) {
  372   static const uptr kMaxOffset = 2048;
  374   uptr left = a1 < a2 ? a1 : a2;
  375   uptr right = a1 < a2 ? a2 : a1;
  376   uptr offset = right - left;
  383   if (uptr shadow_offset1 = t->GetStackVariableShadowStart(left)) {
  384     uptr shadow_offset2 = t->GetStackVariableShadowStart(right);
  421   uptr a1 = reinterpret_cast<uptr>(p1);
  422   uptr a2 = reinterpret_cast<uptr>(p2);
  431 void ReportMacMzReallocUnknown(uptr addr, uptr zone_ptr, const char *zone_name,
  431 void ReportMacMzReallocUnknown(uptr addr, uptr zone_ptr, const char *zone_name,
  444 static bool SuppressErrorReport(uptr pc) {
  447     uptr cmp = atomic_load_relaxed(&AsanBuggyPcPool[i]);
  456 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
  456 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
  456 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
  456 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
  457                         uptr access_size, u32 exp, bool fatal) {
  481 void __asan_report_error(uptr pc, uptr bp, uptr sp, uptr addr, int is_write,
  481 void __asan_report_error(uptr pc, uptr bp, uptr sp, uptr addr, int is_write,
  481 void __asan_report_error(uptr pc, uptr bp, uptr sp, uptr addr, int is_write,
  481 void __asan_report_error(uptr pc, uptr bp, uptr sp, uptr addr, int is_write,
  482                          uptr access_size, u32 exp) {
  493 void __asan_describe_address(uptr addr) {
  504 uptr __asan_get_report_pc() {
  510 uptr __asan_get_report_bp() {
  516 uptr __asan_get_report_sp() {
  522 uptr __asan_get_report_address() {
  537 uptr __asan_get_report_access_size() {
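
The report getters above also have public counterparts in <sanitizer/asan_interface.h> (returning void*/size_t). A sketch of hooking report generation; the getters only carry data once ASan has actually produced a report, e.g. when running with -fsanitize-recover=address and halt_on_error=0:

    #include <sanitizer/asan_interface.h>
    #include <cstdio>

    // Invoked by ASan with the full textual report; by then the
    // __asan_get_report_* getters should describe the same error.
    static void OnAsanReport(const char *report) {
      (void)report;
      std::fprintf(stderr, "ASan error at %p, access size %zu\n",
                   __asan_get_report_address(),
                   __asan_get_report_access_size());
    }

    int main() {
      __asan_set_error_report_callback(OnAsanReport);
      // ... code that might trigger an ASan report goes here ...
      return 0;
    }
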
projects/compiler-rt/lib/asan/asan_report.h
   24   uptr beg;
   25   uptr size;
   27   uptr name_len;
   28   uptr line;
   33 int GetGlobalsForAddress(uptr addr, __asan_global *globals, u32 *reg_sites,
   49 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
   49 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
   49 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
   49 void ReportGenericError(uptr pc, uptr bp, uptr sp, uptr addr, bool is_write,
   50                         uptr access_size, u32 exp, bool fatal);
   52 void ReportNewDeleteTypeMismatch(uptr addr, uptr delete_size,
   52 void ReportNewDeleteTypeMismatch(uptr addr, uptr delete_size,
   53                                  uptr delete_alignment,
   55 void ReportDoubleFree(uptr addr, BufferedStackTrace *free_stack);
   56 void ReportFreeNotMalloced(uptr addr, BufferedStackTrace *free_stack);
   57 void ReportAllocTypeMismatch(uptr addr, BufferedStackTrace *free_stack,
   60 void ReportMallocUsableSizeNotOwned(uptr addr, BufferedStackTrace *stack);
   61 void ReportSanitizerGetAllocatedSizeNotOwned(uptr addr,
   63 void ReportCallocOverflow(uptr count, uptr size, BufferedStackTrace *stack);
   63 void ReportCallocOverflow(uptr count, uptr size, BufferedStackTrace *stack);
   64 void ReportReallocArrayOverflow(uptr count, uptr size,
   64 void ReportReallocArrayOverflow(uptr count, uptr size,
   66 void ReportPvallocOverflow(uptr size, BufferedStackTrace *stack);
   67 void ReportInvalidAllocationAlignment(uptr alignment,
   69 void ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
   69 void ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
   71 void ReportInvalidPosixMemalignAlignment(uptr alignment,
   73 void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
   73 void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
   73 void ReportAllocationSizeTooBig(uptr user_size, uptr total_size, uptr max_size,
   76 void ReportOutOfMemory(uptr requested_size, BufferedStackTrace *stack);
   78                                              const char *offset1, uptr length1,
   79                                              const char *offset2, uptr length2,
   81 void ReportStringFunctionSizeOverflow(uptr offset, uptr size,
   81 void ReportStringFunctionSizeOverflow(uptr offset, uptr size,
   83 void ReportBadParamsToAnnotateContiguousContainer(uptr beg, uptr end,
   83 void ReportBadParamsToAnnotateContiguousContainer(uptr beg, uptr end,
   84                                                   uptr old_mid, uptr new_mid,
   84                                                   uptr old_mid, uptr new_mid,
   91 void ReportMacMzReallocUnknown(uptr addr, uptr zone_ptr,
   91 void ReportMacMzReallocUnknown(uptr addr, uptr zone_ptr,
   94 void ReportMacCfReallocUnknown(uptr addr, uptr zone_ptr,
   94 void ReportMacCfReallocUnknown(uptr addr, uptr zone_ptr,
projects/compiler-rt/lib/asan/asan_rtl.cpp
   34 uptr __asan_shadow_memory_dynamic_address;  // Global interface symbol.
   36 uptr *__asan_test_only_reported_buggy_pointer;  // Used only for testing asan.
   40 uptr AsanMappingProfile[kAsanMappingProfileSize];
   84 uptr kHighMemEnd, kMidMemBeg, kMidMemEnd;
   94 static void OnLowLevelAllocate(uptr ptr, uptr size) {
   94 static void OnLowLevelAllocate(uptr ptr, uptr size) {
  195 void __asan_loadN(uptr addr, uptr size) {
  195 void __asan_loadN(uptr addr, uptr size) {
  204 void __asan_exp_loadN(uptr addr, uptr size, u32 exp) {
  204 void __asan_exp_loadN(uptr addr, uptr size, u32 exp) {
  213 void __asan_loadN_noabort(uptr addr, uptr size) {
  213 void __asan_loadN_noabort(uptr addr, uptr size) {
  222 void __asan_storeN(uptr addr, uptr size) {
  222 void __asan_storeN(uptr addr, uptr size) {
  231 void __asan_exp_storeN(uptr addr, uptr size, u32 exp) {
  231 void __asan_exp_storeN(uptr addr, uptr size, u32 exp) {
  240 void __asan_storeN_noabort(uptr addr, uptr size) {
  240 void __asan_storeN_noabort(uptr addr, uptr size) {
  310   for (uptr i = 0; i < kAsanMappingProfileSize; i++) {
  565   uptr PageSize = GetPageSizeCached();
  566   uptr top, bottom;
  576     uptr tls_addr, tls_size, stack_size;
  581   static const uptr kMaxExpectedCleanupSize = 64 << 20;  // 64M
  609   uptr bottom = t->stack_bottom();
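
The __asan_loadN/__asan_storeN families listed above are the runtime entry points the instrumentation emits for loads and stores whose size is not one of the specialized 1/2/4/8/16-byte variants. From user code, the closest public equivalent of the check they perform is __asan_region_is_poisoned from <sanitizer/asan_interface.h>; a minimal sketch, assuming -fsanitize=address:

// Sketch: probe whether an N-byte access would hit poisoned memory.
#include <sanitizer/asan_interface.h>
#include <cstdio>
#include <cstdlib>

int main() {
  char *buf = static_cast<char *>(std::malloc(16));
  // A 32-byte probe starting at buf reaches into the right redzone.
  if (void *bad = __asan_region_is_poisoned(buf, 32))
    std::printf("first poisoned byte at offset %td\n",
                static_cast<char *>(bad) - buf);
  std::free(buf);
  return 0;
}
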
projects/compiler-rt/lib/asan/asan_shadow_setup.cpp
   28 void ReserveShadowMemoryRange(uptr beg, uptr end, const char *name) {
   28 void ReserveShadowMemoryRange(uptr beg, uptr end, const char *name) {
   31   uptr size = end - beg + 1;
   43 static void ProtectGap(uptr addr, uptr size) {
   43 static void ProtectGap(uptr addr, uptr size) {
   47     uptr GapShadowBeg = RoundDownTo(MEM_TO_SHADOW(addr), GetPageSizeCached());
   48     uptr GapShadowEnd =
   66     uptr step = GetMmapGranularity();
   95   uptr shadow_start = kLowShadowBeg;
projects/compiler-rt/lib/asan/asan_stack.cpp
   57     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
   57     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
projects/compiler-rt/lib/asan/asan_stats.cpp
   33                                   uptr (&array)[kNumberOfSizeClasses]) {
   35   for (uptr i = 0; i < kNumberOfSizeClasses; i++) {
   58   uptr *dst_ptr = reinterpret_cast<uptr*>(this);
   59   const uptr *src_ptr = reinterpret_cast<const uptr*>(stats);
   60   uptr num_fields = sizeof(*this) / sizeof(uptr);
   60   uptr num_fields = sizeof(*this) / sizeof(uptr);
   61   for (uptr i = 0; i < num_fields; i++)
   72 static uptr max_malloced_memory;
  138 uptr __sanitizer_get_current_allocated_bytes() {
  141   uptr malloced = stats.malloced;
  142   uptr freed = stats.freed;
  148 uptr __sanitizer_get_heap_size() {
  154 uptr __sanitizer_get_free_bytes() {
  157   uptr total_free = stats.mmaped
  160   uptr total_used = stats.malloced
  167 uptr __sanitizer_get_unmapped_bytes() {
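
The __sanitizer_get_* statistics above are part of the public allocator interface (<sanitizer/allocator_interface.h>), which any sanitizer with its own allocator implements; here they are backed by the bookkeeping in asan_stats.cpp. A small sketch querying them, assuming -fsanitize=address:

// Sketch: read allocator-wide statistics through the public interface.
#include <sanitizer/allocator_interface.h>
#include <cstdio>
#include <cstdlib>

int main() {
  void *p = std::malloc(1 << 20);
  std::printf("allocated: %zu  heap: %zu  free: %zu  unmapped: %zu\n",
              __sanitizer_get_current_allocated_bytes(),
              __sanitizer_get_heap_size(),
              __sanitizer_get_free_bytes(),
              __sanitizer_get_unmapped_bytes());
  std::printf("usable size of the 1 MiB block: %zu\n",
              __sanitizer_get_allocated_size(p));
  std::free(p);
  return 0;
}
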
projects/compiler-rt/lib/asan/asan_stats.h
   27   uptr mallocs;
   28   uptr malloced;
   29   uptr malloced_redzones;
   30   uptr frees;
   31   uptr freed;
   32   uptr real_frees;
   33   uptr really_freed;
   34   uptr reallocs;
   35   uptr realloced;
   36   uptr mmaps;
   37   uptr mmaped;
   38   uptr munmaps;
   39   uptr munmaped;
   40   uptr malloc_large;
   41   uptr malloced_by_size[kNumberOfSizeClasses];
   61   uptr blocks_in_use;
   62   uptr size_in_use;
   63   uptr max_size_in_use;
   64   uptr size_allocated;
projects/compiler-rt/lib/asan/asan_suppressions.cpp
   73   for (uptr i = 0; i < stack->size && stack->trace[i]; i++) {
   74     uptr addr = stack->trace[i];
projects/compiler-rt/lib/asan/asan_thread.cpp
   80   uptr PageSize = GetPageSizeCached();
   81   uptr size = RoundUpTo(sizeof(AsanThread), PageSize);
  112   uptr size = RoundUpTo(sizeof(AsanThread), GetPageSizeCached());
  117 void AsanThread::StartSwitchFiber(FakeStack **fake_stack_save, uptr bottom,
  118                                   uptr size) {
  139                                    uptr *bottom_old,
  140                                    uptr *size_old) {
  169   const uptr cur_stack = (uptr)&local;
  178 uptr AsanThread::stack_top() {
  182 uptr AsanThread::stack_bottom() {
  186 uptr AsanThread::stack_size() {
  194   uptr stack_size = this->stack_size();
  197   uptr old_val = 0;
  207     uptr stack_size_log = Log2(RoundUpToPowerOfTwo(stack_size));
  290   uptr tls_size = 0;
  291   uptr stack_size = 0;
  310     uptr tls_begin_aligned = RoundDownTo(tls_begin_, SHADOW_GRANULARITY);
  311     uptr tls_end_aligned = RoundUpTo(tls_end_, SHADOW_GRANULARITY);
  318 bool AsanThread::GetStackFrameAccessByAddr(uptr addr,
  323   uptr bottom = 0;
  334   uptr aligned_addr = RoundDownTo(addr, SANITIZER_WORDSIZE / 8);  // align addr.
  335   uptr mem_ptr = RoundDownTo(aligned_addr, SHADOW_GRANULARITY);
  355   uptr* ptr = (uptr*)(mem_ptr + SHADOW_GRANULARITY);
  363 uptr AsanThread::GetStackVariableShadowStart(uptr addr) {
  363 uptr AsanThread::GetStackVariableShadowStart(uptr addr) {
  364   uptr bottom = 0;
  374   uptr aligned_addr = RoundDownTo(addr, SANITIZER_WORDSIZE / 8);  // align addr.
  387 bool AsanThread::AddrIsInStack(uptr addr) {
  442 AsanThread *FindThreadByStackAddress(uptr addr) {
  467 bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
  467 bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
  468                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  468                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  468                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  469                            uptr *cache_end, DTLS **dtls) {
  514                                     uptr size) {
  526                                      uptr *size_old) {
projects/compiler-rt/lib/asan/asan_thread.h
   75   uptr stack_top();
   76   uptr stack_bottom();
   77   uptr stack_size();
   78   uptr tls_begin() { return tls_begin_; }
   79   uptr tls_end() { return tls_end_; }
   86     uptr offset;
   87     uptr frame_pc;
   90   bool GetStackFrameAccessByAddr(uptr addr, StackFrameAccess *access);
   93   uptr GetStackVariableShadowStart(uptr addr);
   93   uptr GetStackVariableShadowStart(uptr addr);
   95   bool AddrIsInStack(uptr addr);
  105   void StartSwitchFiber(FakeStack **fake_stack_save, uptr bottom, uptr size);
  105   void StartSwitchFiber(FakeStack **fake_stack_save, uptr bottom, uptr size);
  106   void FinishSwitchFiber(FakeStack *fake_stack_save, uptr *bottom_old,
  107                          uptr *size_old);
  145     uptr bottom;
  146     uptr top;
  154   uptr stack_top_;
  155   uptr stack_bottom_;
  157   uptr next_stack_top_;
  158   uptr next_stack_bottom_;
  162   uptr tls_begin_;
  163   uptr tls_end_;
  170   uptr extra_spill_area_;
  183 AsanThread *FindThreadByStackAddress(uptr addr);
projects/compiler-rt/lib/cfi/cfi.cpp
   61     uptr start;
   62     uptr size;
   66 static constexpr uptr kShadowGranularity = 12;
   67 static constexpr uptr kShadowAlign = 1UL << kShadowGranularity; // 4096
   73 uptr GetShadow() {
   77 uptr GetShadowSize() {
   82 void SetShadowSize(uptr size) {
   86 uptr MemToShadowOffset(uptr x) {
   86 uptr MemToShadowOffset(uptr x) {
   90 uint16_t *MemToShadow(uptr x, uptr shadow_base) {
   90 uint16_t *MemToShadow(uptr x, uptr shadow_base) {
   98   uptr addr;
  100   explicit ShadowValue(uptr addr, uint16_t v) : addr(addr), v(v) {}
  109     uptr aligned_addr = addr & ~(kShadowAlign - 1);
  110     uptr p = aligned_addr - (((uptr)v - 1) << kShadowGranularity);
  115   static const ShadowValue load(uptr addr) {
  116     uptr shadow_base = GetShadow();
  117     uptr shadow_offset = MemToShadowOffset(addr);
  127   uptr shadow_;
  135   void AddUnchecked(uptr begin, uptr end);
  135   void AddUnchecked(uptr begin, uptr end);
  138   void Add(uptr begin, uptr end, uptr cfi_check);
  138   void Add(uptr begin, uptr end, uptr cfi_check);
  138   void Add(uptr begin, uptr end, uptr cfi_check);
  149 void ShadowBuilder::AddUnchecked(uptr begin, uptr end) {
  149 void ShadowBuilder::AddUnchecked(uptr begin, uptr end) {
  160 void ShadowBuilder::Add(uptr begin, uptr end, uptr cfi_check) {
  160 void ShadowBuilder::Add(uptr begin, uptr end, uptr cfi_check) {
  160 void ShadowBuilder::Add(uptr begin, uptr end, uptr cfi_check) {
  177   uptr main_shadow = GetShadow();
  212 uptr find_cfi_check_in_dso(dl_phdr_info *info) {
  222   uptr strtab = 0, symtab = 0, strsz = 0;
  243       uptr beg = info->dlpi_addr + phdr->p_vaddr;
  244       uptr end = beg + phdr->p_memsz;
  267       uptr addr = info->dlpi_addr + p->st_value;
  275   uptr cfi_check = find_cfi_check_in_dso(info);
  289       uptr cur_beg = info->dlpi_addr + phdr->p_vaddr;
  290       uptr cur_end = cur_beg + phdr->p_memsz;
  314   uptr vma = GetMaxUserVirtualAddress();
  343   uptr Addr = (uptr)Ptr;
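
The CFI shadow above keeps one 16-bit slot per 4 KiB of address space (kShadowGranularity = 12); a slot value v > 0 encodes the distance, in granules minus one, from the page-aligned address down to the owning DSO's __cfi_check entry, which is the decoding on cfi.cpp:109-110. A compilable sketch of that arithmetic; the slot-offset helper is an illustrative assumption, not a copy of MemToShadowOffset at cfi.cpp:86:

// Sketch of the CFI shadow address arithmetic.
#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

constexpr uptr kShadowGranularity = 12;
constexpr uptr kShadowAlign = 1UL << kShadowGranularity;  // 4096

// Hypothetical helper: byte offset of the 16-bit shadow slot covering x.
inline uptr MemToShadowOffsetSketch(uptr x) {
  return (x >> kShadowGranularity) * sizeof(uint16_t);
}

// Decoding from ShadowValue: recover the __cfi_check address from a slot.
inline uptr CfiCheckFromSlot(uptr addr, uint16_t v) {
  uptr aligned_addr = addr & ~(kShadowAlign - 1);
  return aligned_addr - ((static_cast<uptr>(v) - 1) << kShadowGranularity);
}

int main() {
  uptr caller = 0x7f1234567890;
  std::printf("slot offset: %#zx\n", (size_t)MemToShadowOffsetSketch(caller));
  std::printf("decoded __cfi_check: %#zx\n", (size_t)CfiCheckFromSlot(caller, 3));
  return 0;
}
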
projects/compiler-rt/lib/dfsan/dfsan.cpp
   35 static const uptr kNumLabels = 1 << (sizeof(dfsan_label) * 8);
   45 SANITIZER_INTERFACE_ATTRIBUTE uptr __dfsan_shadow_ptr_mask;
  144 static uptr UnusedAddr() {
  211 dfsan_label __dfsan_union_load(const dfsan_label *ls, uptr n) {
  213   for (uptr i = 1; i != n; ++i) {
  266 void __dfsan_set_label(dfsan_label label, void *addr, uptr size) {
  284 void dfsan_set_label(dfsan_label label, void *addr, uptr size) {
  289 void dfsan_add_label(dfsan_label label, void *addr, uptr size) {
  306 dfsan_read_label(const void *addr, uptr size) {
  340 extern "C" SANITIZER_INTERFACE_ATTRIBUTE uptr
  353   for (uptr l = 1; l <= last_label; ++l) {
  443   uptr init_addr = (uptr)&dfsan_init;
projects/compiler-rt/lib/dfsan/dfsan.h
   34 void dfsan_add_label(dfsan_label label, void *addr, uptr size);
   35 void dfsan_set_label(dfsan_label label, void *addr, uptr size);
   36 dfsan_label dfsan_read_label(const void *addr, uptr size);
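
dfsan_set_label / dfsan_read_label / dfsan_add_label above mirror the public DataFlowSanitizer interface in <sanitizer/dfsan_interface.h> (this tree still uses the label-union model with 16-bit labels). A minimal taint-propagation sketch, assuming a build with -fsanitize=dataflow:

// Sketch: attach a label to x and observe it propagate into x + y.
#include <sanitizer/dfsan_interface.h>
#include <cassert>

int main() {
  int x = 1, y = 2;
  dfsan_label lx = dfsan_create_label("x", nullptr);
  dfsan_set_label(lx, &x, sizeof(x));

  int z = x + y;  // instrumentation unions the operands' labels into z
  dfsan_label lz = dfsan_read_label(&z, sizeof(z));
  assert(dfsan_has_label(lz, lx));
  return 0;
}
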
projects/compiler-rt/lib/dfsan/dfsan_custom.cpp
  335 static void unpoison(const void *ptr, uptr size) {
projects/compiler-rt/lib/dfsan/dfsan_platform.h
   21   static const uptr kShadowAddr = 0x10000;
   22   static const uptr kUnionTableAddr = 0x200000000000;
   23   static const uptr kAppAddr = 0x700000008000;
   24   static const uptr kShadowMask = ~0x700000000000;
   69 uptr MappingImpl(void) {
   79 uptr MappingArchImpl(void) {
   94 uptr ShadowAddr() {
   99 uptr UnionTableAddr() {
  104 uptr AppAddr() {
  109 uptr ShadowMask() {
projects/compiler-rt/lib/gwp_asan/optional/backtrace_sanitizer_common.cpp
   21 void __sanitizer::BufferedStackTrace::UnwindImpl(uptr pc, uptr bp,
   21 void __sanitizer::BufferedStackTrace::UnwindImpl(uptr pc, uptr bp,
projects/compiler-rt/lib/hwasan/hwasan.cpp
  152 static constexpr uptr kMemoryUsageBufferSize = 4096;
  201     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  201     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  226     uptr gv = reinterpret_cast<uptr>(desc) + desc->gv_relptr;
  227     uptr size = desc->info & 0xffffff;
  228     uptr full_granule_size = RoundDownTo(size, 16);
  331 uptr __hwasan_shadow_memory_dynamic_address;  // Global interface symbol.
  335 void __hwasan_init_frames(uptr beg, uptr end) {}
  335 void __hwasan_init_frames(uptr beg, uptr end) {}
  421 void __hwasan_print_shadow(const void *p, uptr sz) {
  422   uptr ptr_raw = UntagAddr(reinterpret_cast<uptr>(p));
  423   uptr shadow_first = MemToShadow(ptr_raw);
  424   uptr shadow_last = MemToShadow(ptr_raw + sz - 1);
  427   for (uptr s = shadow_first; s <= shadow_last; ++s)
  431 sptr __hwasan_test_shadow(const void *p, uptr sz) {
  435   uptr ptr_raw = UntagAddr(reinterpret_cast<uptr>(p));
  436   uptr shadow_first = MemToShadow(ptr_raw);
  437   uptr shadow_last = MemToShadow(ptr_raw + sz - 1);
  438   for (uptr s = shadow_first; s <= shadow_last; ++s)
  465 void __hwasan_loadN(uptr p, uptr sz) {
  465 void __hwasan_loadN(uptr p, uptr sz) {
  468 void __hwasan_load1(uptr p) {
  471 void __hwasan_load2(uptr p) {
  474 void __hwasan_load4(uptr p) {
  477 void __hwasan_load8(uptr p) {
  480 void __hwasan_load16(uptr p) {
  484 void __hwasan_loadN_noabort(uptr p, uptr sz) {
  484 void __hwasan_loadN_noabort(uptr p, uptr sz) {
  487 void __hwasan_load1_noabort(uptr p) {
  490 void __hwasan_load2_noabort(uptr p) {
  493 void __hwasan_load4_noabort(uptr p) {
  496 void __hwasan_load8_noabort(uptr p) {
  499 void __hwasan_load16_noabort(uptr p) {
  503 void __hwasan_storeN(uptr p, uptr sz) {
  503 void __hwasan_storeN(uptr p, uptr sz) {
  506 void __hwasan_store1(uptr p) {
  509 void __hwasan_store2(uptr p) {
  512 void __hwasan_store4(uptr p) {
  515 void __hwasan_store8(uptr p) {
  518 void __hwasan_store16(uptr p) {
  522 void __hwasan_storeN_noabort(uptr p, uptr sz) {
  522 void __hwasan_storeN_noabort(uptr p, uptr sz) {
  525 void __hwasan_store1_noabort(uptr p) {
  528 void __hwasan_store2_noabort(uptr p) {
  531 void __hwasan_store4_noabort(uptr p) {
  534 void __hwasan_store8_noabort(uptr p) {
  537 void __hwasan_store16_noabort(uptr p) {
  541 void __hwasan_tag_memory(uptr p, u8 tag, uptr sz) {
  541 void __hwasan_tag_memory(uptr p, u8 tag, uptr sz) {
  545 uptr __hwasan_tag_pointer(uptr p, u8 tag) {
  545 uptr __hwasan_tag_pointer(uptr p, u8 tag) {
  550   uptr dst = (uptr)sp_dst;
  554   uptr sp = (uptr)__builtin_frame_address(0);
  555   static const uptr kMaxExpectedCleanupSize = 64 << 20;  // 64M
  568   uptr sp = (uptr)sp_dst;
  571   uptr top = t->stack_top();
  572   uptr bottom = t->stack_bottom();
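
__hwasan_test_shadow (hwasan.cpp:431) returns -1 when every byte of the probed range carries the pointer's tag, otherwise the offset of the first mismatching byte. A small probe, assuming an AArch64 build with -fsanitize=hwaddress; the declaration is written out by hand here to match the listing rather than pulled from a header:

// Sketch: probe a heap block past its usable bytes without faulting.
#include <cstdio>
#include <cstdlib>
#include <cstdint>

// Runtime entry from the listing (sptr/uptr spelled as standard types).
extern "C" intptr_t __hwasan_test_shadow(const void *p, size_t sz);

int main() {
  char *p = static_cast<char *>(std::malloc(40));
  intptr_t off = __hwasan_test_shadow(p, 64);  // reaches past the allocation
  if (off >= 0)
    std::printf("first differently tagged byte at offset %ld\n", (long)off);
  std::free(p);
  return 0;
}
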
projects/compiler-rt/lib/hwasan/hwasan.h
   41 const uptr kAddressTagMask = 0xFFUL << kAddressTagShift;
   52 static inline tag_t GetTagFromPointer(uptr p) {
   56 static inline uptr UntagAddr(uptr tagged_addr) {
   56 static inline uptr UntagAddr(uptr tagged_addr) {
   65 static inline uptr AddTagToPointer(uptr p, tag_t tag) {
   65 static inline uptr AddTagToPointer(uptr p, tag_t tag) {
   75 bool ProtectRange(uptr beg, uptr end);
   75 bool ProtectRange(uptr beg, uptr end);
   86 void *hwasan_malloc(uptr size, StackTrace *stack);
   87 void *hwasan_calloc(uptr nmemb, uptr size, StackTrace *stack);
   87 void *hwasan_calloc(uptr nmemb, uptr size, StackTrace *stack);
   88 void *hwasan_realloc(void *ptr, uptr size, StackTrace *stack);
   89 void *hwasan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack);
   89 void *hwasan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack);
   90 void *hwasan_valloc(uptr size, StackTrace *stack);
   91 void *hwasan_pvalloc(uptr size, StackTrace *stack);
   92 void *hwasan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack);
   92 void *hwasan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack);
   93 void *hwasan_memalign(uptr alignment, uptr size, StackTrace *stack);
   93 void *hwasan_memalign(uptr alignment, uptr size, StackTrace *stack);
   94 int hwasan_posix_memalign(void **memptr, uptr alignment, uptr size,
   94 int hwasan_posix_memalign(void **memptr, uptr alignment, uptr size,
projects/compiler-rt/lib/hwasan/hwasan_allocator.cpp
   49 static uptr AlignRight(uptr addr, uptr requested_size) {
   49 static uptr AlignRight(uptr addr, uptr requested_size) {
   49 static uptr AlignRight(uptr addr, uptr requested_size) {
   50   uptr tail_size = requested_size % kShadowAlignment;
   55 uptr HwasanChunkView::Beg() const {
   60 uptr HwasanChunkView::End() const {
   63 uptr HwasanChunkView::UsedSize() const {
   70 uptr HwasanChunkView::ActualSize() const {
   87   for (uptr i = 0; i < sizeof(tail_magic); i++)
   95 static uptr TaggedSize(uptr size) {
   95 static uptr TaggedSize(uptr size) {
   97   uptr new_size = RoundUpTo(size, kShadowAlignment);
  102 static void *HwasanAllocate(StackTrace *stack, uptr orig_size, uptr alignment,
  102 static void *HwasanAllocate(StackTrace *stack, uptr orig_size, uptr alignment,
  114   uptr size = TaggedSize(orig_size);
  138     uptr fill_size = Min(size, (uptr)flags()->max_malloc_fill_size);
  154       uptr tag_size = orig_size ? orig_size : 1;
  155       uptr full_granule_size = RoundDownTo(tag_size, kShadowAlignment);
  176   uptr tagged_uptr = reinterpret_cast<uptr>(tagged_ptr);
  194   uptr orig_size = meta->requested_size;
  199   uptr tagged_size = TaggedSize(orig_size);
  202     uptr tail_size = tagged_size - orig_size - 1;
  217     uptr fill_size =
  238                               uptr new_size, uptr alignment) {
  238                               uptr new_size, uptr alignment) {
  255 static void *HwasanCalloc(StackTrace *stack, uptr nmemb, uptr size) {
  255 static void *HwasanCalloc(StackTrace *stack, uptr nmemb, uptr size) {
  264 HwasanChunkView FindHeapChunkByAddress(uptr address) {
  273 static uptr AllocationSize(const void *tagged_ptr) {
  288 void *hwasan_malloc(uptr size, StackTrace *stack) {
  292 void *hwasan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
  292 void *hwasan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
  296 void *hwasan_realloc(void *ptr, uptr size, StackTrace *stack) {
  306 void *hwasan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack) {
  306 void *hwasan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack) {
  316 void *hwasan_valloc(uptr size, StackTrace *stack) {
  321 void *hwasan_pvalloc(uptr size, StackTrace *stack) {
  322   uptr PageSize = GetPageSizeCached();
  334 void *hwasan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack) {
  334 void *hwasan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack) {
  344 void *hwasan_memalign(uptr alignment, uptr size, StackTrace *stack) {
  344 void *hwasan_memalign(uptr alignment, uptr size, StackTrace *stack) {
  354 int hwasan_posix_memalign(void **memptr, uptr alignment, uptr size,
  354 int hwasan_posix_memalign(void **memptr, uptr alignment, uptr size,
  386 uptr __sanitizer_get_current_allocated_bytes() {
  387   uptr stats[AllocatorStatCount];
  392 uptr __sanitizer_get_heap_size() {
  393   uptr stats[AllocatorStatCount];
  398 uptr __sanitizer_get_free_bytes() { return 1; }
  400 uptr __sanitizer_get_unmapped_bytes() { return 1; }
  402 uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }
  402 uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }
  406 uptr __sanitizer_get_allocated_size(const void *p) { return AllocationSize(p); }
projects/compiler-rt/lib/hwasan/hwasan_allocator.h
   37   void OnMap(uptr p, uptr size) const { UpdateMemoryUsage(); }
   37   void OnMap(uptr p, uptr size) const { UpdateMemoryUsage(); }
   38   void OnUnmap(uptr p, uptr size) const {
   38   void OnUnmap(uptr p, uptr size) const {
   46 static const uptr kMaxAllowedMallocSize = 2UL << 30;  // 2G
   49   static const uptr kSpaceBeg = ~0ULL;
   50   static const uptr kSpaceSize = 0x2000000000ULL;
   51   static const uptr kMetadataSize = sizeof(Metadata);
   55   static const uptr kFlags = 0;
   66   HwasanChunkView(uptr block, Metadata *metadata)
   69   uptr Beg() const;            // First byte of user memory
   70   uptr End() const;            // Last byte of user memory
   71   uptr UsedSize() const;       // Size requested by the user
   72   uptr ActualSize() const;     // Size allocated by the allocator.
   76   uptr block_;
   80 HwasanChunkView FindHeapChunkByAddress(uptr address);
   87   uptr tagged_addr;
projects/compiler-rt/lib/hwasan/hwasan_checks.h
   21 __attribute__((always_inline)) static void SigTrap(uptr p) {
   47 __attribute__((always_inline)) static void SigTrap(uptr p, uptr size) {
   47 __attribute__((always_inline)) static void SigTrap(uptr p, uptr size) {
   65     tag_t mem_tag, uptr ptr, uptr sz) {
   65     tag_t mem_tag, uptr ptr, uptr sz) {
   83 __attribute__((always_inline, nodebug)) static void CheckAddress(uptr p) {
   84   uptr ptr_raw = p & ~kAddressTagMask;
   95 __attribute__((always_inline, nodebug)) static void CheckAddressSized(uptr p,
   96                                                                       uptr sz) {
  100   uptr ptr_raw = p & ~kAddressTagMask;
  110   uptr end = p + sz;
  111   uptr tail_sz = end & 0xf;
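
CheckAddressSized above splits an access into whole 16-byte granules plus a tail, and the tag comparison at hwasan_checks.h:65 accepts "short" granules: a memory tag below 16 means only that many leading bytes of the granule are addressable, and the real tag lives in the granule's last byte. A compilable paraphrase of that rule, assuming the usual constants (kShadowAlignment = 16, pointer tag in the top byte); this is not a copy of the header:

// Paraphrase of the hwasan short-granule tag check.
#include <cstdint>

using uptr = uintptr_t;
using tag_t = uint8_t;

constexpr uptr kShadowAlignment = 16;   // one shadow byte per 16-byte granule
constexpr uptr kAddressTagShift = 56;   // pointer tag lives in the top byte

inline tag_t GetTagFromPointer(uptr p) { return p >> kAddressTagShift; }

inline bool PossiblyShortTagMatches(tag_t mem_tag, uptr ptr, uptr sz) {
  tag_t ptr_tag = GetTagFromPointer(ptr);
  if (ptr_tag == mem_tag)
    return true;                          // ordinary full-granule match
  if (mem_tag >= kShadowAlignment)
    return false;                         // not a short granule; tags differ
  if ((ptr & (kShadowAlignment - 1)) + sz > mem_tag)
    return false;                         // access runs past the valid bytes
  // Short granule: the real tag is stored in the granule's last byte.
  return *reinterpret_cast<const tag_t *>(ptr | (kShadowAlignment - 1)) == ptr_tag;
}

int main() {
  uptr tagged = (uptr{0x2a} << kAddressTagShift) | 0x1000;
  // Matching tags take the early return, so nothing is dereferenced here.
  return PossiblyShortTagMatches(0x2a, tagged, 4) ? 0 : 1;
}
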
projects/compiler-rt/lib/hwasan/hwasan_dynamic_shadow.cpp
   29 static void UnmapFromTo(uptr from, uptr to) {
   29 static void UnmapFromTo(uptr from, uptr to) {
   33   uptr res = internal_munmap(reinterpret_cast<void *>(from), to - from);
   44 static uptr MapDynamicShadow(uptr shadow_size_bytes) {
   44 static uptr MapDynamicShadow(uptr shadow_size_bytes) {
   45   const uptr granularity = GetMmapGranularity();
   46   const uptr min_alignment = granularity << kShadowScale;
   47   const uptr alignment = 1ULL << kShadowBaseAlignment;
   50   const uptr left_padding = 1ULL << kShadowBaseAlignment;
   51   const uptr shadow_size =
   53   const uptr map_size = shadow_size + left_padding + alignment;
   55   const uptr map_start = (uptr)MmapNoAccess(map_size);
   58   const uptr shadow_start = RoundUpTo(map_start + left_padding, alignment);
  158 uptr FindDynamicShadowStart(uptr shadow_size_bytes) {
  158 uptr FindDynamicShadowStart(uptr shadow_size_bytes) {
projects/compiler-rt/lib/hwasan/hwasan_dynamic_shadow.h
   22 uptr FindDynamicShadowStart(uptr shadow_size_bytes);
   22 uptr FindDynamicShadowStart(uptr shadow_size_bytes);
projects/compiler-rt/lib/hwasan/hwasan_exceptions.cpp
   56     uptr fp = get_gr(context, 6); // rbp
   62     uptr sp = get_cfa(context);
projects/compiler-rt/lib/hwasan/hwasan_interceptors.cpp
   47 static uptr allocated_for_dlsym;
   48 static const uptr kDlsymAllocPoolSize = 1024;
   49 static uptr alloc_memory_for_dlsym[kDlsymAllocPoolSize];
   52   uptr off = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
   56 static void *AllocateFromLocalPool(uptr size_in_bytes) {
   57   uptr size_in_words = RoundUpTo(size_in_bytes, kWordSize) / kWordSize;
   72 int __sanitizer_posix_memalign(void **memptr, uptr alignment, uptr size) {
   72 int __sanitizer_posix_memalign(void **memptr, uptr alignment, uptr size) {
   79 void * __sanitizer_memalign(uptr alignment, uptr size) {
   79 void * __sanitizer_memalign(uptr alignment, uptr size) {
   84 void * __sanitizer_aligned_alloc(uptr alignment, uptr size) {
   84 void * __sanitizer_aligned_alloc(uptr alignment, uptr size) {
   89 void * __sanitizer___libc_memalign(uptr alignment, uptr size) {
   89 void * __sanitizer___libc_memalign(uptr alignment, uptr size) {
   97 void * __sanitizer_valloc(uptr size) {
  102 void * __sanitizer_pvalloc(uptr size) {
  119 uptr __sanitizer_malloc_usable_size(const void *ptr) {
  137 void * __sanitizer_calloc(uptr nmemb, uptr size) {
  137 void * __sanitizer_calloc(uptr nmemb, uptr size) {
  145 void * __sanitizer_realloc(void *ptr, uptr size) {
  148     uptr offset = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
  149     uptr copy_size = Min(size, kDlsymAllocPoolSize - offset);
  163 void * __sanitizer_reallocarray(void *ptr, uptr nmemb, uptr size) {
  163 void * __sanitizer_reallocarray(void *ptr, uptr nmemb, uptr size) {
  168 void * __sanitizer_malloc(uptr size) {
projects/compiler-rt/lib/hwasan/hwasan_interface_internal.h
   48 void __hwasan_init_frames(uptr, uptr);
   48 void __hwasan_init_frames(uptr, uptr);
   51 extern uptr __hwasan_shadow_memory_dynamic_address;
   54 void __hwasan_loadN(uptr, uptr);
   54 void __hwasan_loadN(uptr, uptr);
   56 void __hwasan_load1(uptr);
   58 void __hwasan_load2(uptr);
   60 void __hwasan_load4(uptr);
   62 void __hwasan_load8(uptr);
   64 void __hwasan_load16(uptr);
   67 void __hwasan_loadN_noabort(uptr, uptr);
   67 void __hwasan_loadN_noabort(uptr, uptr);
   69 void __hwasan_load1_noabort(uptr);
   71 void __hwasan_load2_noabort(uptr);
   73 void __hwasan_load4_noabort(uptr);
   75 void __hwasan_load8_noabort(uptr);
   77 void __hwasan_load16_noabort(uptr);
   80 void __hwasan_storeN(uptr, uptr);
   80 void __hwasan_storeN(uptr, uptr);
   82 void __hwasan_store1(uptr);
   84 void __hwasan_store2(uptr);
   86 void __hwasan_store4(uptr);
   88 void __hwasan_store8(uptr);
   90 void __hwasan_store16(uptr);
   93 void __hwasan_storeN_noabort(uptr, uptr);
   93 void __hwasan_storeN_noabort(uptr, uptr);
   95 void __hwasan_store1_noabort(uptr);
   97 void __hwasan_store2_noabort(uptr);
   99 void __hwasan_store4_noabort(uptr);
  101 void __hwasan_store8_noabort(uptr);
  103 void __hwasan_store16_noabort(uptr);
  106 void __hwasan_tag_memory(uptr p, u8 tag, uptr sz);
  106 void __hwasan_tag_memory(uptr p, u8 tag, uptr sz);
  109 uptr __hwasan_tag_pointer(uptr p, u8 tag);
  109 uptr __hwasan_tag_pointer(uptr p, u8 tag);
  112 void __hwasan_tag_mismatch(uptr addr, u8 ts);
  115 void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
  115 void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
  115 void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
  124 sptr __hwasan_test_shadow(const void *x, uptr size);
  130 void __hwasan_print_shadow(const void *x, uptr size);
  172 int __sanitizer_posix_memalign(void **memptr, uptr alignment, uptr size);
  172 int __sanitizer_posix_memalign(void **memptr, uptr alignment, uptr size);
  175 void * __sanitizer_memalign(uptr alignment, uptr size);
  175 void * __sanitizer_memalign(uptr alignment, uptr size);
  178 void * __sanitizer_aligned_alloc(uptr alignment, uptr size);
  178 void * __sanitizer_aligned_alloc(uptr alignment, uptr size);
  181 void * __sanitizer___libc_memalign(uptr alignment, uptr size);
  181 void * __sanitizer___libc_memalign(uptr alignment, uptr size);
  184 void * __sanitizer_valloc(uptr size);
  187 void * __sanitizer_pvalloc(uptr size);
  196 uptr __sanitizer_malloc_usable_size(const void *ptr);
  208 void * __sanitizer_calloc(uptr nmemb, uptr size);
  208 void * __sanitizer_calloc(uptr nmemb, uptr size);
  211 void * __sanitizer_realloc(void *ptr, uptr size);
  214 void * __sanitizer_reallocarray(void *ptr, uptr nmemb, uptr size);
  214 void * __sanitizer_reallocarray(void *ptr, uptr nmemb, uptr size);
  217 void * __sanitizer_malloc(uptr size);
  220 void *__hwasan_memcpy(void *dst, const void *src, uptr size);
  222 void *__hwasan_memset(void *s, int c, uptr n);
  224 void *__hwasan_memmove(void *dest, const void *src, uptr n);
projects/compiler-rt/lib/hwasan/hwasan_linux.cpp
   55 THREADLOCAL uptr __hwasan_tls;
   60 static void ReserveShadowMemoryRange(uptr beg, uptr end, const char *name) {
   60 static void ReserveShadowMemoryRange(uptr beg, uptr end, const char *name) {
   63   uptr size = end - beg + 1;
   74 static void ProtectGap(uptr addr, uptr size) {
   74 static void ProtectGap(uptr addr, uptr size) {
   84     uptr step = GetMmapGranularity();
  102 static uptr kLowMemStart;
  103 static uptr kLowMemEnd;
  104 static uptr kLowShadowEnd;
  105 static uptr kLowShadowStart;
  106 static uptr kHighShadowStart;
  107 static uptr kHighShadowEnd;
  108 static uptr kHighMemStart;
  109 static uptr kHighMemEnd;
  111 static void PrintRange(uptr start, uptr end, const char *name) {
  111 static void PrintRange(uptr start, uptr end, const char *name) {
  135 static uptr GetHighMemEnd() {
  137   uptr max_address = GetMaxUserVirtualAddress();
  144 static void InitializeShadowBaseAddress(uptr shadow_size_bytes) {
  239   uptr guard_page_size = GetMmapGranularity();
  240   uptr thread_space_start =
  242   uptr thread_space_end =
  251 static void MadviseShadowRegion(uptr beg, uptr end) {
  251 static void MadviseShadowRegion(uptr beg, uptr end) {
  252   uptr size = end - beg + 1;
  263 bool MemIsApp(uptr p) {
  309   uptr iterations = (uptr)tsd;
  332 uptr *GetCurrentThreadLongPtr() {
  357   uptr *ThreadLong = GetCurrentThreadLongPtr();
  367   uptr addr;
  368   uptr size;
  401   uptr pc = (uptr)uc->uc_mcontext.gregs[REG_RIP];
  410   const uptr addr = uc->uc_mcontext.gregs[REG_RDI];
  414   const uptr size =
  424 static void HandleTagMismatch(AccessInfo ai, uptr pc, uptr frame,
  424 static void HandleTagMismatch(AccessInfo ai, uptr pc, uptr frame,
  425                               ucontext_t *uc, uptr *registers_frame = nullptr) {
  483 void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
  483 void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
  483 void __hwasan_tag_mismatch4(uptr addr, uptr access_info, uptr *registers_frame,
projects/compiler-rt/lib/hwasan/hwasan_malloc_bisect.h
   18 static u32 malloc_hash(StackTrace *stack, uptr orig_size) {
   19   uptr len = Min(stack->size, (unsigned)7);
   27   for (uptr i = 1; i < len; ++i) H.add(((u32)stack->trace[i]) & 0xFFF);
   31 static INLINE bool malloc_bisect(StackTrace *stack, uptr orig_size) {
   32   uptr left = flags()->malloc_bisect_left;
   33   uptr right = flags()->malloc_bisect_right;
   40   uptr h = (uptr)malloc_hash(stack, orig_size);
projects/compiler-rt/lib/hwasan/hwasan_mapping.h
   37 constexpr uptr kShadowScale = 4;
   38 constexpr uptr kShadowAlignment = 1ULL << kShadowScale;
   42 inline uptr MemToShadow(uptr untagged_addr) {
   42 inline uptr MemToShadow(uptr untagged_addr) {
   46 inline uptr ShadowToMem(uptr shadow_addr) {
   46 inline uptr ShadowToMem(uptr shadow_addr) {
   49 inline uptr MemToShadowSize(uptr size) {
   49 inline uptr MemToShadowSize(uptr size) {
   53 bool MemIsApp(uptr p);
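
The helpers above boil down to a linear mapping: one shadow byte describes one 16-byte granule (kShadowScale = 4), offset by the dynamically chosen shadow base (__hwasan_shadow_memory_dynamic_address, hwasan.cpp:331). A sketch of that arithmetic with a stand-in base variable; the global and names here are illustrative assumptions, not the runtime's:

// Sketch of the hwasan memory-to-shadow arithmetic.
#include <cstdint>

using uptr = uintptr_t;

constexpr uptr kShadowScale = 4;
constexpr uptr kShadowAlignment = 1ULL << kShadowScale;  // 16-byte granules

uptr g_shadow_base = 0;  // stand-in for the dynamic shadow base

inline uptr MemToShadowSketch(uptr untagged_addr) {
  return (untagged_addr >> kShadowScale) + g_shadow_base;
}

inline uptr ShadowToMemSketch(uptr shadow_addr) {
  return (shadow_addr - g_shadow_base) << kShadowScale;
}

inline uptr MemToShadowSizeSketch(uptr size) {
  return size >> kShadowScale;  // shadow bytes covering `size` app bytes
}

int main() {
  g_shadow_base = 0x4000000000;
  uptr addr = 0x7000000013;
  // Round-tripping lands on the granule boundary containing addr.
  return ShadowToMemSketch(MemToShadowSketch(addr)) ==
                 (addr & ~(kShadowAlignment - 1))
             ? 0
             : 1;
}
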
projects/compiler-rt/lib/hwasan/hwasan_memintrinsics.cpp
   24 void *__hwasan_memset(void *block, int c, uptr size) {
   30 void *__hwasan_memcpy(void *to, const void *from, uptr size) {
   38 void *__hwasan_memmove(void *to, const void *from, uptr size) {
projects/compiler-rt/lib/hwasan/hwasan_poisoning.cpp
   22 uptr TagMemoryAligned(uptr p, uptr size, tag_t tag) {
   22 uptr TagMemoryAligned(uptr p, uptr size, tag_t tag) {
   22 uptr TagMemoryAligned(uptr p, uptr size, tag_t tag) {
   25   uptr shadow_start = MemToShadow(p);
   26   uptr shadow_size = MemToShadowSize(size);
   28   uptr page_size = GetPageSizeCached();
   29   uptr page_start = RoundUpTo(shadow_start, page_size);
   30   uptr page_end = RoundDownTo(shadow_start + shadow_size, page_size);
   31   uptr threshold = common_flags()->clear_shadow_mmap_threshold;
   46 uptr TagMemory(uptr p, uptr size, tag_t tag) {
   46 uptr TagMemory(uptr p, uptr size, tag_t tag) {
   46 uptr TagMemory(uptr p, uptr size, tag_t tag) {
   47   uptr start = RoundDownTo(p, kShadowAlignment);
   48   uptr end = RoundUpTo(p + size, kShadowAlignment);
projects/compiler-rt/lib/hwasan/hwasan_poisoning.h
   19 uptr TagMemory(uptr p, uptr size, tag_t tag);
   19 uptr TagMemory(uptr p, uptr size, tag_t tag);
   19 uptr TagMemory(uptr p, uptr size, tag_t tag);
   20 uptr TagMemoryAligned(uptr p, uptr size, tag_t tag);
   20 uptr TagMemoryAligned(uptr p, uptr size, tag_t tag);
   20 uptr TagMemoryAligned(uptr p, uptr size, tag_t tag);
projects/compiler-rt/lib/hwasan/hwasan_report.cpp
   59     uptr len = internal_strlen(msg);
   60     uptr old_size = error_message_ptr_->size();
   95     uptr size = rb->size() * sizeof(uptr);
   95     uptr size = rb->size() * sizeof(uptr);
  103     UnmapOrDie(rb->StartOfStorage(), rb->size() * sizeof(uptr));
  111   uptr rb_;
  127 uptr FindHeapAllocation(HeapAllocationsRingBuffer *rb,
  128                         uptr tagged_addr,
  131   for (uptr i = 0, size = rb->size(); i < size; i++) {
  143                                   tag_t addr_tag, uptr untagged_addr) {
  144   uptr frames = Min((uptr)flags()->stack_history_size, sa->size());
  146   for (uptr i = 0; i < frames; i++) {
  147     const uptr *record_addr = &(*sa)[i];
  148     uptr record = *record_addr;
  153     uptr fp = (record >> kRecordFPShift) << kRecordFPLShift;
  154     uptr pc_mask = (1ULL << kRecordFPShift) - 1;
  155     uptr pc = record & pc_mask;
  169         uptr obj_offset =
  191   for (uptr i = 0; i < frames; i++) {
  192     const uptr *record_addr = &(*sa)[i];
  193     uptr record = *record_addr;
  196     uptr pc_mask = (1ULL << 48) - 1;
  197     uptr pc = record & pc_mask;
  219   uptr mem = ShadowToMem(reinterpret_cast<uptr>(tag_ptr));
  225     uptr tagged_addr, uptr access_size,
  225     uptr tagged_addr, uptr access_size,
  229   uptr untagged_addr = UntagAddr(tagged_addr);
  233   if (uptr beg = chunk.Beg()) {
  234     uptr size = chunk.ActualSize();
  266     uptr mem = ShadowToMem(reinterpret_cast<uptr>(candidate));
  285       uptr module_address;
  312     if (uptr D = FindHeapAllocation(t->heap_allocations(), tagged_addr, &har)) {
  363 static void PrintTagInfoAroundAddr(tag_t *tag_ptr, uptr num_rows,
  366   const uptr row_len = 16;  // better be power of two.
  374     for (uptr i = 0; i < row_len; i++) {
  398       uptr granule_addr = ShadowToMem(reinterpret_cast<uptr>(tag));
  412 void ReportInvalidFree(StackTrace *stack, uptr tagged_addr) {
  415   uptr untagged_addr = UntagAddr(tagged_addr);
  421   uptr pc = stack->size ? stack->trace[0] : 0;
  438 void ReportTailOverwritten(StackTrace *stack, uptr tagged_addr, uptr orig_size,
  438 void ReportTailOverwritten(StackTrace *stack, uptr tagged_addr, uptr orig_size,
  440   uptr tail_size = kShadowAlignment - (orig_size % kShadowAlignment);
  443   uptr untagged_addr = UntagAddr(tagged_addr);
  463   for (uptr i = 0; i < kShadowAlignment - tail_size; i++)
  465   for (uptr i = 0; i < tail_size; i++)
  469   for (uptr i = 0; i < kShadowAlignment - tail_size; i++)
  471   for (uptr i = 0; i < tail_size; i++)
  475   for (uptr i = 0; i < kShadowAlignment - tail_size; i++)
  477   for (uptr i = 0; i < tail_size; i++)
  497 void ReportTagMismatch(StackTrace *stack, uptr tagged_addr, uptr access_size,
  497 void ReportTagMismatch(StackTrace *stack, uptr tagged_addr, uptr access_size,
  498                        bool is_store, bool fatal, uptr *registers_frame) {
  505   uptr untagged_addr = UntagAddr(tagged_addr);
  508   uptr pc = stack->size ? stack->trace[0] : 0;
  547 void ReportRegisters(uptr *frame, uptr pc) {
  547 void ReportRegisters(uptr *frame, uptr pc) {
projects/compiler-rt/lib/hwasan/hwasan_report.h
   24 void ReportTagMismatch(StackTrace *stack, uptr addr, uptr access_size,
   24 void ReportTagMismatch(StackTrace *stack, uptr addr, uptr access_size,
   25                        bool is_store, bool fatal, uptr *registers_frame);
   26 void ReportInvalidFree(StackTrace *stack, uptr addr);
   27 void ReportTailOverwritten(StackTrace *stack, uptr addr, uptr orig_size,
   27 void ReportTailOverwritten(StackTrace *stack, uptr addr, uptr orig_size,
   29 void ReportRegisters(uptr *registers_frame, uptr pc);
   29 void ReportRegisters(uptr *registers_frame, uptr pc);
projects/compiler-rt/lib/hwasan/hwasan_thread.cpp
   37 void Thread::Init(uptr stack_buffer_start, uptr stack_buffer_size) {
   37 void Thread::Init(uptr stack_buffer_start, uptr stack_buffer_size) {
   44   uptr *ThreadLong = GetCurrentThreadLongPtr();
   54   uptr tls_size;
   55   uptr stack_size;
   72              stack_allocations_->size() * sizeof(uptr));
projects/compiler-rt/lib/hwasan/hwasan_thread.h
   22 typedef __sanitizer::CompactRingBuffer<uptr> StackAllocationsRingBuffer;
   26   void Init(uptr stack_buffer_start, uptr stack_buffer_size);  // Must be called from the thread itself.
   26   void Init(uptr stack_buffer_start, uptr stack_buffer_size);  // Must be called from the thread itself.
   30   uptr stack_top() { return stack_top_; }
   31   uptr stack_bottom() { return stack_bottom_; }
   32   uptr stack_size() { return stack_top() - stack_bottom(); }
   33   uptr tls_begin() { return tls_begin_; }
   34   uptr tls_end() { return tls_end_; }
   37   bool AddrIsInStack(uptr addr) {
   66   uptr &vfork_spill() { return vfork_spill_; }
   73   uptr vfork_spill_;
   74   uptr stack_top_;
   75   uptr stack_bottom_;
   76   uptr tls_begin_;
   77   uptr tls_end_;
  103 uptr *GetCurrentThreadLongPtr();
projects/compiler-rt/lib/hwasan/hwasan_thread_list.cpp
    9 void InitThreadList(uptr storage, uptr size) {
    9 void InitThreadList(uptr storage, uptr size) {
projects/compiler-rt/lib/hwasan/hwasan_thread_list.h
   55 static uptr RingBufferSize() {
   56   uptr desired_bytes = flags()->stack_history_size * sizeof(uptr);
   56   uptr desired_bytes = flags()->stack_history_size * sizeof(uptr);
   60     uptr size = 4096 * (1ULL << shift);
  104   uptr n_live_threads;
  105   uptr total_stack_size;
  110   HwasanThreadList(uptr storage, uptr size)
  110   HwasanThreadList(uptr storage, uptr size)
  128         uptr start = (uptr)t - ring_buffer_size_;
  141     uptr start = (uptr)t - ring_buffer_size_;
  154   Thread *GetThreadByBufferAddress(uptr p) {
  159   uptr MemoryUsedPerThread() {
  160     uptr res = sizeof(Thread) + ring_buffer_size_;
  191     uptr align = ring_buffer_size_ * 2;
  199   uptr free_space_;
  200   uptr free_space_end_;
  201   uptr ring_buffer_size_;
  202   uptr thread_alloc_size_;
  212 void InitThreadList(uptr storage, uptr size);
  212 void InitThreadList(uptr storage, uptr size);
projects/compiler-rt/lib/interception/interception.h
   27 typedef __sanitizer::uptr    SIZE_T;
projects/compiler-rt/lib/lsan/lsan.cpp
   29 bool WordIsPoisoned(uptr addr) {
   36     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
   36     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
   38   uptr stack_top = 0, stack_bottom = 0;
projects/compiler-rt/lib/lsan/lsan_allocator.cpp
   26 extern "C" void *memset(void *ptr, int value, uptr num);
   34 static const uptr kMaxAllowedMallocSize = 8UL << 30;
   53 static void RegisterAllocation(const StackTrace &stack, void *p, uptr size) {
   70 static void *ReportAllocationSizeTooBig(uptr size, const StackTrace &stack) {
   78 void *Allocate(const StackTrace &stack, uptr size, uptr alignment,
   78 void *Allocate(const StackTrace &stack, uptr size, uptr alignment,
  100 static void *Calloc(uptr nmemb, uptr size, const StackTrace &stack) {
  100 static void *Calloc(uptr nmemb, uptr size, const StackTrace &stack) {
  117 void *Reallocate(const StackTrace &stack, void *p, uptr new_size,
  118                  uptr alignment) {
  129 void GetAllocatorCacheRange(uptr *begin, uptr *end) {
  129 void GetAllocatorCacheRange(uptr *begin, uptr *end) {
  134 uptr GetMallocUsableSize(const void *p) {
  140 int lsan_posix_memalign(void **memptr, uptr alignment, uptr size,
  140 int lsan_posix_memalign(void **memptr, uptr alignment, uptr size,
  156 void *lsan_aligned_alloc(uptr alignment, uptr size, const StackTrace &stack) {
  156 void *lsan_aligned_alloc(uptr alignment, uptr size, const StackTrace &stack) {
  166 void *lsan_memalign(uptr alignment, uptr size, const StackTrace &stack) {
  166 void *lsan_memalign(uptr alignment, uptr size, const StackTrace &stack) {
  176 void *lsan_malloc(uptr size, const StackTrace &stack) {
  184 void *lsan_realloc(void *p, uptr size, const StackTrace &stack) {
  188 void *lsan_reallocarray(void *ptr, uptr nmemb, uptr size,
  188 void *lsan_reallocarray(void *ptr, uptr nmemb, uptr size,
  199 void *lsan_calloc(uptr nmemb, uptr size, const StackTrace &stack) {
  199 void *lsan_calloc(uptr nmemb, uptr size, const StackTrace &stack) {
  203 void *lsan_valloc(uptr size, const StackTrace &stack) {
  208 void *lsan_pvalloc(uptr size, const StackTrace &stack) {
  209   uptr PageSize = GetPageSizeCached();
  221 uptr lsan_mz_size(const void *p) {
  235 void GetAllocatorGlobalRange(uptr *begin, uptr *end) {
  235 void GetAllocatorGlobalRange(uptr *begin, uptr *end) {
  240 uptr PointsIntoChunk(void* p) {
  241   uptr addr = reinterpret_cast<uptr>(p);
  242   uptr chunk = reinterpret_cast<uptr>(allocator.GetBlockBeginFastLocked(p));
  258 uptr GetUserBegin(uptr chunk) {
  258 uptr GetUserBegin(uptr chunk) {
  262 LsanMetadata::LsanMetadata(uptr chunk) {
  279 uptr LsanMetadata::requested_size() const {
  311 uptr __sanitizer_get_current_allocated_bytes() {
  312   uptr stats[AllocatorStatCount];
  318 uptr __sanitizer_get_heap_size() {
  319   uptr stats[AllocatorStatCount];
  325 uptr __sanitizer_get_free_bytes() { return 0; }
  328 uptr __sanitizer_get_unmapped_bytes() { return 0; }
  331 uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }
  331 uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }
  337 uptr __sanitizer_get_allocated_size(const void *p) {
projects/compiler-rt/lib/lsan/lsan_allocator.h
   24 void *Allocate(const StackTrace &stack, uptr size, uptr alignment,
   24 void *Allocate(const StackTrace &stack, uptr size, uptr alignment,
   27 void *Reallocate(const StackTrace &stack, void *p, uptr new_size,
   28                  uptr alignment);
   29 uptr GetMallocUsableSize(const void *p);
   34 void GetAllocatorCacheRange(uptr *begin, uptr *end);
   34 void GetAllocatorCacheRange(uptr *begin, uptr *end);
   44   uptr requested_size : 54;
   73 const uptr kAllocatorSpace = 0x600000000000ULL;
   74 const uptr kAllocatorSize  = 0x40000000000ULL;  // 4T.
   78   static const uptr kSpaceBeg = kAllocatorSpace;
   79   static const uptr kSpaceSize = kAllocatorSize;
   80   static const uptr kMetadataSize = sizeof(ChunkMetadata);
   83   static const uptr kFlags = 0;
   99 int lsan_posix_memalign(void **memptr, uptr alignment, uptr size,
   99 int lsan_posix_memalign(void **memptr, uptr alignment, uptr size,
  101 void *lsan_aligned_alloc(uptr alignment, uptr size, const StackTrace &stack);
  101 void *lsan_aligned_alloc(uptr alignment, uptr size, const StackTrace &stack);
  102 void *lsan_memalign(uptr alignment, uptr size, const StackTrace &stack);
  102 void *lsan_memalign(uptr alignment, uptr size, const StackTrace &stack);
  103 void *lsan_malloc(uptr size, const StackTrace &stack);
  105 void *lsan_realloc(void *p, uptr size, const StackTrace &stack);
  106 void *lsan_reallocarray(void *p, uptr nmemb, uptr size,
  106 void *lsan_reallocarray(void *p, uptr nmemb, uptr size,
  108 void *lsan_calloc(uptr nmemb, uptr size, const StackTrace &stack);
  108 void *lsan_calloc(uptr nmemb, uptr size, const StackTrace &stack);
  109 void *lsan_valloc(uptr size, const StackTrace &stack);
  110 void *lsan_pvalloc(uptr size, const StackTrace &stack);
  111 uptr lsan_mz_size(const void *p);
projects/compiler-rt/lib/lsan/lsan_common.cpp
  131 static inline bool CanBeAHeapPointer(uptr p) {
  134   const uptr kMinAddress = 4 * 4096;
  156 void ScanRangeForPointers(uptr begin, uptr end,
  156 void ScanRangeForPointers(uptr begin, uptr end,
  160   const uptr alignment = flags()->pointer_alignment();
  162   uptr pp = begin;
  168     uptr chunk = PointsIntoChunk(p);
  193 void ScanGlobalRange(uptr begin, uptr end, Frontier *frontier) {
  193 void ScanGlobalRange(uptr begin, uptr end, Frontier *frontier) {
  194   uptr allocator_begin = 0, allocator_end = 0;
  209 void ForEachExtraStackRangeCb(uptr begin, uptr end, void* arg) {
  209 void ForEachExtraStackRangeCb(uptr begin, uptr end, void* arg) {
  217   InternalMmapVector<uptr> registers(suspended_threads.RegisterCount());
  218   uptr registers_begin = reinterpret_cast<uptr>(registers.data());
  219   uptr registers_end =
  221   for (uptr i = 0; i < suspended_threads.ThreadCount(); i++) {
  224     uptr stack_begin, stack_end, tls_begin, tls_end, cache_begin, cache_end;
  235     uptr sp;
  257         uptr page_size = GetPageSizeCached();
  293         for (uptr j = 0; j < dtls->dtv_size; ++j) {
  294           uptr dtls_beg = dtls->dtv[j].beg;
  295           uptr dtls_end = dtls_beg + dtls->dtv[j].size;
  312                     uptr region_begin, uptr region_end, bool is_readable) {
  312                     uptr region_begin, uptr region_end, bool is_readable) {
  313   uptr intersection_begin = Max(root_region.begin, region_begin);
  314   uptr intersection_end = Min(region_end, root_region.begin + root_region.size);
  339   for (uptr i = 0; i < root_regions->size(); i++) {
  346     uptr next_chunk = frontier->back();
  356 static void MarkIndirectlyLeakedCb(uptr chunk, void *arg) {
  367 static void CollectIgnoredCb(uptr chunk, void *arg) {
  378 static uptr GetCallerPC(u32 stack_id, StackDepotReverseMap *map) {
  395 static void MarkInvalidPCCb(uptr chunk, void *arg) {
  402     uptr caller_pc = 0;
  473 static void ResetTagsCb(uptr chunk, void *arg) {
  488 static void CollectLeaksCb(uptr chunk, void *arg) {
  518   for (uptr i = 0; i < matched.size(); i++)
  533     uptr i = InternalLowerBound(suspended_threads, 0, suspended_threads.size(),
  544   for (uptr i = 0; i < suspended_threads.ThreadCount(); ++i)
  585   uptr unsuppressed_count = param.leak_report.UnsuppressedLeakCount();
  625 static Suppression *GetSuppressionForAddr(uptr addr) {
  649   for (uptr i = 0; i < stack.size; i++) {
  664 const uptr kMaxLeaksConsidered = 5000;
  666 void LeakReport::AddLeakedChunk(uptr chunk, u32 stack_trace_id,
  667                                 uptr leaked_size, ChunkTag tag) {
  670   uptr i;
  698 void LeakReport::ReportTopLeaks(uptr num_leaks_to_report) {
  706   uptr unsuppressed_count = UnsuppressedLeakCount();
  710   uptr leaks_reported = 0;
  711   for (uptr i = 0; i < leaks_.size(); i++) {
  718     uptr remaining = unsuppressed_count - leaks_reported;
  723 void LeakReport::PrintReportForLeak(uptr index) {
  740 void LeakReport::PrintLeakedObjectsForLeak(uptr index) {
  742   for (uptr j = 0; j < leaked_objects_.size(); j++) {
  751   uptr bytes = 0, allocations = 0;
  752   for (uptr i = 0; i < leaks_.size(); i++) {
  764   for (uptr i = 0; i < leaks_.size(); i++) {
  775 uptr LeakReport::UnsuppressedLeakCount() {
  776   uptr result = 0;
  777   for (uptr i = 0; i < leaks_.size(); i++)
  816 void __lsan_register_root_region(const void *begin, uptr size) {
  827 void __lsan_unregister_root_region(const void *begin, uptr size) {
  832   for (uptr i = 0; i < root_regions->size(); i++) {
  836       uptr last_index = root_regions->size() - 1;
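
__lsan_register_root_region (lsan_common.cpp:816) adds an extra address range to the reachability scan performed by ScanRangeForPointers above; anonymous mappings are otherwise not scanned. A minimal usage sketch, assuming Linux and a build with -fsanitize=address (or standalone LSan) plus the public <sanitizer/lsan_interface.h> header:

// Sketch: keep a heap object reachable only through a registered root region.
#include <sanitizer/lsan_interface.h>
#include <sys/mman.h>
#include <cstdlib>

int main() {
  void **slot = static_cast<void **>(
      mmap(nullptr, 4096, PROT_READ | PROT_WRITE,
           MAP_PRIVATE | MAP_ANONYMOUS, -1, 0));
  __lsan_register_root_region(slot, 4096);

  // The only pointer to this allocation lives in the registered region,
  // so the leak checker treats it as reachable.
  slot[0] = std::malloc(128);

  __lsan_do_leak_check();  // explicit check; nothing is reported for slot[0]
  return 0;
}
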
projects/compiler-rt/lib/lsan/lsan_common.h
   73   uptr pointer_alignment() const {
   74     return use_unaligned ? 1 : sizeof(uptr);
   84   uptr hit_count;
   85   uptr total_size;
   93   uptr addr;
   94   uptr size;
  101   void AddLeakedChunk(uptr chunk, u32 stack_trace_id, uptr leaked_size,
  101   void AddLeakedChunk(uptr chunk, u32 stack_trace_id, uptr leaked_size,
  103   void ReportTopLeaks(uptr max_leaks);
  106   uptr UnsuppressedLeakCount();
  109   void PrintReportForLeak(uptr index);
  110   void PrintLeakedObjectsForLeak(uptr index);
  117 typedef InternalMmapVector<uptr> Frontier;
  125   uptr begin;
  126   uptr size;
  131                     uptr region_begin, uptr region_end, bool is_readable);
  131                     uptr region_begin, uptr region_end, bool is_readable);
  136 void ScanRangeForPointers(uptr begin, uptr end,
  136 void ScanRangeForPointers(uptr begin, uptr end,
  139 void ScanGlobalRange(uptr begin, uptr end, Frontier *frontier);
  139 void ScanGlobalRange(uptr begin, uptr end, Frontier *frontier);
  167 static inline bool IsItaniumABIArrayCookie(uptr chunk_beg, uptr chunk_size,
  167 static inline bool IsItaniumABIArrayCookie(uptr chunk_beg, uptr chunk_size,
  168                                            uptr addr) {
  169   return chunk_size == sizeof(uptr) && chunk_beg + chunk_size == addr &&
  178 static inline bool IsARMABIArrayCookie(uptr chunk_beg, uptr chunk_size,
  178 static inline bool IsARMABIArrayCookie(uptr chunk_beg, uptr chunk_size,
  179                                        uptr addr) {
  180   return chunk_size == 2 * sizeof(uptr) && chunk_beg + chunk_size == addr &&
  181          *reinterpret_cast<uptr *>(chunk_beg + sizeof(uptr)) == 0;
  188 inline bool IsSpecialCaseOfOperatorNew0(uptr chunk_beg, uptr chunk_size,
  188 inline bool IsSpecialCaseOfOperatorNew0(uptr chunk_beg, uptr chunk_size,
  189                                         uptr addr) {
  201 void GetAllocatorGlobalRange(uptr *begin, uptr *end);
  201 void GetAllocatorGlobalRange(uptr *begin, uptr *end);
  206 bool WordIsPoisoned(uptr addr);
  211 bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
  211 bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
  212                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  212                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  212                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  213                            uptr *cache_end, DTLS **dtls);
  225 uptr PointsIntoChunk(void *p);
  227 uptr GetUserBegin(uptr chunk);
  227 uptr GetUserBegin(uptr chunk);
  244   explicit LsanMetadata(uptr chunk);
  248   uptr requested_size() const;
projects/compiler-rt/lib/lsan/lsan_common_linux.cpp
   82   for (uptr j = 0; j < info->dlpi_phnum; j++) {
   89     uptr begin = info->dlpi_addr + phdr->p_vaddr;
   90     uptr end = begin + phdr->p_memsz;
projects/compiler-rt/lib/lsan/lsan_interceptors.cpp
   66     const uptr kCallocPoolSize = 1024;
   67     static uptr calloc_memory_for_dlsym[kCallocPoolSize];
   68     static uptr allocated;
   69     uptr size_in_words = ((nmemb * size) + kWordSize - 1) / kWordSize;
  314   uptr iter = (uptr)v;
projects/compiler-rt/lib/lsan/lsan_thread.cpp
   32 static const uptr kMaxThreads = 1 << 13;
   33 static const uptr kThreadQuarantineSize = 64;
   52   uptr stack_begin, stack_end,
   74 u32 ThreadCreate(u32 parent_tid, uptr user_id, bool detached) {
   81   uptr stack_size = 0;
   82   uptr tls_size = 0;
  106   uptr uid = (uptr)arg;
  113 u32 ThreadTid(uptr uid) {
  129 bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
  129 bool GetThreadRangesLocked(tid_t os_id, uptr *stack_begin, uptr *stack_end,
  130                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  130                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  130                            uptr *tls_begin, uptr *tls_end, uptr *cache_begin,
  131                            uptr *cache_end, DTLS **dtls) {
projects/compiler-rt/lib/lsan/lsan_thread.h
   30   uptr stack_begin() { return stack_begin_; }
   31   uptr stack_end() { return stack_end_; }
   32   uptr tls_begin() { return tls_begin_; }
   33   uptr tls_end() { return tls_end_; }
   34   uptr cache_begin() { return cache_begin_; }
   35   uptr cache_end() { return cache_end_; }
   39   uptr stack_begin_, stack_end_,
   50 u32 ThreadCreate(u32 tid, uptr uid, bool detached);
   52 u32 ThreadTid(uptr uid);
projects/compiler-rt/lib/msan/msan.cpp
   99 static const uptr kNumStackOriginDescrs = 1024 * 1024;
  101 static uptr StackOriginPC[kNumStackOriginDescrs];
  222 void PrintWarning(uptr pc, uptr bp) {
  222 void PrintWarning(uptr pc, uptr bp) {
  226 void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin) {
  226 void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin) {
  250 void UnpoisonParam(uptr n) {
  283 const char *GetStackOriginDescr(u32 id, uptr *pc) {
  303     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  303     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
  480 void __msan_print_shadow(const void *x, uptr size) {
  489 void __msan_dump_shadow(const void *x, uptr size) {
  496   for (uptr i = 0; i < size; i++)
  501 sptr __msan_test_shadow(const void *x, uptr size) {
  504   for (uptr i = 0; i < size; ++i)
  510 void __msan_check_mem_is_initialized(const void *x, uptr size) {
  540 void __msan_partial_poison(const void* data, void* shadow, uptr size) {
  544 void __msan_load_unpoisoned(const void *src, uptr size, void *dst) {
  549 void __msan_set_origin(const void *a, uptr size, u32 origin) {
  556 void __msan_set_alloca_origin(void *a, uptr size, char *descr) {
  560 void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc) {
  560 void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc) {
  598   uptr x = (uptr)a;
  599   uptr aligned = x & ~3ULL;
  600   uptr origin_ptr = MEM_TO_ORIGIN(aligned);
projects/compiler-rt/lib/msan/msan.h
   33   uptr start;
   34   uptr end;
  246 const uptr kMemoryLayoutSize = sizeof(kMemoryLayout) / sizeof(kMemoryLayout[0]);
  253 inline bool addr_is_type(uptr addr, MappingDesc::Type mapping_type) {
  279 bool ProtectRange(uptr beg, uptr end);
  279 bool ProtectRange(uptr beg, uptr end);
  288 void *msan_malloc(uptr size, StackTrace *stack);
  289 void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack);
  289 void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack);
  290 void *msan_realloc(void *ptr, uptr size, StackTrace *stack);
  291 void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack);
  291 void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack);
  292 void *msan_valloc(uptr size, StackTrace *stack);
  293 void *msan_pvalloc(uptr size, StackTrace *stack);
  294 void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack);
  294 void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack);
  295 void *msan_memalign(uptr alignment, uptr size, StackTrace *stack);
  295 void *msan_memalign(uptr alignment, uptr size, StackTrace *stack);
  296 int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
  296 int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
  302 const char *GetStackOriginDescr(u32 id, uptr *pc);
  313 void PrintWarning(uptr pc, uptr bp);
  313 void PrintWarning(uptr pc, uptr bp);
  314 void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);
  314 void PrintWarningWithOrigin(uptr pc, uptr bp, u32 origin);
  317 void UnpoisonParam(uptr n);
projects/compiler-rt/lib/msan/msan_allocator.cpp
   28   uptr requested_size;
   32   void OnMap(uptr p, uptr size) const {}
   32   void OnMap(uptr p, uptr size) const {}
   33   void OnUnmap(uptr p, uptr size) const {
   33   void OnUnmap(uptr p, uptr size) const {
   38     uptr shadow_p = MEM_TO_SHADOW(p);
   41       uptr origin_p = MEM_TO_ORIGIN(p);
   64 static const uptr kAllocatorSpace = 0x700000000000ULL;
   68 static const uptr kMaxAllowedMallocSize = 8UL << 30;
   71   static const uptr kSpaceBeg = kAllocatorSpace;
   72   static const uptr kSpaceSize = 0x40000000000;  // 4T.
   73   static const uptr kMetadataSize = sizeof(Metadata);
   76   static const uptr kFlags = 0;
  133 static void *MsanAllocate(StackTrace *stack, uptr size, uptr alignment,
  133 static void *MsanAllocate(StackTrace *stack, uptr size, uptr alignment,
  179   uptr size = meta->requested_size;
  202 void *MsanReallocate(StackTrace *stack, void *old_p, uptr new_size,
  203                      uptr alignment) {
  205   uptr old_size = meta->requested_size;
  206   uptr actually_allocated_size = allocator.GetActuallyAllocatedSize(old_p);
  218   uptr memcpy_size = Min(new_size, old_size);
  227 void *MsanCalloc(StackTrace *stack, uptr nmemb, uptr size) {
  227 void *MsanCalloc(StackTrace *stack, uptr nmemb, uptr size) {
  236 static uptr AllocationSize(const void *p) {
  244 void *msan_malloc(uptr size, StackTrace *stack) {
  248 void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
  248 void *msan_calloc(uptr nmemb, uptr size, StackTrace *stack) {
  252 void *msan_realloc(void *ptr, uptr size, StackTrace *stack) {
  262 void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack) {
  262 void *msan_reallocarray(void *ptr, uptr nmemb, uptr size, StackTrace *stack) {
  272 void *msan_valloc(uptr size, StackTrace *stack) {
  276 void *msan_pvalloc(uptr size, StackTrace *stack) {
  277   uptr PageSize = GetPageSizeCached();
  289 void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack) {
  289 void *msan_aligned_alloc(uptr alignment, uptr size, StackTrace *stack) {
  299 void *msan_memalign(uptr alignment, uptr size, StackTrace *stack) {
  299 void *msan_memalign(uptr alignment, uptr size, StackTrace *stack) {
  309 int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
  309 int msan_posix_memalign(void **memptr, uptr alignment, uptr size,
  329 uptr __sanitizer_get_current_allocated_bytes() {
  330   uptr stats[AllocatorStatCount];
  335 uptr __sanitizer_get_heap_size() {
  336   uptr stats[AllocatorStatCount];
  341 uptr __sanitizer_get_free_bytes() { return 1; }
  343 uptr __sanitizer_get_unmapped_bytes() { return 1; }
  345 uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }
  345 uptr __sanitizer_get_estimated_allocated_size(uptr size) { return size; }
  349 uptr __sanitizer_get_allocated_size(const void *p) { return AllocationSize(p); }
projects/compiler-rt/lib/msan/msan_allocator.h
   21   uptr quarantine_cache[16];
   23   ALIGNED(8) uptr allocator_cache[96 * (512 * 8 + 16)];  // Opaque.
projects/compiler-rt/lib/msan/msan_chained_origin_depot.cpp
   35   static uptr storage_size(const args_type &args) {
projects/compiler-rt/lib/msan/msan_interceptors.cpp
   76 static uptr allocated_for_dlsym;
   77 static const uptr kDlsymAllocPoolSize = 1024;
   78 static uptr alloc_memory_for_dlsym[kDlsymAllocPoolSize];
   81   uptr off = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
   85 static void *AllocateFromLocalPool(uptr size_in_bytes) {
   86   uptr size_in_words = RoundUpTo(size_in_bytes, kWordSize) / kWordSize;
  904     uptr offset = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
  905     uptr copy_size = Min(size, kDlsymAllocPoolSize - offset);
  932 void __msan_allocated_memory(const void *data, uptr size) {
  940 void __msan_copy_shadow(void *dest, const void *src, uptr n) {
  945 void __sanitizer_dtor_callback(const void *data, uptr size) {
 1122     uptr element = interceptor_ctx()->AtExitStack.Size() - 1;
 1357 static uptr signal_impl(int signo, uptr cb);
 1357 static uptr signal_impl(int signo, uptr cb);
 1381     uptr old_cb = atomic_load(&sigactions[signo], memory_order_relaxed);
 1386       uptr cb = (uptr)pnew_act->sigaction;
 1387       uptr new_cb = (pnew_act->sa_flags & __sanitizer::sa_siginfo)
 1397       uptr cb = (uptr)oldact->sigaction;
 1412 static uptr signal_impl(int signo, uptr cb) {
 1412 static uptr signal_impl(int signo, uptr cb) {
 1539 void __msan_unpoison(const void *a, uptr size) {
 1544 void __msan_poison(const void *a, uptr size) {
 1549 void __msan_poison_stack(void *a, uptr size) {
 1554 void __msan_unpoison_param(uptr n) { UnpoisonParam(n); }
 1556 void __msan_clear_and_unpoison(void *a, uptr size) {
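The msan_interceptors.cpp entries at lines 76-86 and 904-905 above surface a small bump allocator that hands out memory from a static uptr pool while dlsym is being resolved. A hedged sketch of that pattern is below; the pool size, the missing bounds CHECK, and the helper names are illustrative rather than the exact MSan implementation.

    #include <cstdint>

    typedef uintptr_t uptr;

    static const uptr kDlsymAllocPoolSize = 1024;             // pool size, in words
    static uptr alloc_memory_for_dlsym[kDlsymAllocPoolSize];  // static backing storage
    static uptr allocated_for_dlsym = 0;                      // bump pointer, in words

    static void *AllocateFromLocalPool(uptr size_in_bytes) {
      // Round the request up to whole words so every allocation stays word-aligned.
      uptr size_in_words = (size_in_bytes + sizeof(uptr) - 1) / sizeof(uptr);
      void *mem = &alloc_memory_for_dlsym[allocated_for_dlsym];
      allocated_for_dlsym += size_in_words;
      // A real implementation must CHECK allocated_for_dlsym against the pool size.
      return mem;
    }

    static bool IsInDlsymAllocPool(const void *ptr) {
      // Unsigned wraparound makes one comparison cover pointers below the pool too.
      uptr off = (uptr)ptr - (uptr)alloc_memory_for_dlsym;
      return off < sizeof(alloc_memory_for_dlsym);
    }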
projects/compiler-rt/lib/msan/msan_interface_internal.h
   68 void __msan_unpoison(const void *a, uptr size);
   72 void __msan_unpoison_param(uptr n);
   74 void __msan_clear_and_unpoison(void *a, uptr size);
   76 void* __msan_memcpy(void *dst, const void *src, uptr size);
   78 void* __msan_memset(void *s, int c, uptr n);
   80 void* __msan_memmove(void* dest, const void* src, uptr n);
   82 void __msan_poison(const void *a, uptr size);
   84 void __msan_poison_stack(void *a, uptr size);
   89 void __msan_load_unpoisoned(void *src, uptr size, void *dst);
   94 sptr __msan_test_shadow(const void *x, uptr size);
   97 void __msan_check_mem_is_initialized(const void *x, uptr size);
  100 void __msan_set_origin(const void *a, uptr size, u32 origin);
  102 void __msan_set_alloca_origin(void *a, uptr size, char *descr);
  104 void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc);
  104 void __msan_set_alloca_origin4(void *a, uptr size, char *descr, uptr pc);
  133 void __msan_print_shadow(const void *x, uptr size);
  135 void __msan_dump_shadow(const void *x, uptr size);
  143 void __msan_partial_poison(const void* data, void* shadow, uptr size);
  148 void __msan_allocated_memory(const void* data, uptr size);
  153 void __sanitizer_dtor_callback(const void* data, uptr size);
  177 void __msan_copy_shadow(void *dst, const void *src, uptr size);
projects/compiler-rt/lib/msan/msan_linux.cpp
   38 void ReportMapRange(const char *descr, uptr beg, uptr size) {
   38 void ReportMapRange(const char *descr, uptr beg, uptr size) {
   40     uptr end = beg + size - 1;
   45 static bool CheckMemoryRangeAvailability(uptr beg, uptr size) {
   45 static bool CheckMemoryRangeAvailability(uptr beg, uptr size) {
   47     uptr end = beg + size - 1;
   56 static bool ProtectMemoryRange(uptr beg, uptr size, const char *name) {
   56 static bool ProtectMemoryRange(uptr beg, uptr size, const char *name) {
   62       uptr gap = 16 * GetPageSizeCached();
   68       uptr end = beg + size - 1;
   78   uptr prev_end = 0;
   80     uptr start = kMemoryLayout[i].start;
   81     uptr end = kMemoryLayout[i].end;
   89       uptr addr = start;
  123   const uptr maxVirtualAddress = GetMaxUserVirtualAddress();
  126     uptr start = kMemoryLayout[i].start;
  127     uptr end = kMemoryLayout[i].end;
  128     uptr size = end - start;
projects/compiler-rt/lib/msan/msan_poisoning.cpp
   25 u32 GetOriginIfPoisoned(uptr addr, uptr size) {
   25 u32 GetOriginIfPoisoned(uptr addr, uptr size) {
   27   for (uptr i = 0; i < size; ++i)
   32 void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
   32 void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
   32 void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size,
   34   uptr dst_s = MEM_TO_SHADOW(addr);
   35   uptr src_s = src_shadow;
   36   uptr src_s_end = src_s + size;
   42 void CopyOrigin(const void *dst, const void *src, uptr size,
   46   uptr d = (uptr)dst;
   47   uptr beg = d & ~3UL;
   58   uptr end = (d + size) & ~3UL;
   73     uptr s = ((uptr)src + 3) & ~3UL;
   97 void MoveShadowAndOrigin(const void *dst, const void *src, uptr size,
  107 void CopyShadowAndOrigin(const void *dst, const void *src, uptr size,
  116 void CopyMemory(void *dst, const void *src, uptr size, StackTrace *stack) {
  121 void SetShadow(const void *ptr, uptr size, u8 value) {
  122   uptr PageSize = GetPageSizeCached();
  123   uptr shadow_beg = MEM_TO_SHADOW(ptr);
  124   uptr shadow_end = shadow_beg + size;
  129     uptr page_beg = RoundUpTo(shadow_beg, PageSize);
  130     uptr page_end = RoundDownTo(shadow_end, PageSize);
  147 void SetOrigin(const void *dst, uptr size, u32 origin) {
  151   uptr x = MEM_TO_ORIGIN((uptr)dst);
  152   uptr beg = x & ~3UL;               // align down.
  153   uptr end = (x + size + 3) & ~3UL;  // align up.
  161   for (uptr addr = beg; addr < (end & ~7UL); addr += 8) *(u64 *)addr = origin64;
  165 void PoisonMemory(const void *dst, uptr size, StackTrace *stack) {
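The SetOrigin entries above (lines 147-161) show the 4-byte-granular origin write pattern: the target range is aligned down/up to 4 bytes and filled with a doubled 32-bit origin using 8-byte stores where possible. A hedged sketch follows; it assumes the caller has already translated the application address via MEM_TO_ORIGIN, and it omits the surrounding flag checks.

    #include <cstdint>

    typedef uintptr_t uptr;
    typedef uint32_t  u32;
    typedef uint64_t  u64;

    static void SetOriginSketch(uptr origin_addr, uptr size, u32 origin) {
      if (!size) return;
      uptr beg = origin_addr & ~3UL;                // align down to 4 bytes
      uptr end = (origin_addr + size + 3) & ~3UL;   // align up to 4 bytes
      u64 origin64 = ((u64)origin << 32) | origin;  // two origin slots per 64-bit store
      if (beg & 7) {                                // leading slot before 8-alignment
        *(u32 *)beg = origin;
        beg += 4;
      }
      for (uptr addr = beg; addr < (end & ~7UL); addr += 8)
        *(u64 *)addr = origin64;                    // bulk 8-byte stores
      if (end & 7)                                  // trailing 4-byte slot, if any
        *(u32 *)(end - 4) = origin;
    }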
projects/compiler-rt/lib/msan/msan_poisoning.h
   21 u32 GetOriginIfPoisoned(uptr addr, uptr size);
   21 u32 GetOriginIfPoisoned(uptr addr, uptr size);
   26 void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size, u32 src_origin);
   26 void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size, u32 src_origin);
   26 void SetOriginIfPoisoned(uptr addr, uptr src_shadow, uptr size, u32 src_origin);
   31 void CopyOrigin(const void *dst, const void *src, uptr size, StackTrace *stack);
   35 void MoveShadowAndOrigin(const void *dst, const void *src, uptr size,
   40 void CopyShadowAndOrigin(const void *dst, const void *src, uptr size,
   45 void CopyMemory(void *dst, const void *src, uptr size, StackTrace *stack);
   48 void SetShadow(const void *ptr, uptr size, u8 value);
   51 void SetOrigin(const void *dst, uptr size, u32 origin);
   54 void PoisonMemory(const void *dst, uptr size, StackTrace *stack);
projects/compiler-rt/lib/msan/msan_report.cpp
   37 static void DescribeStackOrigin(const char *so, uptr pc) {
   71     uptr pc;
  183 void DescribeMemoryRange(const void *x, uptr size) {
  185   uptr start = MEM_TO_SHADOW(x);
  186   uptr end = start + size;
  188   uptr s = start & ~3UL;
  191   uptr e = s + size;
  196   uptr pos = 0;  // Offset from aligned start.
  261 void ReportUMRInsideAddressRange(const char *what, const void *start, uptr size,
  262                                  uptr offset) {
projects/compiler-rt/lib/msan/msan_report.h
   27 void DescribeMemoryRange(const void *x, uptr size);
   28 void ReportUMRInsideAddressRange(const char *what, const void *start, uptr size,
   29                                  uptr offset);
projects/compiler-rt/lib/msan/msan_thread.cpp
   12   uptr PageSize = GetPageSizeCached();
   13   uptr size = RoundUpTo(sizeof(MsanThread), PageSize);
   23   uptr tls_size = 0;
   24   uptr stack_size = 0;
   40   for (uptr i = 0; i < dtls->dtv_size; ++i)
   62   uptr size = RoundUpTo(sizeof(MsanThread), GetPageSizeCached());
projects/compiler-rt/lib/msan/msan_thread.h
   30   uptr stack_top() { return stack_top_; }
   31   uptr stack_bottom() { return stack_bottom_; }
   32   uptr tls_begin() { return tls_begin_; }
   33   uptr tls_end() { return tls_end_; }
   36   bool AddrIsInStack(uptr addr) {
   55   uptr stack_top_;
   56   uptr stack_bottom_;
   57   uptr tls_begin_;
   58   uptr tls_end_;
projects/compiler-rt/lib/sanitizer_common/sanitizer_addrhashmap.h
   51     uptr cap;
   52     uptr size;
   56   static const uptr kBucketSize = 3;
   69     Handle(AddrHashMap<T, kSize> *map, uptr addr);
   70     Handle(AddrHashMap<T, kSize> *map, uptr addr, bool remove);
   71     Handle(AddrHashMap<T, kSize> *map, uptr addr, bool remove, bool create);
   85     uptr                   addr_;
   86     uptr                   addidx_;
   98   uptr calcHash(uptr addr);
   98   uptr calcHash(uptr addr);
  102 AddrHashMap<T, kSize>::Handle::Handle(AddrHashMap<T, kSize> *map, uptr addr) {
  111 AddrHashMap<T, kSize>::Handle::Handle(AddrHashMap<T, kSize> *map, uptr addr,
  121 AddrHashMap<T, kSize>::Handle::Handle(AddrHashMap<T, kSize> *map, uptr addr,
  167   uptr addr = h->addr_;
  168   uptr hash = calcHash(addr);
  185   for (uptr i = 0; i < kBucketSize; i++) {
  187     uptr addr1 = atomic_load(&c->addr, memory_order_acquire);
  198     for (uptr i = 0; i < add->size; i++) {
  200       uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  214   for (uptr i = 0; i < kBucketSize; i++) {
  216     uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  230     for (uptr i = 0; i < add->size; i++) {
  232       uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  254   for (uptr i = 0; i < kBucketSize; i++) {
  256     uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  266     const uptr kInitSize = 64;
  275     uptr oldsize = sizeof(*add) + (add->cap - 1) * sizeof(add->cells[0]);
  276     uptr newsize = oldsize * 2;
  287   uptr i = add->size++;
  300   uptr addr1 = atomic_load(&c->addr, memory_order_relaxed);
  317         uptr last = --add->size;
  320         uptr addr1 = atomic_load(&c1->addr, memory_order_relaxed);
  326       uptr last = --add->size;
  345 uptr AddrHashMap<T, kSize>::calcHash(uptr addr) {
  345 uptr AddrHashMap<T, kSize>::calcHash(uptr addr) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator.cpp
  108 static void *RawInternalAlloc(uptr size, InternalAllocatorCache *cache,
  109                               uptr alignment) {
  119 static void *RawInternalRealloc(void *ptr, uptr size,
  121   uptr alignment = 8;
  142 static void NORETURN ReportInternalAllocatorOutOfMemory(uptr requested_size) {
  149 void *InternalAlloc(uptr size, InternalAllocatorCache *cache, uptr alignment) {
  149 void *InternalAlloc(uptr size, InternalAllocatorCache *cache, uptr alignment) {
  159 void *InternalRealloc(void *addr, uptr size, InternalAllocatorCache *cache) {
  173 void *InternalReallocArray(void *addr, uptr count, uptr size,
  173 void *InternalReallocArray(void *addr, uptr count, uptr size,
  185 void *InternalCalloc(uptr count, uptr size, InternalAllocatorCache *cache) {
  185 void *InternalCalloc(uptr count, uptr size, InternalAllocatorCache *cache) {
  208 constexpr uptr kLowLevelAllocatorDefaultAlignment = 8;
  209 static uptr low_level_alloc_min_alignment = kLowLevelAllocatorDefaultAlignment;
  212 void *LowLevelAllocator::Allocate(uptr size) {
  216     uptr size_to_allocate = Max(size, GetPageSizeCached());
  231 void SetLowLevelAllocateMinAlignment(uptr alignment) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator.h
   48   void OnMap(uptr p, uptr size) const { }
   48   void OnMap(uptr p, uptr size) const { }
   49   void OnUnmap(uptr p, uptr size) const { }
   49   void OnUnmap(uptr p, uptr size) const { }
   53 typedef void (*ForEachChunkCallback)(uptr chunk, void *arg);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_bytemap.h
   25   void set(uptr idx, u8 val) {
   30   u8 operator[] (uptr idx) {
   56     for (uptr i = 0; i < kSize1; i++) {
   64   uptr size() const { return kSize1 * kSize2; }
   65   uptr size1() const { return kSize1; }
   66   uptr size2() const { return kSize2; }
   68   void set(uptr idx, u8 val) {
   75   u8 operator[] (uptr idx) const {
   84   u8 *Get(uptr idx) const {
   90   u8 *GetOrCreate(uptr idx) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_checks.h
   44 INLINE bool CheckAlignedAllocAlignmentAndSize(uptr alignment, uptr size) {
   44 INLINE bool CheckAlignedAllocAlignmentAndSize(uptr alignment, uptr size) {
   55 INLINE bool CheckPosixMemalignAlignment(uptr alignment) {
   61 INLINE bool CheckForCallocOverflow(uptr size, uptr n) {
   61 INLINE bool CheckForCallocOverflow(uptr size, uptr n) {
   64   uptr max = (uptr)-1L;
   70 INLINE bool CheckForPvallocOverflow(uptr size, uptr page_size) {
   70 INLINE bool CheckForPvallocOverflow(uptr size, uptr page_size) {
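CheckForCallocOverflow above computes `uptr max = (uptr)-1L` and rejects requests where nmemb * size would wrap around. A hedged sketch of a check with that shape, using division to avoid the overflowing multiplication itself:

    #include <cstdint>

    typedef uintptr_t uptr;

    // Returns true if count * size would overflow a uptr.
    static bool CallocWouldOverflow(uptr count, uptr size) {
      if (size == 0) return false;   // 0 * anything cannot overflow
      uptr max = (uptr)-1;           // largest representable uptr
      return count > max / size;     // equivalent to count * size > max
    }

Allocator entry points in these listings (msan_calloc, InternalCalloc, the reallocarray variants) typically gate on a check of this shape and call the corresponding ReportCallocOverflow / ReportReallocArrayOverflow routine when it fires.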
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_combined.h
   44   void *Allocate(AllocatorCache *cache, uptr size, uptr alignment) {
   44   void *Allocate(AllocatorCache *cache, uptr size, uptr alignment) {
   54     uptr original_size = size;
   97   void *Reallocate(AllocatorCache *cache, void *p, uptr new_size,
   98                    uptr alignment) {
  106     uptr old_size = GetActuallyAllocatedSize(p);
  107     uptr memcpy_size = Min(new_size, old_size);
  145   uptr GetActuallyAllocatedSize(void *p) {
  151   uptr TotalMemoryUsed() {
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_interface.h
   22 uptr __sanitizer_get_estimated_allocated_size(uptr size);
   22 uptr __sanitizer_get_estimated_allocated_size(uptr size);
   24 SANITIZER_INTERFACE_ATTRIBUTE uptr
   26 SANITIZER_INTERFACE_ATTRIBUTE uptr __sanitizer_get_current_allocated_bytes();
   27 SANITIZER_INTERFACE_ATTRIBUTE uptr __sanitizer_get_heap_size();
   28 SANITIZER_INTERFACE_ATTRIBUTE uptr __sanitizer_get_free_bytes();
   29 SANITIZER_INTERFACE_ATTRIBUTE uptr __sanitizer_get_unmapped_bytes();
   32     void (*malloc_hook)(const void *, uptr),
   36     void __sanitizer_malloc_hook(void *ptr, uptr size);
   44 __sanitizer_print_memory_profile(uptr top_percent, uptr max_number_of_contexts);
   44 __sanitizer_print_memory_profile(uptr top_percent, uptr max_number_of_contexts);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_internal.h
   26   static const uptr kSpaceBeg = 0;
   28   static const uptr kMetadataSize = 0;
   30   static const uptr kRegionSizeLog = 20;
   33   static const uptr kFlags = 0;
   42 void *InternalAlloc(uptr size, InternalAllocatorCache *cache = nullptr,
   43                     uptr alignment = 0);
   44 void *InternalRealloc(void *p, uptr size,
   46 void *InternalReallocArray(void *p, uptr count, uptr size,
   46 void *InternalReallocArray(void *p, uptr count, uptr size,
   48 void *InternalCalloc(uptr count, uptr size,
   48 void *InternalCalloc(uptr count, uptr size,
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_local_cache.h
   33   void *Allocate(SizeClassAllocator *allocator, uptr class_id) {
   48   void Deallocate(SizeClassAllocator *allocator, uptr class_id, void *p) {
   65     for (uptr i = 1; i < kNumClasses; i++) {
   74   static const uptr kNumClasses = SizeClassMap::kNumClasses;
   80     uptr class_size;
   89     for (uptr i = 1; i < kNumClasses; i++) {
   91       const uptr size = Allocator::ClassIdToSize(i);
   99                        uptr class_id) {
  101     const uptr num_requested_chunks = c->max_count / 2;
  109   NOINLINE void Drain(PerClass *c, SizeClassAllocator *allocator, uptr class_id,
  110                       uptr count) {
  112     const uptr first_idx_to_drain = c->count - count;
  132   TransferBatch *CreateBatch(uptr class_id, SizeClassAllocator *allocator,
  134     if (uptr batch_class_id = per_class_[class_id].batch_class_id)
  140   void DestroyBatch(uptr class_id, SizeClassAllocator *allocator,
  142     if (uptr batch_class_id = per_class_[class_id].batch_class_id)
  152   void *Allocate(SizeClassAllocator *allocator, uptr class_id) {
  167   void Deallocate(SizeClassAllocator *allocator, uptr class_id, void *p) {
  181     for (uptr i = 1; i < kNumClasses; i++) {
  190   static const uptr kBatchClassID = SizeClassMap::kBatchClassID;
  191   static const uptr kNumClasses = SizeClassMap::kNumClasses;
  200     uptr count;
  201     uptr max_count;
  202     uptr class_size;
  203     uptr batch_class_id;
  212     const uptr batch_class_id = SizeClassMap::ClassID(sizeof(TransferBatch));
  213     for (uptr i = 1; i < kNumClasses; i++) {
  215       const uptr size = Allocator::ClassIdToSize(i);
  216       const uptr max_cached = TransferBatch::MaxCached(size);
  235                        uptr class_id) {
  248                       uptr class_id) {
  249     const uptr count = Min(c->max_count / 2, c->count);
  250     const uptr first_idx_to_drain = c->count - count;
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
   56   static const uptr kSpaceBeg = Params::kSpaceBeg;
   58   static const uptr kMetadataSize = Params::kMetadataSize;
   60   static const uptr kRegionSizeLog = Params::kRegionSizeLog;
   77     static const uptr kMaxNumCached = SizeClassMap::kMaxNumCachedHint - 2;
   78     void SetFromArray(void *batch[], uptr count) {
   81       for (uptr i = 0; i < count; i++)
   84     uptr Count() const { return count_; }
   91       for (uptr i = 0, n = Count(); i < n; i++)
   96     static uptr AllocationSizeRequiredForNElements(uptr n) {
   96     static uptr AllocationSizeRequiredForNElements(uptr n) {
   97       return sizeof(uptr) * 2 + sizeof(void *) * n;
   99     static uptr MaxCached(uptr size) {
   99     static uptr MaxCached(uptr size) {
  106     uptr count_;
  110   static const uptr kBatchSize = sizeof(TransferBatch);
  114   static uptr ClassIdToSize(uptr class_id) {
  114   static uptr ClassIdToSize(uptr class_id) {
  139   void *MapWithCallback(uptr size) {
  145   void UnmapWithCallback(uptr beg, uptr size) {
  145   void UnmapWithCallback(uptr beg, uptr size) {
  150   static bool CanAllocate(uptr size, uptr alignment) {
  150   static bool CanAllocate(uptr size, uptr alignment) {
  157     uptr mem = reinterpret_cast<uptr>(p);
  158     uptr beg = ComputeRegionBeg(mem);
  159     uptr size = ClassIdToSize(GetSizeClass(p));
  161     uptr n = offset / (u32)size;  // 32-bit division
  162     uptr meta = (beg + kRegionSize) - (n + 1) * kMetadataSize;
  167                                         uptr class_id) {
  181   NOINLINE void DeallocateBatch(AllocatorStats *stat, uptr class_id,
  191     uptr mem = reinterpret_cast<uptr>(p);
  199   uptr GetSizeClass(const void *p) {
  205     uptr mem = reinterpret_cast<uptr>(p);
  206     uptr beg = ComputeRegionBeg(mem);
  207     uptr size = ClassIdToSize(GetSizeClass(p));
  210     uptr res = beg + (n * (u32)size);
  214   uptr GetActuallyAllocatedSize(void *p) {
  219   static uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); }
  219   static uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); }
  221   uptr TotalMemoryUsed() {
  223     uptr res = 0;
  224     for (uptr i = 0; i < kNumPossibleRegions; i++)
  231     for (uptr i = 0; i < kNumPossibleRegions; i++)
  239     for (uptr i = 0; i < kNumClasses; i++) {
  253     for (uptr region = 0; region < kNumPossibleRegions; region++)
  255         uptr chunk_size = ClassIdToSize(possible_regions[region]);
  256         uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize);
  257         uptr region_beg = region * kRegionSize;
  258         for (uptr chunk = region_beg;
  269   static uptr AdditionalSize() { return 0; }
  272   static const uptr kNumClasses = SizeClassMap::kNumClasses;
  275   static const uptr kRegionSize = 1 << kRegionSizeLog;
  276   static const uptr kNumPossibleRegions = kSpaceSize / kRegionSize;
  285   uptr ComputeRegionId(uptr mem) const {
  285   uptr ComputeRegionId(uptr mem) const {
  288     const uptr res = mem >> kRegionSizeLog;
  293   uptr ComputeRegionBeg(uptr mem) {
  293   uptr ComputeRegionBeg(uptr mem) {
  297   uptr AllocateRegion(AllocatorStats *stat, uptr class_id) {
  297   uptr AllocateRegion(AllocatorStats *stat, uptr class_id) {
  299     const uptr res = reinterpret_cast<uptr>(MmapAlignedOrDieOnFatalError(
  310   SizeClassInfo *GetSizeClassInfo(uptr class_id) {
  315   bool PopulateBatches(AllocatorCache *c, SizeClassInfo *sci, uptr class_id,
  316                        TransferBatch **current_batch, uptr max_count,
  317                        uptr *pointers_array, uptr count) {
  317                        uptr *pointers_array, uptr count) {
  323     for (uptr i = 0; i < count; i++) {
  341                         SizeClassInfo *sci, uptr class_id) {
  342     const uptr region = AllocateRegion(stat, class_id);
  349     const uptr size = ClassIdToSize(class_id);
  350     const uptr n_chunks = kRegionSize / (size + kMetadataSize);
  351     const uptr max_count = TransferBatch::MaxCached(size);
  354     constexpr uptr kShuffleArraySize = 48;
  355     uptr shuffle_array[kShuffleArraySize];
  356     uptr count = 0;
  357     for (uptr i = region; i < region + n_chunks * size; i += size) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
   49   static const uptr kSpaceBeg = Params::kSpaceBeg;
   50   static const uptr kSpaceSize = Params::kSpaceSize;
   51   static const uptr kMetadataSize = Params::kMetadataSize;
   64   static const uptr kCompactPtrScale = 4;
   65   CompactPtrT PointerToCompactPtr(uptr base, uptr ptr) const {
   65   CompactPtrT PointerToCompactPtr(uptr base, uptr ptr) const {
   68   uptr CompactPtrToPointer(uptr base, CompactPtrT ptr32) const {
   68   uptr CompactPtrToPointer(uptr base, CompactPtrT ptr32) const {
   73     uptr TotalSpaceSize = kSpaceSize + AdditionalSize();
   99     for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
  105   static bool CanAllocate(uptr size, uptr alignment) {
  105   static bool CanAllocate(uptr size, uptr alignment) {
  110   NOINLINE void ReturnToAllocator(AllocatorStats *stat, uptr class_id,
  111                                   const CompactPtrT *chunks, uptr n_chunks) {
  113     uptr region_beg = GetRegionBeginBySizeClass(class_id);
  117     uptr old_num_chunks = region->num_freed_chunks;
  118     uptr new_num_freed_chunks = old_num_chunks + n_chunks;
  128     for (uptr i = 0; i < n_chunks; i++)
  136   NOINLINE bool GetFromAllocator(AllocatorStats *stat, uptr class_id,
  137                                  CompactPtrT *chunks, uptr n_chunks) {
  139     uptr region_beg = GetRegionBeginBySizeClass(class_id);
  150     uptr base_idx = region->num_freed_chunks;
  151     for (uptr i = 0; i < n_chunks; i++)
  158     uptr P = reinterpret_cast<uptr>(p);
  164   uptr GetRegionBegin(const void *p) {
  167     uptr space_beg = SpaceBeg();
  172   uptr GetRegionBeginBySizeClass(uptr class_id) const {
  172   uptr GetRegionBeginBySizeClass(uptr class_id) const {
  176   uptr GetSizeClass(const void *p) {
  184     uptr class_id = GetSizeClass(p);
  185     uptr size = ClassIdToSize(class_id);
  187     uptr chunk_idx = GetChunkIdx((uptr)p, size);
  188     uptr reg_beg = GetRegionBegin(p);
  189     uptr beg = chunk_idx * size;
  190     uptr next_beg = beg + size;
  198   uptr GetActuallyAllocatedSize(void *p) {
  203   static uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); }
  203   static uptr ClassID(uptr size) { return SizeClassMap::ClassID(size); }
  206     uptr class_id = GetSizeClass(p);
  207     uptr size = ClassIdToSize(class_id);
  208     uptr chunk_idx = GetChunkIdx(reinterpret_cast<uptr>(p), size);
  209     uptr region_beg = GetRegionBeginBySizeClass(class_id);
  214   uptr TotalMemoryUsed() {
  215     uptr res = 0;
  216     for (uptr i = 0; i < kNumClasses; i++)
  226   static void FillMemoryProfile(uptr start, uptr rss, bool file, uptr *stats,
  226   static void FillMemoryProfile(uptr start, uptr rss, bool file, uptr *stats,
  226   static void FillMemoryProfile(uptr start, uptr rss, bool file, uptr *stats,
  227                            uptr stats_size) {
  228     for (uptr class_id = 0; class_id < stats_size; class_id++)
  233   void PrintStats(uptr class_id, uptr rss) {
  233   void PrintStats(uptr class_id, uptr rss) {
  236     uptr in_use = region->stats.n_allocated - region->stats.n_freed;
  237     uptr avail_chunks = region->allocated_user / ClassIdToSize(class_id);
  251     uptr rss_stats[kNumClasses];
  252     for (uptr class_id = 0; class_id < kNumClasses; class_id++)
  256     uptr total_mapped = 0;
  257     uptr total_rss = 0;
  258     uptr n_allocated = 0;
  259     uptr n_freed = 0;
  260     for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
  273     for (uptr class_id = 1; class_id < kNumClasses; class_id++)
  280     for (uptr i = 0; i < kNumClasses; i++) {
  294     for (uptr class_id = 1; class_id < kNumClasses; class_id++) {
  296       uptr chunk_size = ClassIdToSize(class_id);
  297       uptr region_beg = SpaceBeg() + class_id * kRegionSize;
  298       uptr region_allocated_user_size =
  300       for (uptr chunk = region_beg;
  309   static uptr ClassIdToSize(uptr class_id) {
  309   static uptr ClassIdToSize(uptr class_id) {
  313   static uptr AdditionalSize() {
  319   static const uptr kNumClasses = SizeClassMap::kNumClasses;
  320   static const uptr kNumClassesRounded = SizeClassMap::kNumClassesRounded;
  339       uptr counter_size_bits =
  345       uptr packing_ratio = kMaxCounterBits >> counter_size_bits_log;
  371     uptr Get(uptr i) const {
  371     uptr Get(uptr i) const {
  373       uptr index = i >> packing_ratio_log;
  374       uptr bit_offset = (i & bit_offset_mask) << counter_size_bits_log;
  378     void Inc(uptr i) const {
  380       uptr index = i >> packing_ratio_log;
  381       uptr bit_offset = (i & bit_offset_mask) << counter_size_bits_log;
  385     void IncRange(uptr from, uptr to) const {
  385     void IncRange(uptr from, uptr to) const {
  387       for (uptr i = from; i <= to; i++)
  438     const uptr page_size_scaled_log;
  440     uptr current_page;
  441     uptr current_range_start_page;
  450                                     uptr free_array_count, uptr chunk_size,
  450                                     uptr free_array_count, uptr chunk_size,
  451                                     uptr allocated_pages_count,
  453     const uptr page_size = GetPageSizeCached();
  457     uptr full_pages_chunk_count_max;
  494     const uptr chunk_size_scaled = chunk_size >> kCompactPtrScale;
  495     const uptr page_size_scaled = page_size >> kCompactPtrScale;
  496     const uptr page_size_scaled_log = Log2(page_size_scaled);
  502       for (uptr i = 0; i < free_array_count; i++)
  506       for (uptr i = 0; i < free_array_count; i++) {
  518       for (uptr i = 0; i < counters.GetCount(); i++)
  523       const uptr pn =
  525       const uptr pnc = pn * chunk_size_scaled;
  531       uptr prev_page_boundary = 0;
  532       uptr current_boundary = 0;
  533       for (uptr i = 0; i < counters.GetCount(); i++) {
  534         uptr page_boundary = prev_page_boundary + page_size_scaled;
  535         uptr chunks_per_page = pn;
  558   static const uptr kRegionSize = kSpaceSize / kNumClassesRounded;
  563   static const uptr kFreeArraySize = kRegionSize / 8;
  566   uptr NonConstSpaceBeg;
  567   uptr SpaceBeg() const {
  570   uptr SpaceEnd() const { return  SpaceBeg() + kSpaceSize; }
  576   static const uptr kUserMapSize = 1 << 16;
  578   static const uptr kMetaMapSize = 1 << 16;
  580   static const uptr kFreeArrayMapSize = 1 << 16;
  585     uptr n_allocated;
  586     uptr n_freed;
  590     uptr n_freed_at_last_release;
  591     uptr num_releases;
  598     uptr num_freed_chunks;  // Number of elements in the freearray.
  599     uptr mapped_free_array;  // Bytes mapped for freearray.
  600     uptr allocated_user;  // Bytes allocated for user memory.
  601     uptr allocated_meta;  // Bytes allocated for metadata.
  602     uptr mapped_user;  // Bytes mapped for user memory.
  603     uptr mapped_meta;  // Bytes mapped for metadata.
  611   RegionInfo *GetRegionInfo(uptr class_id) const {
  617   uptr GetMetadataEnd(uptr region_beg) const {
  617   uptr GetMetadataEnd(uptr region_beg) const {
  621   uptr GetChunkIdx(uptr chunk, uptr size) const {
  621   uptr GetChunkIdx(uptr chunk, uptr size) const {
  621   uptr GetChunkIdx(uptr chunk, uptr size) const {
  625     uptr offset = chunk % kRegionSize;
  633   CompactPtrT *GetFreeArray(uptr region_beg) const {
  637   bool MapWithCallback(uptr beg, uptr size, const char *name) {
  637   bool MapWithCallback(uptr beg, uptr size, const char *name) {
  638     uptr mapped = address_range.Map(beg, size, name);
  646   void MapWithCallbackOrDie(uptr beg, uptr size, const char *name) {
  646   void MapWithCallbackOrDie(uptr beg, uptr size, const char *name) {
  651   void UnmapWithCallbackOrDie(uptr beg, uptr size) {
  651   void UnmapWithCallbackOrDie(uptr beg, uptr size) {
  656   bool EnsureFreeArraySpace(RegionInfo *region, uptr region_beg,
  657                             uptr num_freed_chunks) {
  658     uptr needed_space = num_freed_chunks * sizeof(CompactPtrT);
  660       uptr new_mapped_free_array = RoundUpTo(needed_space, kFreeArrayMapSize);
  662       uptr current_map_end = reinterpret_cast<uptr>(GetFreeArray(region_beg)) +
  664       uptr new_map_size = new_mapped_free_array - region->mapped_free_array;
  674   bool IsRegionExhausted(RegionInfo *region, uptr class_id,
  675                          uptr additional_map_size) {
  688   NOINLINE bool PopulateFreeArray(AllocatorStats *stat, uptr class_id,
  689                                   RegionInfo *region, uptr requested_count) {
  691     const uptr region_beg = GetRegionBeginBySizeClass(class_id);
  692     const uptr size = ClassIdToSize(class_id);
  694     const uptr total_user_bytes =
  712       const uptr user_map_size =
  723     const uptr new_chunks_count =
  728       const uptr total_meta_bytes =
  730       const uptr meta_map_size = (total_meta_bytes > region->mapped_meta) ?
  746     const uptr total_freed_chunks = region->num_freed_chunks + new_chunks_count;
  750     for (uptr i = 0, chunk = region->allocated_user; i < new_chunks_count;
  775     MemoryMapper(const ThisT& base_allocator, uptr class_id)
  782     uptr GetReleasedRangesCount() const {
  786     uptr GetReleasedBytes() const {
  790     uptr MapPackedCounterArrayBuffer(uptr buffer_size) {
  790     uptr MapPackedCounterArrayBuffer(uptr buffer_size) {
  799     void UnmapPackedCounterArrayBuffer(uptr buffer, uptr buffer_size) {
  799     void UnmapPackedCounterArrayBuffer(uptr buffer, uptr buffer_size) {
  805       const uptr from_page = allocator.CompactPtrToPointer(region_base, from);
  806       const uptr to_page = allocator.CompactPtrToPointer(region_base, to);
  814     const uptr region_base;
  815     uptr released_ranges_count;
  816     uptr released_bytes;
  821   void MaybeReleaseToOS(uptr class_id, bool force) {
  823     const uptr chunk_size = ClassIdToSize(class_id);
  824     const uptr page_size = GetPageSizeCached();
  826     uptr n = region->num_freed_chunks;
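The primary64 entries above (lines 184-190, 297, 621-625) show how a chunk address is turned back into a size-class-relative index: each size class owns one region of kRegionSize bytes starting at SpaceBeg() + class_id * kRegionSize, so the chunk's offset within its region divided by the class's chunk size identifies it. A hedged sketch with the compile-time constants passed as parameters:

    #include <cstdint>

    typedef uintptr_t uptr;

    // One region per size class, laid out contiguously from space_beg.
    static uptr GetRegionBeg(uptr space_beg, uptr region_size, uptr class_id) {
      return space_beg + class_id * region_size;
    }

    // Index of the chunk within its size class's region.
    static uptr GetChunkIdx(uptr chunk, uptr chunk_size, uptr region_size) {
      uptr offset = chunk % region_size;   // offset inside the owning region
      return offset / chunk_size;
    }

    // First byte of that chunk, given the region start.
    static uptr GetBlockBegin(uptr region_beg, uptr chunk_idx, uptr chunk_size) {
      return region_beg + chunk_idx * chunk_size;
    }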
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_report.cpp
   43 void NORETURN ReportCallocOverflow(uptr count, uptr size,
   43 void NORETURN ReportCallocOverflow(uptr count, uptr size,
   54 void NORETURN ReportReallocArrayOverflow(uptr count, uptr size,
   54 void NORETURN ReportReallocArrayOverflow(uptr count, uptr size,
   66 void NORETURN ReportPvallocOverflow(uptr size, const StackTrace *stack) {
   76 void NORETURN ReportInvalidAllocationAlignment(uptr alignment,
   86 void NORETURN ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
   86 void NORETURN ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
  104 void NORETURN ReportInvalidPosixMemalignAlignment(uptr alignment,
  118 void NORETURN ReportAllocationSizeTooBig(uptr user_size, uptr max_size,
  118 void NORETURN ReportAllocationSizeTooBig(uptr user_size, uptr max_size,
  128 void NORETURN ReportOutOfMemory(uptr requested_size, const StackTrace *stack) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_report.h
   22 void NORETURN ReportCallocOverflow(uptr count, uptr size,
   22 void NORETURN ReportCallocOverflow(uptr count, uptr size,
   24 void NORETURN ReportReallocArrayOverflow(uptr count, uptr size,
   24 void NORETURN ReportReallocArrayOverflow(uptr count, uptr size,
   26 void NORETURN ReportPvallocOverflow(uptr size, const StackTrace *stack);
   27 void NORETURN ReportInvalidAllocationAlignment(uptr alignment,
   29 void NORETURN ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
   29 void NORETURN ReportInvalidAlignedAllocAlignment(uptr size, uptr alignment,
   31 void NORETURN ReportInvalidPosixMemalignAlignment(uptr alignment,
   33 void NORETURN ReportAllocationSizeTooBig(uptr user_size, uptr max_size,
   33 void NORETURN ReportAllocationSizeTooBig(uptr user_size, uptr max_size,
   35 void NORETURN ReportOutOfMemory(uptr requested_size, const StackTrace *stack);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_secondary.h
   22   INLINE void EnsureSpace(uptr n) { CHECK_LT(n, kMaxNumChunks); }
   25   uptr p_[kMaxNumChunks];
   35     uptr p = address_range_.Init(kMaxNumChunks * sizeof(uptr),
   35     uptr p = address_range_.Init(kMaxNumChunks * sizeof(uptr),
   41   INLINE void EnsureSpace(uptr n) {
   47               n_reserved_ * sizeof(uptr),
   48           kChunksBlockCount * sizeof(uptr));
   57   uptr n_reserved_;
   85   void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) {
   85   void *Allocate(AllocatorStats *stat, uptr size, uptr alignment) {
   87     uptr map_size = RoundUpMapSize(size);
   97     uptr map_beg = reinterpret_cast<uptr>(
  103     uptr map_end = map_beg + map_size;
  104     uptr res = map_beg + page_size_;
  115     uptr size_log = MostSignificantSetBitIndex(map_size);
  120       uptr idx = n_chunks_++;
  138       uptr idx = h->chunk_idx;
  153   uptr TotalMemoryUsed() {
  155     uptr res = 0;
  156     for (uptr i = 0; i < n_chunks_; i++) {
  168   uptr GetActuallyAllocatedSize(void *p) {
  183     uptr p = reinterpret_cast<uptr>(ptr);
  185     uptr nearest_chunk = 0;
  188     for (uptr i = 0; i < n_chunks_; i++) {
  189       uptr ch = reinterpret_cast<uptr>(chunks[i]);
  211     for (uptr i = 0; i < n_chunks_; i++)
  220     uptr p = reinterpret_cast<uptr>(ptr);
  221     uptr n = n_chunks_;
  230     uptr beg = 0, end = n - 1;
  234       uptr mid = (beg + end) / 2;  // Invariant: mid >= beg + 1
  260     for (uptr i = 0; i < ARRAY_SIZE(stats.by_size_log); i++) {
  261       uptr c = stats.by_size_log[i];
  283     for (uptr i = 0; i < n_chunks_; i++) {
  294     uptr map_beg;
  295     uptr map_size;
  296     uptr size;
  297     uptr chunk_idx;
  300   Header *GetHeader(uptr p) {
  313   uptr RoundUpMapSize(uptr size) {
  313   uptr RoundUpMapSize(uptr size) {
  317   uptr page_size_;
  320   uptr n_chunks_;
  323     uptr n_allocs, n_frees, currently_allocated, max_allocated, by_size_log[64];
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_size_class_map.h
  122   static const uptr kMinSize = 1 << kMinSizeLog;
  123   static const uptr kMidSize = 1 << kMidSizeLog;
  124   static const uptr kMidClass = kMidSize / kMinSize;
  125   static const uptr S = kNumBits - 1;
  126   static const uptr M = (1 << S) - 1;
  131   static const uptr kMaxNumCachedHint = kMaxNumCachedHintT;
  134   static const uptr kMaxSize = 1UL << kMaxSizeLog;
  135   static const uptr kNumClasses =
  137   static const uptr kLargestClassID = kNumClasses - 2;
  138   static const uptr kBatchClassID = kNumClasses - 1;
  140   static const uptr kNumClassesRounded =
  145   static uptr Size(uptr class_id) {
  145   static uptr Size(uptr class_id) {
  150       return kMaxNumCachedHint * sizeof(uptr);
  154     uptr t = kMidSize << (class_id >> S);
  158   static uptr ClassID(uptr size) {
  158   static uptr ClassID(uptr size) {
  163     const uptr l = MostSignificantSetBitIndex(size);
  164     const uptr hbits = (size >> (l - S)) & M;
  165     const uptr lbits = size & ((1U << (l - S)) - 1);
  166     const uptr l1 = l - kMidSizeLog;
  170   static uptr MaxCachedHint(uptr size) {
  170   static uptr MaxCachedHint(uptr size) {
  174     uptr n;
  180     return Max<uptr>(1U, Min(kMaxNumCachedHint, n));
  184     uptr prev_s = 0;
  185     uptr total_cached = 0;
  186     for (uptr i = 0; i < kNumClasses; i++) {
  187       uptr s = Size(i);
  190       uptr d = s - prev_s;
  191       uptr p = prev_s ? (d * 100 / prev_s) : 0;
  192       uptr l = s ? MostSignificantSetBitIndex(s) : 0;
  193       uptr cached = MaxCachedHint(s) * s;
  206     for (uptr c = 1; c < kNumClasses; c++) {
  208       uptr s = Size(c);
  220     for (uptr s = 1; s <= kMaxSize; s++) {
  221       uptr c = ClassID(s);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_stats.h
   23 typedef uptr AllocatorStatCounters[AllocatorStatCount];
   33   void Add(AllocatorStat i, uptr v) {
   38   void Sub(AllocatorStat i, uptr v) {
   43   void Set(AllocatorStat i, uptr v) {
   47   uptr Get(AllocatorStat i) const {
   87     internal_memset(s, 0, AllocatorStatCount * sizeof(uptr));
projects/compiler-rt/lib/sanitizer_common/sanitizer_atomic.h
   56   typedef uptr Type;
projects/compiler-rt/lib/sanitizer_common/sanitizer_bitvector.h
   24   enum SizeEnum : uptr { kSize = sizeof(basic_int_t) * 8 };
   26   uptr size() const { return kSize; }
   33   bool setBit(uptr idx) {
   40   bool clearBit(uptr idx) {
   46   bool getBit(uptr idx) const { return (bits_ & mask(idx)) != 0; }
   48   uptr getAndClearFirstOne() {
   50     uptr idx = LeastSignificantSetBitIndex(bits_);
   92     uptr next() { return bv_.getAndClearFirstOne(); }
   99   basic_int_t mask(uptr idx) const {
  117   enum SizeEnum : uptr { kSize = BV::kSize * BV::kSize * kLevel1Size };
  120   uptr size() const { return kSize; }
  123     for (uptr i = 0; i < kLevel1Size; i++)
  128     for (uptr i0 = 0; i0 < kLevel1Size; i0++) {
  130       for (uptr i1 = 0; i1 < BV::kSize; i1++)
  136     for (uptr i = 0; i < kLevel1Size; i++)
  143   bool setBit(uptr idx) {
  145     uptr i0 = idx0(idx);
  146     uptr i1 = idx1(idx);
  147     uptr i2 = idx2(idx);
  158   bool clearBit(uptr idx) {
  160     uptr i0 = idx0(idx);
  161     uptr i1 = idx1(idx);
  162     uptr i2 = idx2(idx);
  172   bool getBit(uptr idx) const {
  174     uptr i0 = idx0(idx);
  175     uptr i1 = idx1(idx);
  176     uptr i2 = idx2(idx);
  181   uptr getAndClearFirstOne() {
  182     for (uptr i0 = 0; i0 < kLevel1Size; i0++) {
  184       uptr i1 = l1_[i0].getAndClearFirstOne();
  185       uptr i2 = l2_[i0][i1].getAndClearFirstOne();
  188       uptr res = i0 * BV::kSize * BV::kSize + i1 * BV::kSize + i2;
  199     for (uptr i0 = 0; i0 < kLevel1Size; i0++) {
  202         uptr i1 = t.getAndClearFirstOne();
  215     for (uptr i0 = 0; i0 < kLevel1Size; i0++) {
  221           uptr i1 = t.getAndClearFirstOne();
  235     for (uptr i0 = 0; i0 < kLevel1Size; i0++) {
  239         uptr i1 = t.getAndClearFirstOne();
  256     for (uptr i0 = 0; i0 < kLevel1Size; i0++) {
  260         uptr i1 = t.getAndClearFirstOne();
  283       for (uptr i = i0_; i < kLevel1Size; i++)
  288     uptr next() {
  308       uptr i2 = it2_.next();
  309       uptr res = i0_ * BV::kSize * BV::kSize + i1_ * BV::kSize + i2;
  319     uptr i0_, i1_;
  324   void check(uptr idx) const { CHECK_LE(idx, size()); }
  326   uptr idx0(uptr idx) const {
  326   uptr idx0(uptr idx) const {
  327     uptr res = idx / (BV::kSize * BV::kSize);
  332   uptr idx1(uptr idx) const {
  332   uptr idx1(uptr idx) const {
  333     uptr res = (idx / BV::kSize) % BV::kSize;
  338   uptr idx2(uptr idx) const {
  338   uptr idx2(uptr idx) const {
  339     uptr res = idx % BV::kSize;
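getAndClearFirstOne in the bit-vector entries above pairs LeastSignificantSetBitIndex with a bit clear to pop set bits one at a time. A hedged sketch using the GCC/Clang count-trailing-zeros builtin (an assumption of this sketch, not a statement about the real helper's implementation):

    #include <cstdint>

    typedef uintptr_t uptr;

    // Returns the index of the least-significant set bit and clears it.
    // Caller must ensure bits != 0 (the real class CHECKs this).
    static uptr GetAndClearFirstOne(uptr &bits) {
      uptr idx = (uptr)__builtin_ctzll((unsigned long long)bits);  // lowest set bit
      bits &= bits - 1;                                            // clear exactly that bit
      return idx;
    }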
projects/compiler-rt/lib/sanitizer_common/sanitizer_bvgraph.h
   27   enum SizeEnum : uptr { kSize = BV::kSize };
   28   uptr size() const { return kSize; }
   31     for (uptr i = 0; i < size(); i++)
   36     for (uptr i = 0; i < size(); i++)
   43   bool addEdge(uptr from, uptr to) {
   43   bool addEdge(uptr from, uptr to) {
   49   uptr addEdges(const BV &from, uptr to, uptr added_edges[],
   49   uptr addEdges(const BV &from, uptr to, uptr added_edges[],
   49   uptr addEdges(const BV &from, uptr to, uptr added_edges[],
   50                 uptr max_added_edges) {
   51     uptr res = 0;
   54       uptr node = t1.getAndClearFirstOne();
   68   bool hasEdge(uptr from, uptr to) { return v[from].getBit(to); }
   68   bool hasEdge(uptr from, uptr to) { return v[from].getBit(to); }
   71   bool removeEdge(uptr from, uptr to) {
   71   bool removeEdge(uptr from, uptr to) {
   78     for (uptr from = 0; from < size(); from++) {
   90       uptr idx = t1.getAndClearFirstOne();
   99   void removeEdgesFrom(uptr from) {
  103   bool hasEdge(uptr from, uptr to) const {
  103   bool hasEdge(uptr from, uptr to) const {
  110   bool isReachable(uptr from, const BV &targets) {
  117       uptr idx = to_visit.getAndClearFirstOne();
  127   uptr findPath(uptr from, const BV &targets, uptr *path, uptr path_size) {
  127   uptr findPath(uptr from, const BV &targets, uptr *path, uptr path_size) {
  127   uptr findPath(uptr from, const BV &targets, uptr *path, uptr path_size) {
  127   uptr findPath(uptr from, const BV &targets, uptr *path, uptr path_size) {
  136       uptr idx = it.next();
  137       if (uptr res = findPath(idx, targets, path + 1, path_size - 1))
  144   uptr findShortestPath(uptr from, const BV &targets, uptr *path,
  144   uptr findShortestPath(uptr from, const BV &targets, uptr *path,
  144   uptr findShortestPath(uptr from, const BV &targets, uptr *path,
  145                         uptr path_size) {
  146     for (uptr p = 1; p <= path_size; p++)
  153   void check(uptr idx1, uptr idx2) const {
  153   void check(uptr idx1, uptr idx2) const {
projects/compiler-rt/lib/sanitizer_common/sanitizer_common.cpp
   26 uptr PageSizeCached;
   31 uptr stoptheworld_tracer_pid = 0;
   34 uptr stoptheworld_tracer_ppid = 0;
   36 void NORETURN ReportMmapFailureAndDie(uptr size, const char *mem_type,
   57 typedef bool UptrComparisonFunction(const uptr &a, const uptr &b);
   57 typedef bool UptrComparisonFunction(const uptr &a, const uptr &b);
  130 void LoadedModule::set(const char *module_name, uptr base_address) {
  136 void LoadedModule::set(const char *module_name, uptr base_address,
  160 void LoadedModule::addAddressRange(uptr beg, uptr end, bool executable,
  160 void LoadedModule::addAddressRange(uptr beg, uptr end, bool executable,
  170 bool LoadedModule::containsAddress(uptr address) const {
  180 void IncreaseTotalMmap(uptr size) {
  182   uptr total_mmaped =
  189 void DecreaseTotalMmap(uptr size) {
  243 static uptr ReadProcessName(/*out*/ char *buf, uptr buf_len) {
  243 static uptr ReadProcessName(/*out*/ char *buf, uptr buf_len) {
  246   uptr len = internal_strlen(s);
  266 uptr ReadBinaryNameCached(/*out*/char *buf, uptr buf_len) {
  266 uptr ReadBinaryNameCached(/*out*/char *buf, uptr buf_len) {
  268   uptr name_len = internal_strlen(binary_name_cache_str);
  281   for (uptr i = 0; argv[i]; ++i)
  289   void (*malloc_hook)(const void *, uptr);
  295 void RunMallocHooks(const void *ptr, uptr size) {
  311 static int InstallMallocFreeHooks(void (*malloc_hook)(const void *, uptr),
  342                                                                   uptr),
projects/compiler-rt/lib/sanitizer_common/sanitizer_common.h
   38 const uptr kWordSize = SANITIZER_WORDSIZE / 8;
   39 const uptr kWordSizeInBits = 8 * kWordSize;
   41 const uptr kCacheLineSize = SANITIZER_CACHE_LINE_SIZE;
   43 const uptr kMaxPathLength = 4096;
   45 const uptr kMaxThreadStackSize = 1 << 30;  // 1Gb
   47 static const uptr kErrorMessageBufferSize = 1 << 16;
   72 uptr GetPageSize();
   73 extern uptr PageSizeCached;
   74 INLINE uptr GetPageSizeCached() {
   80 uptr GetMmapGranularity();
   81 uptr GetMaxVirtualAddress();
   82 uptr GetMaxUserVirtualAddress();
   86 uptr GetThreadSelf();
   87 void GetThreadStackTopAndBottom(bool at_initialization, uptr *stack_top,
   88                                 uptr *stack_bottom);
   89 void GetThreadStackAndTls(bool main, uptr *stk_addr, uptr *stk_size,
   89 void GetThreadStackAndTls(bool main, uptr *stk_addr, uptr *stk_size,
   90                           uptr *tls_addr, uptr *tls_size);
   90                           uptr *tls_addr, uptr *tls_size);
   93 void *MmapOrDie(uptr size, const char *mem_type, bool raw_report = false);
   94 INLINE void *MmapOrDieQuietly(uptr size, const char *mem_type) {
   97 void UnmapOrDie(void *addr, uptr size);
  100 void *MmapOrDieOnFatalError(uptr size, const char *mem_type);
  101 bool MmapFixedNoReserve(uptr fixed_addr, uptr size, const char *name = nullptr)
  101 bool MmapFixedNoReserve(uptr fixed_addr, uptr size, const char *name = nullptr)
  103 bool MmapFixedSuperNoReserve(uptr fixed_addr, uptr size,
  103 bool MmapFixedSuperNoReserve(uptr fixed_addr, uptr size,
  105 void *MmapNoReserveOrDie(uptr size, const char *mem_type);
  106 void *MmapFixedOrDie(uptr fixed_addr, uptr size, const char *name = nullptr);
  106 void *MmapFixedOrDie(uptr fixed_addr, uptr size, const char *name = nullptr);
  109 void *MmapFixedOrDieOnFatalError(uptr fixed_addr, uptr size,
  109 void *MmapFixedOrDieOnFatalError(uptr fixed_addr, uptr size,
  111 void *MmapFixedNoAccess(uptr fixed_addr, uptr size, const char *name = nullptr);
  111 void *MmapFixedNoAccess(uptr fixed_addr, uptr size, const char *name = nullptr);
  112 void *MmapNoAccess(uptr size);
  115 void *MmapAlignedOrDieOnFatalError(uptr size, uptr alignment,
  115 void *MmapAlignedOrDieOnFatalError(uptr size, uptr alignment,
  119 bool MprotectNoAccess(uptr addr, uptr size);
  119 bool MprotectNoAccess(uptr addr, uptr size);
  120 bool MprotectReadOnly(uptr addr, uptr size);
  120 bool MprotectReadOnly(uptr addr, uptr size);
  125 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
  125 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
  125 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
  125 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
  126                               uptr *largest_gap_found, uptr *max_occupied_addr);
  126                               uptr *largest_gap_found, uptr *max_occupied_addr);
  129 bool MemoryRangeIsAvailable(uptr range_start, uptr range_end);
  129 bool MemoryRangeIsAvailable(uptr range_start, uptr range_end);
  132 void ReleaseMemoryPagesToOS(uptr beg, uptr end);
  132 void ReleaseMemoryPagesToOS(uptr beg, uptr end);
  133 void IncreaseTotalMmap(uptr size);
  134 void DecreaseTotalMmap(uptr size);
  135 uptr GetRSS();
  136 void SetShadowRegionHugePageMode(uptr addr, uptr length);
  136 void SetShadowRegionHugePageMode(uptr addr, uptr length);
  137 bool DontDumpShadowMemory(uptr addr, uptr length);
  137 bool DontDumpShadowMemory(uptr addr, uptr length);
  140 void RunMallocHooks(const void *ptr, uptr size);
  145   uptr Init(uptr size, const char *name = nullptr, uptr fixed_addr = 0);
  145   uptr Init(uptr size, const char *name = nullptr, uptr fixed_addr = 0);
  145   uptr Init(uptr size, const char *name = nullptr, uptr fixed_addr = 0);
  146   uptr Map(uptr fixed_addr, uptr size, const char *name = nullptr);
  146   uptr Map(uptr fixed_addr, uptr size, const char *name = nullptr);
  146   uptr Map(uptr fixed_addr, uptr size, const char *name = nullptr);
  147   uptr MapOrDie(uptr fixed_addr, uptr size, const char *name = nullptr);
  147   uptr MapOrDie(uptr fixed_addr, uptr size, const char *name = nullptr);
  147   uptr MapOrDie(uptr fixed_addr, uptr size, const char *name = nullptr);
  148   void Unmap(uptr addr, uptr size);
  148   void Unmap(uptr addr, uptr size);
  150   uptr size() const { return size_; }
  154   uptr size_;
  156   uptr os_handle_;
  159 typedef void (*fill_profile_f)(uptr start, uptr rss, bool file,
  159 typedef void (*fill_profile_f)(uptr start, uptr rss, bool file,
  160                                /*out*/uptr *stats, uptr stats_size);
  160                                /*out*/uptr *stats, uptr stats_size);
  165 void GetMemoryProfile(fill_profile_f cb, uptr *stats, uptr stats_size);
  165 void GetMemoryProfile(fill_profile_f cb, uptr *stats, uptr stats_size);
  173   void *Allocate(uptr size);
  179 void SetLowLevelAllocateMinAlignment(uptr alignment);
  180 typedef void (*LowLevelAllocateCallback)(uptr ptr, uptr size);
  180 typedef void (*LowLevelAllocateCallback)(uptr ptr, uptr size);
  186 void CatastrophicErrorWrite(const char *buffer, uptr length);
  211 extern uptr stoptheworld_tracer_pid;
  212 extern uptr stoptheworld_tracer_ppid;
  214 bool IsAccessibleMemoryRange(uptr beg, uptr size);
  214 bool IsAccessibleMemoryRange(uptr beg, uptr size);
  223 uptr ReadBinaryName(/*out*/char *buf, uptr buf_len);
  223 uptr ReadBinaryName(/*out*/char *buf, uptr buf_len);
  224 uptr ReadBinaryNameCached(/*out*/char *buf, uptr buf_len);
  224 uptr ReadBinaryNameCached(/*out*/char *buf, uptr buf_len);
  225 uptr ReadLongProcessName(/*out*/ char *buf, uptr buf_len);
  225 uptr ReadLongProcessName(/*out*/ char *buf, uptr buf_len);
  243 void SetStackSizeLimitInBytes(uptr limit);
  253 uptr GetTlsSize();
  268 void NORETURN ReportMmapFailureAndDie(uptr size, const char *mem_type,
  351 INLINE uptr MostSignificantSetBitIndex(uptr x) {
  351 INLINE uptr MostSignificantSetBitIndex(uptr x) {
  368 INLINE uptr LeastSignificantSetBitIndex(uptr x) {
  368 INLINE uptr LeastSignificantSetBitIndex(uptr x) {
  385 INLINE bool IsPowerOfTwo(uptr x) {
  389 INLINE uptr RoundUpToPowerOfTwo(uptr size) {
  389 INLINE uptr RoundUpToPowerOfTwo(uptr size) {
  393   uptr up = MostSignificantSetBitIndex(size);
  399 INLINE uptr RoundUpTo(uptr size, uptr boundary) {
  399 INLINE uptr RoundUpTo(uptr size, uptr boundary) {
  399 INLINE uptr RoundUpTo(uptr size, uptr boundary) {
  404 INLINE uptr RoundDownTo(uptr x, uptr boundary) {
  404 INLINE uptr RoundDownTo(uptr x, uptr boundary) {
  404 INLINE uptr RoundDownTo(uptr x, uptr boundary) {
  408 INLINE bool IsAligned(uptr a, uptr alignment) {
  408 INLINE bool IsAligned(uptr a, uptr alignment) {
  412 INLINE uptr Log2(uptr x) {
  412 INLINE uptr Log2(uptr x) {
  445   void Initialize(uptr initial_capacity) {
  452   T &operator[](uptr i) {
  456   const T &operator[](uptr i) const {
  463       uptr new_capacity = RoundUpToPowerOfTwo(size_ + 1);
  476   uptr size() const {
  485   uptr capacity() const { return capacity_bytes_ / sizeof(T); }
  486   void reserve(uptr new_size) {
  491   void resize(uptr new_size) {
  522   void Realloc(uptr new_capacity) {
  525     uptr new_capacity_bytes =
  535   uptr capacity_bytes_;
  536   uptr size_;
  556   explicit InternalMmapVector(uptr cnt) {
  570   explicit InternalScopedString(uptr max_length)
  574   uptr length() { return length_; }
  582   uptr length_;
  592 void Sort(T *v, uptr size, Compare comp = {}) {
  596   for (uptr i = 1; i < size; i++) {
  597     uptr j, p;
  608   for (uptr i = size - 1; i > 0; i--) {
  610     uptr j, max_ind;
  612       uptr left = 2 * j + 1;
  613       uptr right = 2 * j + 2;
  630 uptr InternalLowerBound(const Container &v, uptr first, uptr last,
  630 uptr InternalLowerBound(const Container &v, uptr first, uptr last,
  630 uptr InternalLowerBound(const Container &v, uptr first, uptr last,
  633     uptr mid = (first + last) / 2;
  659                       uptr max_len = 1 << 26, error_t *errno_p = nullptr);
  669 bool ReadFileToBuffer(const char *file_name, char **buff, uptr *buff_size,
  670                       uptr *read_len, uptr max_len = 1 << 26,
  670                       uptr *read_len, uptr max_len = 1 << 26,
  700 const uptr kModuleUUIDSize = 16;
  701 const uptr kMaxSegName = 16;
  716   void set(const char *module_name, uptr base_address);
  717   void set(const char *module_name, uptr base_address, ModuleArch arch,
  720   void addAddressRange(uptr beg, uptr end, bool executable, bool writable,
  720   void addAddressRange(uptr beg, uptr end, bool executable, bool writable,
  722   bool containsAddress(uptr address) const;
  725   uptr base_address() const { return base_address_; }
  726   uptr max_executable_address() const { return max_executable_address_; }
  733     uptr beg;
  734     uptr end;
  739     AddressRange(uptr beg, uptr end, bool executable, bool writable,
  739     AddressRange(uptr beg, uptr end, bool executable, bool writable,
  754   uptr base_address_;
  755   uptr max_executable_address_;
  774   uptr size() const { return modules_.size(); }
  775   const LoadedModule &operator[](uptr i) const {
  792   static const uptr kInitialCapacity = 1 << 14;
  797 typedef void (*RangeIteratorCallback)(uptr begin, uptr end, void *arg);
  797 typedef void (*RangeIteratorCallback)(uptr begin, uptr end, void *arg);
  847 INLINE uptr GetPthreadDestructorIterations() {
  877   uptr addr;
  878   uptr pc;
  879   uptr sp;
  880   uptr bp;
  920   uptr GetAddress() const;
  957   uptr n_uniq_ids;
  958   uptr allocated;
  969 bool GetRandom(void *buffer, uptr length, bool blocking = true);
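The sanitizer_common.h entries above include the power-of-two rounding helpers (RoundUpTo, RoundDownTo, IsAligned, IsPowerOfTwo) that recur throughout this index. A sketch of the same mask arithmetic, assuming boundary/alignment is a power of two, which is what makes the masks valid:

    #include <cstdint>

    typedef uintptr_t uptr;

    static inline bool IsPowerOfTwo(uptr x) { return (x & (x - 1)) == 0; }

    static inline uptr RoundUpTo(uptr size, uptr boundary) {
      return (size + boundary - 1) & ~(boundary - 1);  // next multiple of boundary
    }

    static inline uptr RoundDownTo(uptr x, uptr boundary) {
      return x & ~(boundary - 1);                      // previous multiple of boundary
    }

    static inline bool IsAligned(uptr a, uptr alignment) {
      return (a & (alignment - 1)) == 0;
    }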
projects/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors.inc
  440   uptr i;
  464   uptr i;
  470   uptr i1 = i;
  471   uptr i2 = i;
  505   uptr i;
  527   uptr i;
  533   uptr i1 = i;
  534   uptr i2 = i;
  557     uptr len1 = REAL(strlen)(s1);
  558     uptr len2 = REAL(strlen)(s2);
  702   uptr len = result - s + 1;
  833                             int (*real_fn)(const void *, const void *, uptr),
  834                             const void *a1, const void *a2, uptr size) {
  846       uptr i;
  909   uptr len = res ? (char *)res - (const char *)s + 1 : n;
 2477   uptr sz = __sanitizer_in_addr_sz(af);
 2497     uptr sz = __sanitizer_in_addr_sz(af);
 2519     uptr sz = __sanitizer_in_addr_sz(af_inet);
 2998 static void read_msghdr_control(void *ctx, void *control, uptr controllen) {
 3000       RoundUpTo(sizeof(__sanitizer_cmsghdr), sizeof(uptr));
 3009     if (p + RoundUpTo(cmsg->cmsg_len, sizeof(uptr)) > control_end) break;
 3022     p += RoundUpTo(cmsg->cmsg_len, sizeof(uptr));
 3242   uptr res = REAL(ptrace)(request, pid, addr, data);
 3946     for (uptr i = 0; i < p->we_wordc; ++i) {
 5171   uptr tls_begin, tls_end;
 6154 static uptr wrapped_read(void *cookie, char *buf, uptr size) {
 6154 static uptr wrapped_read(void *cookie, char *buf, uptr size) {
 6161 static uptr wrapped_write(void *cookie, const char *buf, uptr size) {
 6161 static uptr wrapped_write(void *cookie, const char *buf, uptr size) {
 6317     uptr page_size = GetPageSizeCached();
 6318     uptr vec_size = ((length + page_size - 1) & (~(page_size - 1))) / page_size;
projects/compiler-rt/lib/sanitizer_common/sanitizer_common_libcdep.cpp
   34   const uptr hard_rss_limit_mb = common_flags()->hard_rss_limit_mb;
   35   const uptr soft_rss_limit_mb = common_flags()->soft_rss_limit_mb;
   37   uptr prev_reported_rss = 0;
   38   uptr prev_reported_stack_depot_size = 0;
   40   uptr rss_during_last_reported_profile = 0;
   43     const uptr current_rss_mb = GetRSS() >> 20;
projects/compiler-rt/lib/sanitizer_common/sanitizer_common_syscalls.inc
  190 PRE_SYSCALL(read)(long fd, void *buf, uptr count) {
  196 POST_SYSCALL(read)(long res, long fd, void *buf, uptr count) {
 1317     uptr op = iocbpp[i]->aio_lio_opcode;
 1320     uptr len = (uptr)iocbpp[i]->aio_nbytes;
 1327       for (uptr v = 0; v < len; v++)
 1331       for (uptr v = 0; v < len; v++)
 2877 PRE_SYSCALL(getrandom)(void *buf, uptr count, long flags) {
 2883 POST_SYSCALL(getrandom)(long res, void *buf, uptr count, long flags) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_coverage_libcdep_new.cpp
   48                                 const uptr* pcs, uptr len) {
   48                                 const uptr* pcs, uptr len) {
   57 static void SanitizerDumpCoverage(const uptr* unsorted_pcs, uptr len) {
   57 static void SanitizerDumpCoverage(const uptr* unsorted_pcs, uptr len) {
   62   uptr* pcs = static_cast<uptr*>(InternalAlloc(len * sizeof(uptr)));
   62   uptr* pcs = static_cast<uptr*>(InternalAlloc(len * sizeof(uptr)));
   64   internal_memcpy(pcs, unsorted_pcs, len * sizeof(uptr));
   68   uptr last_base = 0;
   69   uptr module_start_idx = 0;
   71   for (uptr i = 0; i < len; ++i) {
   72     const uptr pc = pcs[i];
   79     uptr module_base = pc - pcs[i];
  128   void TracePcGuard(u32* guard, uptr pc) {
  149   InternalMmapVectorNoCtor<uptr> pc_vector;
  169 SANITIZER_INTERFACE_ATTRIBUTE void __sanitizer_dump_coverage(const uptr* pcs,
  170                                                              uptr len) {
  216 SANITIZER_TLS_INITIAL_EXEC_ATTRIBUTE uptr __sancov_lowest_stack;
projects/compiler-rt/lib/sanitizer_common/sanitizer_deadlock_detector.h
   48   void ensureCurrentEpoch(uptr current_epoch) {
   56   uptr getEpoch() const { return epoch_; }
   59   bool addLock(uptr lock_id, uptr current_epoch, u32 stk) {
   59   bool addLock(uptr lock_id, uptr current_epoch, u32 stk) {
   75   void removeLock(uptr lock_id) {
   99   u32 findLockContext(uptr lock_id) {
  100     for (uptr i = 0; i < n_all_locks_; i++)
  106   const BV &getLocks(uptr current_epoch) const {
  111   uptr getNumLocks() const { return n_all_locks_; }
  112   uptr getLock(uptr idx) const { return all_locks_with_contexts_[idx].lock; }
  112   uptr getLock(uptr idx) const { return all_locks_with_contexts_[idx].lock; }
  116   uptr epoch_;
  117   uptr recursive_locks[64];
  118   uptr n_recursive_locks;
  124   uptr n_all_locks_;
  139   uptr size() const { return g_.size(); }
  154   uptr newNode(uptr data) {
  154   uptr newNode(uptr data) {
  182   uptr getData(uptr node) const { return data_[nodeToIndex(node)]; }
  182   uptr getData(uptr node) const { return data_[nodeToIndex(node)]; }
  184   bool nodeBelongsToCurrentEpoch(uptr node) {
  188   void removeNode(uptr node) {
  189     uptr idx = nodeToIndex(node);
  202   bool onLockBefore(DeadlockDetectorTLS<BV> *dtls, uptr cur_node) {
  204     uptr cur_idx = nodeToIndex(cur_node);
  208   u32 findLockContext(DeadlockDetectorTLS<BV> *dtls, uptr node) {
  213   void onLockAfter(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, u32 stk = 0) {
  215     uptr cur_idx = nodeToIndex(cur_node);
  221   bool hasAllEdges(DeadlockDetectorTLS<BV> *dtls, uptr cur_node) {
  222     uptr local_epoch = dtls->getEpoch();
  226       uptr cur_idx = nodeToIndexUnchecked(cur_node);
  227       for (uptr i = 0, n = dtls->getNumLocks(); i < n; i++) {
  240   uptr addEdges(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, u32 stk,
  240   uptr addEdges(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, u32 stk,
  243     uptr cur_idx = nodeToIndex(cur_node);
  244     uptr added_edges[40];
  245     uptr n_added_edges = g_.addEdges(dtls->getLocks(current_epoch_), cur_idx,
  247     for (uptr i = 0; i < n_added_edges; i++) {
  258   bool findEdge(uptr from_node, uptr to_node, u32 *stk_from, u32 *stk_to,
  258   bool findEdge(uptr from_node, uptr to_node, u32 *stk_from, u32 *stk_to,
  260     uptr from_idx = nodeToIndex(from_node);
  261     uptr to_idx = nodeToIndex(to_node);
  262     for (uptr i = 0; i < n_edges_; i++) {
  275   bool onLock(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, u32 stk = 0) {
  288   bool onTryLock(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, u32 stk = 0) {
  290     uptr cur_idx = nodeToIndex(cur_node);
  298   bool onFirstLock(DeadlockDetectorTLS<BV> *dtls, uptr node, u32 stk = 0) {
  310   uptr findPathToLock(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, uptr *path,
  310   uptr findPathToLock(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, uptr *path,
  310   uptr findPathToLock(DeadlockDetectorTLS<BV> *dtls, uptr cur_node, uptr *path,
  311                       uptr path_size) {
  313     uptr idx = nodeToIndex(cur_node);
  315     uptr res = g_.findShortestPath(idx, tmp_bv_, path, path_size);
  316     for (uptr i = 0; i < res; i++)
  325   void onUnlock(DeadlockDetectorTLS<BV> *dtls, uptr node) {
  334   bool onLockFast(DeadlockDetectorTLS<BV> *dtls, uptr node, u32 stk = 0) {
  342   bool isHeld(DeadlockDetectorTLS<BV> *dtls, uptr node) const {
  346   uptr testOnlyGetEpoch() const { return current_epoch_; }
  347   bool testOnlyHasEdge(uptr l1, uptr l2) {
  347   bool testOnlyHasEdge(uptr l1, uptr l2) {
  351   bool testOnlyHasEdgeRaw(uptr idx1, uptr idx2) {
  351   bool testOnlyHasEdgeRaw(uptr idx1, uptr idx2) {
  356     for (uptr from = 0; from < size(); from++)
  357       for (uptr to = 0; to < size(); to++)
  363   void check_idx(uptr idx) const { CHECK_LT(idx, size()); }
  365   void check_node(uptr node) const {
  370   uptr indexToNode(uptr idx) const {
  370   uptr indexToNode(uptr idx) const {
  375   uptr nodeToIndexUnchecked(uptr node) const { return node % size(); }
  375   uptr nodeToIndexUnchecked(uptr node) const { return node % size(); }
  377   uptr nodeToIndex(uptr node) const {
  377   uptr nodeToIndex(uptr node) const {
  382   uptr nodeToEpoch(uptr node) const { return node / size() * size(); }
  382   uptr nodeToEpoch(uptr node) const { return node / size() * size(); }
  384   uptr getAvailableNode(uptr data) {
  384   uptr getAvailableNode(uptr data) {
  385     uptr idx = available_nodes_.getAndClearFirstOne();
  398   uptr current_epoch_;
  403   uptr data_[BV::kSize];
  405   uptr n_edges_;
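
The nodeToIndexUnchecked and nodeToEpoch entries listed above for sanitizer_deadlock_detector.h encode a lock "node" id as an epoch (a multiple of the graph size) plus an index within that epoch. A minimal standalone sketch of that encoding, assuming uptr is a pointer-sized unsigned integer; kSize and the free functions are illustrative stand-ins for the detector's size() and member methods:

    #include <cstdint>
    #include <cstdio>
    using uptr = uintptr_t;

    static const uptr kSize = 64;       // hypothetical stand-in for g_.size()

    uptr indexToNode(uptr idx, uptr current_epoch) { return current_epoch + idx; }
    uptr nodeToIndex(uptr node) { return node % kSize; }          // index within the epoch
    uptr nodeToEpoch(uptr node) { return node / kSize * kSize; }  // epoch is a multiple of kSize

    int main() {
      uptr epoch = 3 * kSize;               // epochs advance in steps of the graph size
      uptr node = indexToNode(17, epoch);   // encode index 17 in that epoch
      std::printf("%zu %zu\n", (size_t)nodeToIndex(node), (size_t)nodeToEpoch(node));
      // prints "17 192": both the index and the epoch are recovered from one node id
      return 0;
    }
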
projects/compiler-rt/lib/sanitizer_common/sanitizer_deadlock_detector1.cpp
  121   uptr path[20];
  122   uptr len = dd.findPathToLock(&lt->dd, m->id, path, ARRAY_SIZE(path));
  130   len = Min<uptr>(len, DDReport::kMaxLoopSize);
  133   for (uptr i = 0; i < len; i++) {
  134     uptr from = path[i];
  135     uptr to = path[(i + 1) % len];
projects/compiler-rt/lib/sanitizer_common/sanitizer_deadlock_detector_interface.h
   36   uptr id;
projects/compiler-rt/lib/sanitizer_common/sanitizer_file.cpp
   25 void CatastrophicErrorWrite(const char *buffer, uptr length) {
   40   uptr pid = internal_getpid();
   74   uptr len = internal_strlen(path);
   95 bool ReadFileToBuffer(const char *file_name, char **buff, uptr *buff_size,
   96                       uptr *read_len, uptr max_len, error_t *errno_p) {
   96                       uptr *read_len, uptr max_len, error_t *errno_p) {
  102   uptr PageSize = GetPageSizeCached();
  103   uptr kMinFileLen = Min(PageSize, max_len);
  106   for (uptr size = kMinFileLen;; size = Min(size * 2, max_len)) {
  119       uptr just_read;
  140                       InternalMmapVectorNoCtor<char> *buff, uptr max_len,
  145   uptr PageSize = GetPageSizeCached();
  149   uptr read_len = 0;
  155     uptr just_read;
  180   uptr name_len = internal_strlen(name);
  185     uptr prefix_len = end - beg;
projects/compiler-rt/lib/sanitizer_common/sanitizer_file.h
   26   void Write(const char *buffer, uptr length);
   44   uptr fd_pid;
   63 bool ReadFromFile(fd_t fd, void *buff, uptr buff_size,
   64                   uptr *bytes_read = nullptr, error_t *error_p = nullptr);
   65 bool WriteToFile(fd_t fd, const void *buff, uptr buff_size,
   66                  uptr *bytes_written = nullptr, error_t *error_p = nullptr);
  101 void *MapFileToMemory(const char *file_name, uptr *buff_size);
  102 void *MapWritableFileToMemory(void *addr, uptr size, fd_t fd, OFF_T offset);
projects/compiler-rt/lib/sanitizer_common/sanitizer_flag_parser.cpp
   50 char *FlagParser::ll_strndup(const char *s, uptr n) {
   51   uptr len = internal_strnlen(s, n);
   79   uptr name_start = pos_;
   92   uptr value_start = ++pos_;
  133   uptr old_pos_ = pos_;
  144   static const uptr kMaxIncludeSize = 1 << 15;
  146   uptr data_mapped_size;
  148   uptr len;
projects/compiler-rt/lib/sanitizer_common/sanitizer_flag_parser.h
  121   uptr pos_;
  141   char *ll_strndup(const char *s, uptr n);
projects/compiler-rt/lib/sanitizer_common/sanitizer_flags.cpp
   37 void SubstituteForFlagValue(const char *s, char *out, uptr out_size) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_flags.h
   54 void SubstituteForFlagValue(const char *s, char *out, uptr out_size);
projects/compiler-rt/lib/sanitizer_common/sanitizer_interface_internal.h
   50       const __sanitizer::uptr *pcs, const __sanitizer::uptr len);
   50       const __sanitizer::uptr *pcs, const __sanitizer::uptr len);
   74       __sanitizer::uptr pc, char *module_path,
   75       __sanitizer::uptr module_path_len, __sanitizer::uptr *pc_offset);
   75       __sanitizer::uptr module_path_len, __sanitizer::uptr *pc_offset);
projects/compiler-rt/lib/sanitizer_common/sanitizer_internal_defs.h
  183 typedef uptr operator_new_size_type;
projects/compiler-rt/lib/sanitizer_common/sanitizer_libc.cpp
   23 void *internal_memchr(const void *s, int c, uptr n) {
   25   for (uptr i = 0; i < n; ++i, ++t)
   31 void *internal_memrchr(const void *s, int c, uptr n) {
   34   for (uptr i = 0; i < n; ++i, ++t) {
   40 int internal_memcmp(const void* s1, const void* s2, uptr n) {
   43   for (uptr i = 0; i < n; ++i, ++t1, ++t2)
   49 void *internal_memcpy(void *dest, const void *src, uptr n) {
   52   for (uptr i = 0; i < n; ++i)
   57 void *internal_memmove(void *dest, const void *src, uptr n) {
   75 void *internal_memset(void* s, int c, uptr n) {
   93   for (uptr i = 0; i < n; ++i, ++t) {
   99 uptr internal_strcspn(const char *s, const char *reject) {
  100   uptr i;
  109   uptr len = internal_strlen(s);
  128 int internal_strncmp(const char *s1, const char *s2, uptr n) {
  129   for (uptr i = 0; i < n; i++) {
  159   for (uptr i = 0; s[i]; i++) {
  165 uptr internal_strlen(const char *s) {
  166   uptr i = 0;
  171 uptr internal_strlcat(char *dst, const char *src, uptr maxlen) {
  171 uptr internal_strlcat(char *dst, const char *src, uptr maxlen) {
  172   const uptr srclen = internal_strlen(src);
  173   const uptr dstlen = internal_strnlen(dst, maxlen);
  184 char *internal_strncat(char *dst, const char *src, uptr n) {
  185   uptr len = internal_strlen(dst);
  186   uptr i;
  193 uptr internal_strlcpy(char *dst, const char *src, uptr maxlen) {
  193 uptr internal_strlcpy(char *dst, const char *src, uptr maxlen) {
  194   const uptr srclen = internal_strlen(src);
  204 char *internal_strncpy(char *dst, const char *src, uptr n) {
  205   uptr i;
  212 uptr internal_strnlen(const char *s, uptr maxlen) {
  212 uptr internal_strnlen(const char *s, uptr maxlen) {
  213   uptr i = 0;
  220   uptr len1 = internal_strlen(haystack);
  221   uptr len2 = internal_strlen(needle);
  223   for (uptr pos = 0; pos <= len1 - len2; pos++) {
  261 bool mem_is_zero(const char *beg, uptr size) {
  264   uptr *aligned_beg = (uptr *)RoundUpTo((uptr)beg, sizeof(uptr));
  264   uptr *aligned_beg = (uptr *)RoundUpTo((uptr)beg, sizeof(uptr));
  265   uptr *aligned_end = (uptr *)RoundDownTo((uptr)end, sizeof(uptr));
  265   uptr *aligned_end = (uptr *)RoundDownTo((uptr)end, sizeof(uptr));
  266   uptr all = 0;
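
The mem_is_zero entries above suggest the usual word-at-a-time zero check: OR the unaligned head and tail bytes and the aligned middle words into a single accumulator, then test it once. A minimal sketch under that assumption; this is an illustration, not the compiler-rt implementation, and the rounding helpers are local stand-ins:

    #include <cstdint>
    using uptr = uintptr_t;

    static uptr RoundUpTo(uptr x, uptr b)   { return (x + b - 1) & ~(b - 1); }
    static uptr RoundDownTo(uptr x, uptr b) { return x & ~(b - 1); }

    bool mem_is_zero_sketch(const char *beg, uptr size) {
      const char *end = beg + size;
      uptr all = 0;
      if (size < 2 * sizeof(uptr)) {          // too short to bother with whole words
        for (const char *p = beg; p < end; p++) all |= (unsigned char)*p;
        return all == 0;
      }
      uptr *aligned_beg = (uptr *)RoundUpTo((uptr)beg, sizeof(uptr));
      uptr *aligned_end = (uptr *)RoundDownTo((uptr)end, sizeof(uptr));
      for (const char *p = beg; p < (const char *)aligned_beg; p++)
        all |= (unsigned char)*p;             // unaligned head, byte by byte
      for (uptr *w = aligned_beg; w < aligned_end; w++)
        all |= *w;                            // aligned middle, word by word
      for (const char *p = (const char *)aligned_end; p < end; p++)
        all |= (unsigned char)*p;             // unaligned tail, byte by byte
      return all == 0;
    }
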
projects/compiler-rt/lib/sanitizer_common/sanitizer_libc.h
   29 void *internal_memchr(const void *s, int c, uptr n);
   30 void *internal_memrchr(const void *s, int c, uptr n);
   31 int internal_memcmp(const void* s1, const void* s2, uptr n);
   32 void *internal_memcpy(void *dest, const void *src, uptr n);
   33 void *internal_memmove(void *dest, const void *src, uptr n);
   35 void *internal_memset(void *s, int c, uptr n);
   39 uptr internal_strcspn(const char *s, const char *reject);
   41 uptr internal_strlen(const char *s);
   42 uptr internal_strlcat(char *dst, const char *src, uptr maxlen);
   42 uptr internal_strlcat(char *dst, const char *src, uptr maxlen);
   43 char *internal_strncat(char *dst, const char *src, uptr n);
   44 int internal_strncmp(const char *s1, const char *s2, uptr n);
   45 uptr internal_strlcpy(char *dst, const char *src, uptr maxlen);
   45 uptr internal_strlcpy(char *dst, const char *src, uptr maxlen);
   46 char *internal_strncpy(char *dst, const char *src, uptr n);
   47 uptr internal_strnlen(const char *s, uptr maxlen);
   47 uptr internal_strnlen(const char *s, uptr maxlen);
   52 int internal_snprintf(char *buffer, uptr length, const char *format, ...);
   56 bool mem_is_zero(const char *mem, uptr size);
   66 uptr internal_ftruncate(fd_t fd, uptr size);
   66 uptr internal_ftruncate(fd_t fd, uptr size);
   72 uptr internal_getpid();
   73 uptr internal_getppid();
   76 uptr internal_sched_yield();
   79 bool internal_iserror(uptr retval, int *rverrno = nullptr);
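
The names of internal_strlcpy / internal_strlcat and the srclen computation listed above suggest the BSD strlcpy/strlcat convention: copy at most maxlen - 1 characters, always NUL-terminate when maxlen is nonzero, and return the length the untruncated result would have had. A sketch of the strlcpy half under that assumption, using the C library for brevity; strlcpy_sketch is illustrative, not the compiler-rt code:

    #include <cstdint>
    #include <cstring>
    using uptr = uintptr_t;

    uptr strlcpy_sketch(char *dst, const char *src, uptr maxlen) {
      const uptr srclen = std::strlen(src);
      if (maxlen) {
        uptr n = srclen < maxlen - 1 ? srclen : maxlen - 1;
        std::memcpy(dst, src, n);
        dst[n] = '\0';                        // always terminated when maxlen > 0
      }
      return srclen;                          // srclen >= maxlen means the copy was truncated
    }

A caller can therefore detect truncation simply by comparing the return value against the buffer size it passed in.
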
projects/compiler-rt/lib/sanitizer_common/sanitizer_libignore.cpp
   44     for (uptr i = 0; i < count_; i++) {
   55   for (uptr i = 0; i < count_; i++) {
   81         const uptr idx =
  110         const uptr idx =
projects/compiler-rt/lib/sanitizer_common/sanitizer_libignore.h
   46   bool IsIgnored(uptr pc, bool *pc_in_ignored_lib) const;
   49   bool IsPcInstrumented(uptr pc) const;
   60     uptr begin;
   61     uptr end;
   64   inline bool IsInRange(uptr pc, const LibCodeRange &range) const {
   68   static const uptr kMaxIgnoredRanges = 128;
   69   static const uptr kMaxInstrumentedRanges = 1024;
   70   static const uptr kMaxLibs = 1024;
   81   uptr count_;
   90 inline bool LibIgnore::IsIgnored(uptr pc, bool *pc_in_ignored_lib) const {
   91   const uptr n = atomic_load(&ignored_ranges_count_, memory_order_acquire);
   92   for (uptr i = 0; i < n; i++) {
  104 inline bool LibIgnore::IsPcInstrumented(uptr pc) const {
  105   const uptr n = atomic_load(&instrumented_ranges_count_, memory_order_acquire);
  106   for (uptr i = 0; i < n; i++) {
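
LibIgnore::IsIgnored and IsPcInstrumented above scan a fixed-capacity array of code ranges whose element count is read with an acquire load, the usual publish-with-release / read-with-acquire pattern for append-only arrays. A minimal sketch of that pattern with std::atomic; Range, ranges, PublishRange and IsIgnoredPc are illustrative names, and writers are assumed to be serialized externally (the real code uses its own mutex and atomics):

    #include <atomic>
    #include <cstdint>
    using uptr = uintptr_t;

    struct Range { uptr begin, end; };

    static Range ranges[128];                 // fixed capacity, append-only
    static std::atomic<uptr> range_count{0};

    void PublishRange(uptr begin, uptr end) { // single writer assumed, n < 128
      uptr n = range_count.load(std::memory_order_relaxed);
      ranges[n] = {begin, end};               // fill the entry first...
      range_count.store(n + 1, std::memory_order_release);  // ...then publish it
    }

    bool IsIgnoredPc(uptr pc) {
      const uptr n = range_count.load(std::memory_order_acquire);
      for (uptr i = 0; i < n; i++)            // only fully initialized entries are visible
        if (pc >= ranges[i].begin && pc < ranges[i].end) return true;
      return false;
    }
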
projects/compiler-rt/lib/sanitizer_common/sanitizer_linux.cpp
  168 uptr internal_mmap(void *addr, uptr length, int prot, int flags, int fd,
  168 uptr internal_mmap(void *addr, uptr length, int prot, int flags, int fd,
  183 uptr internal_munmap(void *addr, uptr length) {
  183 uptr internal_munmap(void *addr, uptr length) {
  187 int internal_mprotect(void *addr, uptr length, int prot) {
  192 uptr internal_close(fd_t fd) {
  196 uptr internal_open(const char *filename, int flags) {
  204 uptr internal_open(const char *filename, int flags, u32 mode) {
  213 uptr internal_read(fd_t fd, void *buf, uptr count) {
  213 uptr internal_read(fd_t fd, void *buf, uptr count) {
  220 uptr internal_write(fd_t fd, const void *buf, uptr count) {
  220 uptr internal_write(fd_t fd, const void *buf, uptr count) {
  227 uptr internal_ftruncate(fd_t fd, uptr size) {
  227 uptr internal_ftruncate(fd_t fd, uptr size) {
  301 uptr internal_stat(const char *path, void *buf) {
  325 uptr internal_lstat(const char *path, void *buf) {
  350 uptr internal_fstat(fd_t fd, void *buf) {
  370 uptr internal_filesize(fd_t fd) {
  377 uptr internal_dup(int oldfd) {
  381 uptr internal_dup2(int oldfd, int newfd) {
  389 uptr internal_readlink(const char *path, char *buf, uptr bufsize) {
  389 uptr internal_readlink(const char *path, char *buf, uptr bufsize) {
  401 uptr internal_unlink(const char *path) {
  409 uptr internal_rename(const char *oldpath, const char *newpath) {
  421 uptr internal_sched_yield() {
  443 uptr internal_execve(const char *filename, char *const argv[],
  507 uptr internal_clock_gettime(__sanitizer_clockid_t clk_id, void *tp) {
  528   static uptr len;
  532     uptr environ_size;
  537   uptr namelen = internal_strlen(name);
  566   uptr buff_size;
  567   uptr buff_len;
  608     uptr* stack_end = (uptr*)__libc_stack_end;
  720 uptr internal_ptrace(int request, int pid, void *addr, void *data) {
  725 uptr internal_waitpid(int pid, int *status, int options) {
  730 uptr internal_getpid() {
  734 uptr internal_getppid() {
  738 uptr internal_getdents(fd_t fd, struct linux_dirent *dirp, unsigned int count) {
  748 uptr internal_lseek(fd_t fd, OFF_T offset, int whence) {
  753 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) {
  753 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) {
  753 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) {
  753 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) {
  753 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) {
  758 uptr internal_sigaltstack(const void *ss, void *oss) {
  824   uptr result = internal_syscall(SYSCALL(rt_sigaction), (uptr)signum,
  843 uptr internal_sigprocmask(int how, __sanitizer_sigset_t *set,
  870   const uptr idx = signum / (sizeof(k_set->sig[0]) * 8);
  871   const uptr bit = signum % (sizeof(k_set->sig[0]) * 8);
  880   const uptr idx = signum / (sizeof(k_set->sig[0]) * 8);
  881   const uptr bit = signum % (sizeof(k_set->sig[0]) * 8);
  921     uptr read = internal_getdents(
  930     for (uptr begin = (uptr)buffer_.data(), end = begin + read; begin < end;) {
 1023 uptr GetMaxVirtualAddress() {
 1054 uptr GetMaxUserVirtualAddress() {
 1055   uptr addr = GetMaxVirtualAddress();
 1065 uptr GetPageSize() {
 1085 uptr ReadBinaryName(/*out*/char *buf, uptr buf_len) {
 1085 uptr ReadBinaryName(/*out*/char *buf, uptr buf_len) {
 1105   uptr module_name_len = internal_readlink(
 1123 uptr ReadLongProcessName(/*out*/ char *buf, uptr buf_len) {
 1123 uptr ReadLongProcessName(/*out*/ char *buf, uptr buf_len) {
 1126   uptr tmpsize;
 1127   uptr tmplen;
 1145   uptr base_name_length = internal_strlen(base_name);
 1152 void ForEachMappedRegion(link_map *map, void (*cb)(const void *, uptr)) {
 1166   uptr preferred_base = (uptr)-1;
 1179       uptr seg_start = phdr->p_vaddr + delta;
 1180       uptr seg_end = seg_start + phdr->p_memsz;
 1198 uptr internal_clone(int (*fn)(void *), void *child_stack, int flags, void *arg,
 1761   static const uptr PF_WRITE = 1U << 1;
 1772   uptr err = ucontext->uc_mcontext.gregs[REG_ERR];
 1865 static void GetPcSpBp(void *context, uptr *pc, uptr *sp, uptr *bp) {
 1865 static void GetPcSpBp(void *context, uptr *pc, uptr *sp, uptr *bp) {
 1865 static void GetPcSpBp(void *context, uptr *pc, uptr *sp, uptr *bp) {
 2103 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
 2103 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
 2103 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
 2103 uptr FindAvailableMemoryRange(uptr size, uptr alignment, uptr left_padding,
 2104                               uptr *largest_gap_found,
 2105                               uptr *max_occupied_addr) {
 2110 bool GetRandom(void *buffer, uptr length, bool blocking) {
 2126     uptr res = internal_syscall(SYSCALL(getrandom), buffer, length,
 2137   uptr fd = internal_open("/dev/urandom", O_RDONLY);
 2140   uptr res = internal_read(fd, buffer, length);
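
The idx/bit computations listed above for sanitizer_linux.cpp split a signal number into a word index and a bit position inside the kernel sigset words. A standalone sketch of that addressing, assuming 1-based signal numbers and unsigned long set words; sigismember_sketch is illustrative only:

    #include <cstdint>
    using uptr = uintptr_t;

    bool sigismember_sketch(const unsigned long *sig_words, int signum) {
      signum -= 1;                                           // assumption: signals start at 1
      const uptr idx = signum / (sizeof(sig_words[0]) * 8);  // which word holds the signal
      const uptr bit = signum % (sizeof(sig_words[0]) * 8);  // which bit inside that word
      return (sig_words[idx] >> bit) & 1;
    }
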
projects/compiler-rt/lib/sanitizer_common/sanitizer_linux.h
   36   uptr mmaped_size;
   37   uptr len;
   48 uptr internal_getdents(fd_t fd, struct linux_dirent *dirp, unsigned int count);
   49 uptr internal_sigaltstack(const void* ss, void* oss);
   50 uptr internal_sigprocmask(int how, __sanitizer_sigset_t *set,
   52 uptr internal_clock_gettime(__sanitizer_clockid_t clk_id, void *tp);
   56 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5);
   56 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5);
   56 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5);
   56 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5);
   56 uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5);
   65 uptr internal_clone(int (*fn)(void *), void *child_stack, int flags, void *arg,
   96 uptr ThreadDescriptorSize();
   97 uptr ThreadSelf();
   98 uptr ThreadSelfOffset();
  105 void ForEachMappedRegion(link_map *map, void (*cb)(const void *, uptr));
  110 INLINE void ReleaseMemoryPagesToOSAndZeroFill(uptr beg, uptr end) {
  110 INLINE void ReleaseMemoryPagesToOSAndZeroFill(uptr beg, uptr end) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_linux_libcdep.cpp
   93 void GetThreadStackTopAndBottom(bool at_initialization, uptr *stack_top,
   94                                 uptr *stack_bottom) {
  109     uptr prev_end = 0;
  118     uptr stacksize = rl.rlim_cur;
  129   uptr stacksize = 0;
  170   uptr len = confstr(_CS_GNU_LIBC_VERSION, buf, sizeof(buf));
  189 static uptr g_tls_size;
  270 uptr ThreadDescriptorSize() {
  271   uptr val = atomic_load_relaxed(&thread_descriptor_size);
  319 const uptr kThreadSelfOffset = FIRST_32_SECOND_64(8, 16);
  321 uptr ThreadSelfOffset() {
  341 uptr ThreadSelf() {
  342   uptr descr_addr;
  430 static void GetTls(uptr *addr, uptr *size) {
  430 static void GetTls(uptr *addr, uptr *size) {
  489 uptr GetTlsSize() {
  503 void GetThreadStackAndTls(bool main, uptr *stk_addr, uptr *stk_size,
  503 void GetThreadStackAndTls(bool main, uptr *stk_addr, uptr *stk_size,
  504                           uptr *tls_addr, uptr *tls_size) {
  504                           uptr *tls_addr, uptr *tls_size) {
  511   uptr stack_top, stack_bottom;
  558       uptr cur_beg = info->dlpi_addr + phdr->p_vaddr;
  559       uptr cur_end = cur_beg + phdr->p_memsz;
  615 static uptr GetRSSFromGetrusage() {
  622 uptr GetRSS() {
  629   uptr len = internal_read(fd, buf, sizeof(buf) - 1);
  645   uptr rss = 0;
  837   uptr rv = internal_execve(pathname, GetArgv(), GetEnviron());
projects/compiler-rt/lib/sanitizer_common/sanitizer_list.h
   37   uptr size() const { return size_; }
  121       uptr count = 0;
  159   uptr size_;
projects/compiler-rt/lib/sanitizer_common/sanitizer_local_address_space_view.h
   42   static const T *Load(const T *target_address, uptr num_elements = 1) {
   68   static T *LoadWritable(T *target_address, uptr num_elements = 1) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_mutex.h
   92   ALIGNED(8) uptr opaque_storage_[10];
   93   uptr owner_;  // for debugging
projects/compiler-rt/lib/sanitizer_common/sanitizer_persistent_allocator.h
   25   void *alloc(uptr size);
   28   void *tryAlloc(uptr size);
   34 inline void *PersistentAllocator::tryAlloc(uptr size) {
   37     uptr cmp = atomic_load(&region_pos, memory_order_acquire);
   38     uptr end = atomic_load(&region_end, memory_order_acquire);
   46 inline void *PersistentAllocator::alloc(uptr size) {
   56     uptr allocsz = 64 * 1024;
   58     uptr mem = (uptr)MmapOrDie(allocsz, "stack depot");
   65 inline void *PersistentAlloc(uptr sz) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp
  254   const uptr sig_ign = (uptr)SIG_IGN;
  255   const uptr sig_dfl = (uptr)SIG_DFL;
  256   const uptr sig_err = (uptr)SIG_ERR;
  257   const uptr sa_siginfo = (uptr)SA_SIGINFO;
  285   uptr __sanitizer_in_addr_sz(int af) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
  164   uptr *oldlenp;
  166   uptr newlen;
  178   uptr data[4];
  265   uptr shm_segsz;
  274   uptr shm_atime;
  278   uptr shm_dtime;
  282   uptr shm_ctime;
  297   uptr shm_nattch;
  298   uptr __unused4;
  299   uptr __unused5;
  315   uptr iov_len;
  345   uptr x_public;
  346   uptr x_private;
  347   uptr x_base;
  462   uptr msg_iovlen;
  464   uptr msg_controllen;
  468   uptr cmsg_len;
  570   uptr val[128 / sizeof(uptr)];
  570   uptr val[128 / sizeof(uptr)];
  695 extern const uptr sig_ign;
  696 extern const uptr sig_dfl;
  697 extern const uptr sig_err;
  698 extern const uptr sa_siginfo;
  706 uptr __sanitizer_in_addr_sz(int af);
  710   uptr dlpi_addr;
  759   uptr gl_pathc;
  761   uptr gl_offs;
  781   uptr we_wordc;
  783   uptr we_offs;
  878   uptr more_fields[7];
  881 typedef uptr (*__sanitizer_cookie_io_read)(void *cookie, char *buf, uptr size);
  881 typedef uptr (*__sanitizer_cookie_io_read)(void *cookie, char *buf, uptr size);
  882 typedef uptr (*__sanitizer_cookie_io_write)(void *cookie, const char *buf,
  883                                             uptr size);
projects/compiler-rt/lib/sanitizer_common/sanitizer_posix.cpp
   40 uptr GetMmapGranularity() {
   44 void *MmapOrDie(uptr size, const char *mem_type, bool raw_report) {
   46   uptr res = MmapNamed(nullptr, size, PROT_READ | PROT_WRITE,
   55 void UnmapOrDie(void *addr, uptr size) {
   57   uptr res = internal_munmap(addr, size);
   66 void *MmapOrDieOnFatalError(uptr size, const char *mem_type) {
   68   uptr res = MmapNamed(nullptr, size, PROT_READ | PROT_WRITE,
   83 void *MmapAlignedOrDieOnFatalError(uptr size, uptr alignment,
   83 void *MmapAlignedOrDieOnFatalError(uptr size, uptr alignment,
   87   uptr map_size = size + alignment;
   88   uptr map_res = (uptr)MmapOrDieOnFatalError(map_size, mem_type);
   91   uptr map_end = map_res + map_size;
   92   uptr res = map_res;
   97   uptr end = res + size;
  103 void *MmapNoReserveOrDie(uptr size, const char *mem_type) {
  105   uptr p = MmapNamed(nullptr, size, PROT_READ | PROT_WRITE,
  114 static void *MmapFixedImpl(uptr fixed_addr, uptr size, bool tolerate_enomem,
  114 static void *MmapFixedImpl(uptr fixed_addr, uptr size, bool tolerate_enomem,
  118   uptr p = MmapNamed((void *)fixed_addr, size, PROT_READ | PROT_WRITE,
  133 void *MmapFixedOrDie(uptr fixed_addr, uptr size, const char *name) {
  133 void *MmapFixedOrDie(uptr fixed_addr, uptr size, const char *name) {
  137 void *MmapFixedOrDieOnFatalError(uptr fixed_addr, uptr size, const char *name) {
  137 void *MmapFixedOrDieOnFatalError(uptr fixed_addr, uptr size, const char *name) {
  141 bool MprotectNoAccess(uptr addr, uptr size) {
  141 bool MprotectNoAccess(uptr addr, uptr size) {
  145 bool MprotectReadOnly(uptr addr, uptr size) {
  145 bool MprotectReadOnly(uptr addr, uptr size) {
  172 bool ReadFromFile(fd_t fd, void *buff, uptr buff_size, uptr *bytes_read,
  172 bool ReadFromFile(fd_t fd, void *buff, uptr buff_size, uptr *bytes_read,
  174   uptr res = internal_read(fd, buff, buff_size);
  182 bool WriteToFile(fd_t fd, const void *buff, uptr buff_size, uptr *bytes_written,
  182 bool WriteToFile(fd_t fd, const void *buff, uptr buff_size, uptr *bytes_written,
  184   uptr res = internal_write(fd, buff, buff_size);
  192 void *MapFileToMemory(const char *file_name, uptr *buff_size) {
  195   uptr fsize = internal_filesize(fd);
  199   uptr map = internal_mmap(nullptr, *buff_size, PROT_READ, MAP_PRIVATE, fd, 0);
  203 void *MapWritableFileToMemory(void *addr, uptr size, fd_t fd, OFF_T offset) {
  204   uptr flags = MAP_SHARED;
  206   uptr p = internal_mmap(addr, size, PROT_READ | PROT_WRITE, flags, fd, offset);
  216 static inline bool IntervalsAreSeparate(uptr start1, uptr end1,
  216 static inline bool IntervalsAreSeparate(uptr start1, uptr end1,
  217                                         uptr start2, uptr end2) {
  217                                         uptr start2, uptr end2) {
  227 bool MemoryRangeIsAvailable(uptr range_start, uptr range_end) {
  227 bool MemoryRangeIsAvailable(uptr range_start, uptr range_end) {
  268 void ReportFile::Write(const char *buffer, uptr length) {
  274 bool GetCodeRangeForFile(const char *module, uptr *start, uptr *end) {
  274 bool GetCodeRangeForFile(const char *module, uptr *start, uptr *end) {
  289 uptr SignalContext::GetAddress() const {
  343 int GetNamedMappingFd(const char *name, uptr size, int *flags) {
  375 void DecorateMapping(uptr addr, uptr size, const char *name) {
  375 void DecorateMapping(uptr addr, uptr size, const char *name) {
  379 uptr MmapNamed(void *addr, uptr length, int prot, int flags, const char *name) {
  379 uptr MmapNamed(void *addr, uptr length, int prot, int flags, const char *name) {
  381   uptr res = internal_mmap(addr, length, prot, flags, fd, 0);
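
MmapAlignedOrDieOnFatalError above over-allocates by the requested alignment (map_size = size + alignment) and then trims the mapping down to an aligned block. A sketch of that over-allocate-and-trim technique with raw mmap/munmap, assuming size and alignment are page-size multiples and alignment is a power of two; error handling is omitted and the function name is hypothetical:

    #include <sys/mman.h>
    #include <cstdint>
    using uptr = uintptr_t;

    void *mmap_aligned_sketch(uptr size, uptr alignment) {
      uptr map_size = size + alignment;                   // room to slide to an aligned start
      uptr map_res = (uptr)mmap(nullptr, map_size, PROT_READ | PROT_WRITE,
                                MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (map_res == (uptr)MAP_FAILED) return nullptr;
      uptr map_end = map_res + map_size;
      uptr res = map_res;
      if (res & (alignment - 1)) {                        // not aligned yet
        res = (res + alignment - 1) & ~(alignment - 1);
        munmap((void *)map_res, res - map_res);           // drop the unused head
      }
      uptr end = res + size;
      if (end != map_end)
        munmap((void *)end, map_end - end);               // drop the unused tail
      return (void *)res;
    }
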
projects/compiler-rt/lib/sanitizer_common/sanitizer_posix.h
   33 uptr internal_open(const char *filename, int flags);
   34 uptr internal_open(const char *filename, int flags, u32 mode);
   35 uptr internal_close(fd_t fd);
   37 uptr internal_read(fd_t fd, void *buf, uptr count);
   37 uptr internal_read(fd_t fd, void *buf, uptr count);
   38 uptr internal_write(fd_t fd, const void *buf, uptr count);
   38 uptr internal_write(fd_t fd, const void *buf, uptr count);
   41 uptr internal_mmap(void *addr, uptr length, int prot, int flags,
   41 uptr internal_mmap(void *addr, uptr length, int prot, int flags,
   43 uptr internal_munmap(void *addr, uptr length);
   43 uptr internal_munmap(void *addr, uptr length);
   44 int internal_mprotect(void *addr, uptr length, int prot);
   47 uptr internal_filesize(fd_t fd);  // -1 on error.
   48 uptr internal_stat(const char *path, void *buf);
   49 uptr internal_lstat(const char *path, void *buf);
   50 uptr internal_fstat(fd_t fd, void *buf);
   51 uptr internal_dup(int oldfd);
   52 uptr internal_dup2(int oldfd, int newfd);
   53 uptr internal_readlink(const char *path, char *buf, uptr bufsize);
   53 uptr internal_readlink(const char *path, char *buf, uptr bufsize);
   54 uptr internal_unlink(const char *path);
   55 uptr internal_rename(const char *oldpath, const char *newpath);
   56 uptr internal_lseek(fd_t fd, OFF_T offset, int whence);
   61 uptr internal_ptrace(int request, int pid, void *addr, void *data);
   63 uptr internal_waitpid(int pid, int *status, int options);
   69                     uptr *oldlenp, const void *newp, uptr newlen);
   69                     uptr *oldlenp, const void *newp, uptr newlen);
   70 int internal_sysctlbyname(const char *sname, void *oldp, uptr *oldlenp,
   71                           const void *newp, uptr newlen);
   92 int my_pthread_attr_getstack(void *attr, void **addr, uptr *size);
  101 uptr internal_execve(const char *filename, char *const argv[],
  112 uptr MmapNamed(void *addr, uptr length, int prot, int flags, const char *name);
  112 uptr MmapNamed(void *addr, uptr length, int prot, int flags, const char *name);
  117 int GetNamedMappingFd(const char *name, uptr size, int *flags);
  120 void DecorateMapping(uptr addr, uptr size, const char *name);
  120 void DecorateMapping(uptr addr, uptr size, const char *name);
projects/compiler-rt/lib/sanitizer_common/sanitizer_posix_libcdep.cpp
   55 uptr GetThreadSelf() {
   59 void ReleaseMemoryPagesToOS(uptr beg, uptr end) {
   59 void ReleaseMemoryPagesToOS(uptr beg, uptr end) {
   60   uptr page_size = GetPageSizeCached();
   61   uptr beg_aligned = RoundUpTo(beg, page_size);
   62   uptr end_aligned = RoundDownTo(end, page_size);
   71 void SetShadowRegionHugePageMode(uptr addr, uptr size) {
   71 void SetShadowRegionHugePageMode(uptr addr, uptr size) {
   80 bool DontDumpShadowMemory(uptr addr, uptr length) {
   80 bool DontDumpShadowMemory(uptr addr, uptr length) {
  120 void SetStackSizeLimitInBytes(uptr limit) {
  172 static const uptr kAltStackSize = SIGSTKSZ * 4;  // SIGSTKSZ is not enough.
  277 bool IsAccessibleMemoryRange(uptr beg, uptr size) {
  277 bool IsAccessibleMemoryRange(uptr beg, uptr size) {
  278   uptr page_size = GetPageSizeCached();
  284   uptr bytes_written =
  307 static bool MmapFixed(uptr fixed_addr, uptr size, int additional_flags,
  307 static bool MmapFixed(uptr fixed_addr, uptr size, int additional_flags,
  311   uptr p =
  325 bool MmapFixedNoReserve(uptr fixed_addr, uptr size, const char *name) {
  325 bool MmapFixedNoReserve(uptr fixed_addr, uptr size, const char *name) {
  329 bool MmapFixedSuperNoReserve(uptr fixed_addr, uptr size, const char *name) {
  329 bool MmapFixedSuperNoReserve(uptr fixed_addr, uptr size, const char *name) {
  343 uptr ReservedAddressRange::Init(uptr size, const char *name, uptr fixed_addr) {
  343 uptr ReservedAddressRange::Init(uptr size, const char *name, uptr fixed_addr) {
  343 uptr ReservedAddressRange::Init(uptr size, const char *name, uptr fixed_addr) {
  354 uptr ReservedAddressRange::Map(uptr fixed_addr, uptr size, const char *name) {
  354 uptr ReservedAddressRange::Map(uptr fixed_addr, uptr size, const char *name) {
  354 uptr ReservedAddressRange::Map(uptr fixed_addr, uptr size, const char *name) {
  359 uptr ReservedAddressRange::MapOrDie(uptr fixed_addr, uptr size,
  359 uptr ReservedAddressRange::MapOrDie(uptr fixed_addr, uptr size,
  359 uptr ReservedAddressRange::MapOrDie(uptr fixed_addr, uptr size,
  364 void ReservedAddressRange::Unmap(uptr addr, uptr size) {
  364 void ReservedAddressRange::Unmap(uptr addr, uptr size) {
  375 void *MmapFixedNoAccess(uptr fixed_addr, uptr size, const char *name) {
  375 void *MmapFixedNoAccess(uptr fixed_addr, uptr size, const char *name) {
  381 void *MmapNoAccess(uptr size) {
  392 int my_pthread_attr_getstack(void *attr, void **addr, uptr *size) {
  404   uptr stackaddr = 0;
  405   uptr stacksize = 0;
  411   const uptr minstacksize = GetTlsSize() + 128*1024;
  481   uptr waitpid_status = internal_waitpid(pid, &process_status, WNOHANG);
  492   uptr waitpid_status = internal_waitpid(pid, &process_status, 0);
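
ReleaseMemoryPagesToOS above rounds beg up and end down to the page size before releasing memory, since only whole pages strictly inside [beg, end) can be returned to the OS. A sketch of that idea; the use of madvise with MADV_DONTNEED and the sysconf page-size query are assumptions for this illustration, not a claim about the exact compiler-rt call:

    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstdint>
    using uptr = uintptr_t;

    void release_pages_sketch(uptr beg, uptr end) {
      uptr page_size = (uptr)sysconf(_SC_PAGESIZE);
      uptr beg_aligned = (beg + page_size - 1) & ~(page_size - 1);  // round beg up
      uptr end_aligned = end & ~(page_size - 1);                    // round end down
      if (beg_aligned < end_aligned)                                // at least one whole page
        madvise((void *)beg_aligned, end_aligned - beg_aligned, MADV_DONTNEED);
    }
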
projects/compiler-rt/lib/sanitizer_common/sanitizer_printf.cpp
   44   uptr const kMaxLen = 30;
   54   uptr num_buffer[kMaxLen];
  339 int internal_snprintf(char *buffer, uptr length, const char *format, ...) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_procmaps.h
   31 static const uptr kProtectionRead = 1;
   32 static const uptr kProtectionWrite = 2;
   33 static const uptr kProtectionExecute = 4;
   34 static const uptr kProtectionShared = 8;
   40   explicit MemoryMappedSegment(char *buff = nullptr, uptr size = 0)
   51   uptr start;
   52   uptr end;
   53   uptr offset;
   55   uptr filename_size;
   56   uptr protection;
   89 bool GetCodeRangeForFile(const char *module, uptr *start, uptr *end);
   89 bool GetCodeRangeForFile(const char *module, uptr *start, uptr *end);
   92 uptr ParseDecimal(const char **p);
   94 uptr ParseHex(const char **p);
projects/compiler-rt/lib/sanitizer_common/sanitizer_procmaps_common.cpp
   37 static uptr ParseNumber(const char **p, int base) {
   38   uptr n = 0;
   53 uptr ParseDecimal(const char **p) {
   62 uptr ParseHex(const char **p) {
  125   for (uptr i = 0; Next(&segment); i++) {
  140     uptr base_address = (i ? segment.start : 0) - segment.offset;
  148 void GetMemoryProfile(fill_profile_f cb, uptr *stats, uptr stats_size) {
  148 void GetMemoryProfile(fill_profile_f cb, uptr *stats, uptr stats_size) {
  150   uptr smaps_cap = 0;
  151   uptr smaps_len = 0;
  154   uptr start = 0;
  164       uptr rss = ParseDecimal(&pos) * 1024;
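
The ParseNumber / ParseDecimal / ParseHex entries above are small cursor-advancing parsers used on /proc maps text: consume digits of the given base, move the pointer past them, and return the value. A sketch under that reading; the _sketch names and digit_value helper are illustrative, not the exact compiler-rt code:

    #include <cstdint>
    using uptr = uintptr_t;

    static int digit_value(char c, int base) {
      int v = -1;
      if (c >= '0' && c <= '9') v = c - '0';
      else if (c >= 'a' && c <= 'f') v = c - 'a' + 10;
      else if (c >= 'A' && c <= 'F') v = c - 'A' + 10;
      return v < base ? v : -1;                 // reject digits outside the base
    }

    uptr ParseNumber_sketch(const char **p, int base) {
      uptr n = 0;
      int d;
      while ((d = digit_value(**p, base)) >= 0) {
        n = n * base + d;
        (*p)++;                                 // advance the caller's cursor
      }
      return n;
    }

    uptr ParseDecimal_sketch(const char **p) { return ParseNumber_sketch(p, 10); }
    uptr ParseHex_sketch(const char **p)     { return ParseNumber_sketch(p, 16); }
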
projects/compiler-rt/lib/sanitizer_common/sanitizer_procmaps_linux.cpp
   69     uptr len =
projects/compiler-rt/lib/sanitizer_common/sanitizer_quarantine.h
   28   static const uptr kSize = 1021;
   30   uptr size;
   31   uptr count;
   34   void init(void *ptr, uptr size) {
   41   uptr quarantined_size() const {
   45   void push_back(void *ptr, uptr size) {
   59     for (uptr i = 0; i < from->count; ++i)
   84   void Init(uptr size, uptr cache_size) {
   84   void Init(uptr size, uptr cache_size) {
   97   uptr GetSize() const { return atomic_load_relaxed(&max_size_); }
   98   uptr GetCacheSize() const {
  102   void Put(Cache *c, Callback cb, Node *ptr, uptr size) {
  103     uptr cache_size = GetCacheSize();
  152   void NOINLINE Recycle(uptr min_size, Callback cb) {
  161       uptr cache_size = cache_.Size();
  162       uptr overhead_size = cache_.OverheadSize();
  167       const uptr kOverheadThresholdPercents = 100;
  185       const uptr kPrefetch = 16;
  187       for (uptr i = 0; i < kPrefetch; i++)
  189       for (uptr i = 0, count = b->count; i < count; i++) {
  212   uptr Size() const {
  217   uptr OverheadSize() const {
  221   void Enqueue(Callback cb, void *ptr, uptr size) {
  255     uptr extracted_size = 0;
  277     uptr batch_count = 0;
  278     uptr total_overhead_bytes = 0;
  279     uptr total_bytes = 0;
  280     uptr total_quarantine_chunks = 0;
  287     uptr quarantine_chunks_capacity = batch_count * QuarantineBatch::kSize;
  290     uptr total_quarantined_bytes = total_bytes - total_overhead_bytes;
  307   void SizeAdd(uptr add) {
  310   void SizeSub(uptr sub) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_ring_buffer.h
   25   static RingBuffer *New(uptr Size) {
   28     uptr End = reinterpret_cast<uptr>(Ptr) + SizeInBytes(Size);
   35   uptr size() const {
   41   static uptr SizeInBytes(uptr Size) {
   41   static uptr SizeInBytes(uptr Size) {
   45   uptr SizeInBytes() { return SizeInBytes(size()); }
   55   T operator[](uptr Idx) const {
   89   static constexpr uptr kNextMask = (1ULL << kSizeShift) - 1;
   91   uptr GetStorageSize() const { return (long_ >> kSizeShift) << kPageSizeBits; }
   93   void Init(void *storage, uptr size) {
  109   CompactRingBuffer(void *storage, uptr size) {
  115     uptr size = other.GetStorageSize();
  118     uptr Idx = other.Next() - (const T *)other.StartOfStorage();
  132   uptr size() const { return GetStorageSize() / sizeof(T); }
  142   const T &operator[](uptr Idx) const {
  156   uptr long_;
projects/compiler-rt/lib/sanitizer_common/sanitizer_stackdepot.cpp
   27   uptr stack[1];  // [size]
   43     uptr i = 0;
   49   static uptr storage_size(const args_type &args) {
   50     return sizeof(StackDepotNode) + (args.size - 1) * sizeof(uptr);
   53     MurMur2HashBuilder H(args.size * sizeof(uptr));
   54     for (uptr i = 0; i < args.size; i++) H.add(args.trace[i]);
   64     internal_memcpy(stack, args.trace, size * sizeof(uptr));
  128     uptr v = atomic_load(p, memory_order_consume);
  142   uptr idx =
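
StackDepotNode above declares a one-element stack[1] array and sizes its storage as sizeof(StackDepotNode) + (size - 1) * sizeof(uptr), the classic trailing-array allocation. A minimal sketch of that layout, using plain malloc instead of the depot's persistent allocator; NodeSketch and MakeNode are illustrative names:

    #include <cstdint>
    #include <cstdlib>
    #include <cstring>
    using uptr = uintptr_t;

    struct NodeSketch {
      uptr size;       // number of frames stored
      uptr stack[1];   // in reality 'size' entries follow in the same allocation
    };

    NodeSketch *MakeNode(const uptr *trace, uptr n) {    // assumes n >= 1
      size_t bytes = sizeof(NodeSketch) + (n - 1) * sizeof(uptr);
      NodeSketch *node = (NodeSketch *)std::malloc(bytes);
      node->size = n;
      std::memcpy(node->stack, trace, n * sizeof(uptr)); // frames land right after the header
      return node;
    }
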
projects/compiler-rt/lib/sanitizer_common/sanitizer_stackdepotbase.h
   77     uptr cmp = atomic_load(p, memory_order_relaxed);
  101   uptr h = Node::hash(args);
  103   uptr v = atomic_load(p, memory_order_consume);
  117   uptr part = (h % kTabSize) / kPartSize;
  124   uptr memsz = Node::storage_size(args);
  143   uptr part = id >> kPartShift;
  145     uptr idx = part * kPartSize + i;
  148     uptr v = atomic_load(p, memory_order_consume);
  170     uptr s = atomic_load(p, memory_order_relaxed);
projects/compiler-rt/lib/sanitizer_common/sanitizer_stacktrace.cpp
   19 uptr StackTrace::GetNextInstructionPc(uptr pc) {
   19 uptr StackTrace::GetNextInstructionPc(uptr pc) {
   29 uptr StackTrace::GetCurrentPc() {
   33 void BufferedStackTrace::Init(const uptr *pcs, uptr cnt, uptr extra_top_pc) {
   33 void BufferedStackTrace::Init(const uptr *pcs, uptr cnt, uptr extra_top_pc) {
   33 void BufferedStackTrace::Init(const uptr *pcs, uptr cnt, uptr extra_top_pc) {
   48 static inline uhwptr *GetCanonicFrame(uptr bp,
   49                                       uptr stack_top,
   50                                       uptr stack_bottom) {
   70 void BufferedStackTrace::UnwindFast(uptr pc, uptr bp, uptr stack_top,
   70 void BufferedStackTrace::UnwindFast(uptr pc, uptr bp, uptr stack_top,
   70 void BufferedStackTrace::UnwindFast(uptr pc, uptr bp, uptr stack_top,
   71                                     uptr stack_bottom, u32 max_depth) {
   74   const uptr kPageSize = GetPageSizeCached();
   81   uptr bottom = stack_bottom;
  115 void BufferedStackTrace::PopStackFrames(uptr count) {
  118   for (uptr i = 0; i < size; ++i) {
  123 static uptr Distance(uptr a, uptr b) { return a < b ? b - a : a - b; }
  123 static uptr Distance(uptr a, uptr b) { return a < b ? b - a : a - b; }
  123 static uptr Distance(uptr a, uptr b) { return a < b ? b - a : a - b; }
  125 uptr BufferedStackTrace::LocatePcInTrace(uptr pc) {
  125 uptr BufferedStackTrace::LocatePcInTrace(uptr pc) {
  126   uptr best = 0;
  127   for (uptr i = 1; i < size; ++i) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_stacktrace.h
   43   const uptr *trace;
   53   StackTrace(const uptr *trace, u32 size) : trace(trace), size(size), tag(0) {}
   54   StackTrace(const uptr *trace, u32 size, u32 tag)
   68   static uptr GetCurrentPc();
   69   static inline uptr GetPreviousInstructionPc(uptr pc);
   69   static inline uptr GetPreviousInstructionPc(uptr pc);
   70   static uptr GetNextInstructionPc(uptr pc);
   70   static uptr GetNextInstructionPc(uptr pc);
   77 uptr StackTrace::GetPreviousInstructionPc(uptr pc) {
   77 uptr StackTrace::GetPreviousInstructionPc(uptr pc) {
   95   uptr trace_buffer[kStackTraceMax];
   96   uptr top_frame_bp;  // Optional bp of a top frame.
  100   void Init(const uptr *pcs, uptr cnt, uptr extra_top_pc = 0);
  100   void Init(const uptr *pcs, uptr cnt, uptr extra_top_pc = 0);
  100   void Init(const uptr *pcs, uptr cnt, uptr extra_top_pc = 0);
  105   void Unwind(uptr pc, uptr bp, void *context, bool request_fast,
  105   void Unwind(uptr pc, uptr bp, void *context, bool request_fast,
  118   void Unwind(u32 max_depth, uptr pc, uptr bp, void *context, uptr stack_top,
  118   void Unwind(u32 max_depth, uptr pc, uptr bp, void *context, uptr stack_top,
  118   void Unwind(u32 max_depth, uptr pc, uptr bp, void *context, uptr stack_top,
  119               uptr stack_bottom, bool request_fast_unwind);
  128   void UnwindImpl(uptr pc, uptr bp, void *context, bool request_fast,
  128   void UnwindImpl(uptr pc, uptr bp, void *context, bool request_fast,
  132   void UnwindFast(uptr pc, uptr bp, uptr stack_top, uptr stack_bottom,
  132   void UnwindFast(uptr pc, uptr bp, uptr stack_top, uptr stack_bottom,
  132   void UnwindFast(uptr pc, uptr bp, uptr stack_top, uptr stack_bottom,
  132   void UnwindFast(uptr pc, uptr bp, uptr stack_top, uptr stack_bottom,
  134   void UnwindSlow(uptr pc, u32 max_depth);
  135   void UnwindSlow(uptr pc, void *context, u32 max_depth);
  137   void PopStackFrames(uptr count);
  138   uptr LocatePcInTrace(uptr pc);
  138   uptr LocatePcInTrace(uptr pc);
  147 static inline bool IsValidFrame(uptr frame, uptr stack_top, uptr stack_bottom) {
  147 static inline bool IsValidFrame(uptr frame, uptr stack_top, uptr stack_bottom) {
  147 static inline bool IsValidFrame(uptr frame, uptr stack_top, uptr stack_bottom) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_stacktrace_libcdep.cpp
   29   uptr frame_num = 0;
   30   for (uptr i = 0; i < size && trace[i]; i++) {
   33     uptr pc = GetPreviousInstructionPc(trace[i]);
   57 void BufferedStackTrace::Unwind(u32 max_depth, uptr pc, uptr bp, void *context,
   57 void BufferedStackTrace::Unwind(u32 max_depth, uptr pc, uptr bp, void *context,
   58                                 uptr stack_top, uptr stack_bottom,
   58                                 uptr stack_top, uptr stack_bottom,
   87 static int GetModuleAndOffsetForPc(uptr pc, char *module_name,
   88                                    uptr module_name_len, uptr *pc_offset) {
   88                                    uptr module_name_len, uptr *pc_offset) {
  107 void __sanitizer_symbolize_pc(uptr pc, const char *fmt, char *out_buf,
  108                               uptr out_buf_size) {
  118   uptr frame_num = 0;
  130     uptr n = out_end - out_buf - 1;
  132     out_buf += __sanitizer::Min<uptr>(n, frame_desc.length());
  140 void __sanitizer_symbolize_global(uptr data_addr, const char *fmt,
  141                                   char *out_buf, uptr out_buf_size) {
  153 int __sanitizer_get_module_and_offset_for_pc(uptr pc, char *module_name,
  154                                              uptr module_name_len,
  155                                              uptr *pc_offset) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_stacktrace_printer.cpp
   25   uptr prefix_len = internal_strlen(prefix);
  254                           uptr offset, ModuleArch arch,
projects/compiler-rt/lib/sanitizer_common/sanitizer_stacktrace_printer.h
   59                           uptr offset, ModuleArch arch,
projects/compiler-rt/lib/sanitizer_common/sanitizer_stoptheworld.h
   35   virtual PtraceRegistersStatus GetRegistersAndSP(uptr index, uptr *buffer,
   35   virtual PtraceRegistersStatus GetRegistersAndSP(uptr index, uptr *buffer,
   36                                                   uptr *sp) const {
   41   virtual uptr RegisterCount() const { UNIMPLEMENTED(); }
   42   virtual uptr ThreadCount() const { UNIMPLEMENTED(); }
   43   virtual tid_t GetThreadID(uptr index) const { UNIMPLEMENTED(); }
projects/compiler-rt/lib/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cpp
   92   tid_t GetThreadID(uptr index) const;
   93   uptr ThreadCount() const;
   97   PtraceRegistersStatus GetRegistersAndSP(uptr index, uptr *buffer,
   97   PtraceRegistersStatus GetRegistersAndSP(uptr index, uptr *buffer,
   98                                           uptr *sp) const;
   99   uptr RegisterCount() const;
  114   uptr parent_pid;
  164       uptr waitpid_status;
  188   for (uptr i = 0; i < suspended_threads_list_.ThreadCount(); i++) {
  204   for (uptr i = 0; i < suspended_threads_list_.ThreadCount(); i++)
  307   for (uptr i = 0; i < ARRAY_SIZE(kSyncSignals); i++) {
  333   explicit ScopedStackSpaceWithGuard(uptr stack_size) {
  350   uptr stack_size_;
  351   uptr guard_size_;
  352   uptr guard_start_;
  384   explicit ScopedSetTracerPID(uptr tracer_pid) {
  402   const uptr kTracerStackSize = 2 * 1024 * 1024;
  427   for (uptr i = 0; i < ARRAY_SIZE(kSyncSignals); i++)
  431   uptr tracer_pid = internal_clone(
  460       uptr waitpid_status = internal_waitpid(tracer_pid, nullptr, __WALL);
  515 tid_t SuspendedThreadsListLinux::GetThreadID(uptr index) const {
  520 uptr SuspendedThreadsListLinux::ThreadCount() const {
  525   for (uptr i = 0; i < thread_ids_.size(); i++) {
  536     uptr index, uptr *buffer, uptr *sp) const {
  536     uptr index, uptr *buffer, uptr *sp) const {
  536     uptr index, uptr *buffer, uptr *sp) const {
  566 uptr SuspendedThreadsListLinux::RegisterCount() const {
  567   return sizeof(regs_struct) / sizeof(uptr);
projects/compiler-rt/lib/sanitizer_common/sanitizer_suppressions.cpp
   36                                                 uptr new_file_path_size) {
   40     uptr path_to_exec_len = file_name_pos - exec.data();
   52                             uptr new_file_path_size) {
   79   uptr buffer_size;
   80   uptr contents_size;
   96   for (uptr i = 0; i < suppressions_.size(); i++) {
  157 uptr SuppressionContext::SuppressionCount() const {
  169 const Suppression *SuppressionContext::SuppressionAt(uptr i) const {
  176   for (uptr i = 0; i < suppressions_.size(); i++)
projects/compiler-rt/lib/sanitizer_common/sanitizer_suppressions.h
   26   uptr weight;
   39   uptr SuppressionCount() const;
   41   const Suppression *SuppressionAt(uptr i) const;
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer.cpp
   35 void AddressInfo::FillModuleInfo(const char *mod_name, uptr mod_offset,
   44 SymbolizedStack *SymbolizedStack::New(uptr addr) {
  103   for (uptr i = 0; i < storage_.size(); ++i) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer.h
   30   uptr address;
   33   uptr module_offset;
   36   static const uptr kUnknown = ~(uptr)0;
   38   uptr function_offset;
   47   void FillModuleInfo(const char *mod_name, uptr mod_offset, ModuleArch arch);
   54   static SymbolizedStack *New(uptr addr);
   68   uptr module_offset;
   72   uptr line;
   74   uptr start;
   75   uptr size;
   92   uptr size;
   93   uptr tag_offset;
  100   uptr module_offset;
  117   SymbolizedStack *SymbolizePC(uptr address);
  118   bool SymbolizeData(uptr address, DataInfo *info);
  119   bool SymbolizeFrame(uptr address, FrameInfo *info);
  123   bool GetModuleNameAndOffsetForPC(uptr pc, const char **module_name,
  124                                    uptr *module_address);
  125   const char *GetModuleNameForPc(uptr pc) {
  127     uptr unused;
  149   const LoadedModule *FindModuleForAddress(uptr address);
  168     static const uptr kInitialCapacity = 1000;
  178   bool FindModuleNameAndOffsetForAddress(uptr address, const char **module_name,
  179                                          uptr *module_offset,
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_internal.h
   28 const char *ExtractUptr(const char *str, const char *delims, uptr *result);
   52   virtual bool SymbolizePC(uptr addr, SymbolizedStack *stack) {
   58   virtual bool SymbolizeData(uptr addr, DataInfo *info) {
   62   virtual bool SymbolizeFrame(uptr addr, FrameInfo *info) {
   88   virtual bool ReadFromSymbolizer(char *buffer, uptr max_length);
   91   virtual bool ReachedEndOfOutput(const char *buffer, uptr length) const {
  103   bool WriteToSymbolizer(const char *buffer, uptr length);
  109   static const uptr kBufferSize = 16 * 1024;
  112   static const uptr kMaxTimesRestarted = 5;
  114   uptr times_restarted_;
  128   bool SymbolizePC(uptr addr, SymbolizedStack *stack) override;
  129   bool SymbolizeData(uptr addr, DataInfo *info) override;
  130   bool SymbolizeFrame(uptr addr, FrameInfo *info) override;
  134                                    const char *module_name, uptr module_offset,
  138   static const uptr kBufferSize = 16 * 1024;
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_libbacktrace.cpp
  184 bool LibbacktraceSymbolizer::SymbolizePC(uptr addr, SymbolizedStack *stack) {
  189 bool LibbacktraceSymbolizer::SymbolizeData(uptr addr, DataInfo *info) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_libbacktrace.h
   35   bool SymbolizePC(uptr addr, SymbolizedStack *stack) override;
   37   bool SymbolizeData(uptr addr, DataInfo *info) override;
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_libcdep.cpp
   32   uptr prefix_len = internal_strcspn(str, delims);
   51 const char *ExtractUptr(const char *str, const char *delims, uptr *result) {
   74   uptr prefix_len =
   84 SymbolizedStack *Symbolizer::SymbolizePC(uptr addr) {
   87   uptr module_offset;
  104 bool Symbolizer::SymbolizeData(uptr addr, DataInfo *info) {
  107   uptr module_offset;
  125 bool Symbolizer::SymbolizeFrame(uptr addr, FrameInfo *info) {
  141 bool Symbolizer::GetModuleNameAndOffsetForPC(uptr pc, const char **module_name,
  142                                              uptr *module_address) {
  173 bool Symbolizer::FindModuleNameAndOffsetForAddress(uptr address,
  175                                                    uptr *module_offset,
  194                                            uptr address) {
  195   for (uptr i = 0; i < modules.size(); i++) {
  203 const LoadedModule *Symbolizer::FindModuleForAddress(uptr address) {
  245   bool ReachedEndOfOutput(const char *buffer, uptr length) const override {
  299   if (uptr size = internal_strlen(file_line_info)) {
  401 bool LLVMSymbolizer::SymbolizePC(uptr addr, SymbolizedStack *stack) {
  412 bool LLVMSymbolizer::SymbolizeData(uptr addr, DataInfo *info) {
  423 bool LLVMSymbolizer::SymbolizeFrame(uptr addr, FrameInfo *info) {
  435                                                  uptr module_offset,
  516 bool SymbolizerProcess::ReadFromSymbolizer(char *buffer, uptr max_length) {
  519   uptr read_len = 0;
  521     uptr just_read = 0;
  543 bool SymbolizerProcess::WriteToSymbolizer(const char *buffer, uptr length) {
  546   uptr write_len = 0;
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_posix_libcdep.cpp
  218   bool ReachedEndOfOutput(const char *buffer, uptr length) const override;
  220   bool ReadFromSymbolizer(char *buffer, uptr max_length) override {
  248                                           uptr length) const {
  269   bool SymbolizePC(uptr addr, SymbolizedStack *stack) override {
  278   bool SymbolizeData(uptr addr, DataInfo *info) override {
  283   const char *SendCommand(const char *module_name, uptr module_offset) {
  285     for (uptr i = 0; i < addr2line_pool_.size(); ++i) {
  304   static const uptr kBufferSize = 64;
  308   static const uptr dummy_address_ =
  337   bool SymbolizePC(uptr addr, SymbolizedStack *stack) override {
  344   bool SymbolizeData(uptr addr, DataInfo *info) override {
  361       for (uptr res_length = 1024;
  364         uptr req_length =
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_report.cpp
   83   uptr pc = StackTrace::GetPreviousInstructionPc(stack->trace[0]);
  101   uptr top = 0;
  102   uptr bottom = 0;
  131 static void MaybeReportNonExecRegion(uptr pc) {
  149 static void MaybeDumpInstructionBytes(uptr pc) {
  256   uptr current = GetThreadSelf();
  258     uptr expected = 0;
projects/compiler-rt/lib/sanitizer_common/sanitizer_syscall_linux_x86_64.inc
   15 static uptr internal_syscall(u64 nr) {
   23 static uptr internal_syscall(u64 nr, T1 arg1) {
   31 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2) {
   39 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3) {
   47 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3, T4 arg4) {
   57 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3, T4 arg4,
   70 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3, T4 arg4,
   83 bool internal_iserror(uptr retval, int *rverrno) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_thread_registry.cpp
   81 void ThreadContextBase::SetCreated(uptr _user_id, u64 _unique_id,
  122 void ThreadRegistry::GetNumberOfThreads(uptr *total, uptr *running,
  122 void ThreadRegistry::GetNumberOfThreads(uptr *total, uptr *running,
  123                                         uptr *alive) {
  130 uptr ThreadRegistry::GetMaxAliveThreads() {
  135 u32 ThreadRegistry::CreateThread(uptr user_id, bool detached, u32 parent_tid,
  224 void ThreadRegistry::SetThreadNameByUserId(uptr user_id, const char *name) {
  340 void ThreadRegistry::SetThreadUserId(u32 tid, uptr user_id) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_thread_registry.h
   48   uptr user_id;   // Some opaque user thread id (e.g. pthread_t).
   66   void SetCreated(uptr _user_id, u64 _unique_id, bool _detached,
   93   void GetNumberOfThreads(uptr *total = nullptr, uptr *running = nullptr,
   93   void GetNumberOfThreads(uptr *total = nullptr, uptr *running = nullptr,
   94                           uptr *alive = nullptr);
   95   uptr GetMaxAliveThreads();
  107   u32 CreateThread(uptr user_id, bool detached, u32 parent_tid, void *arg);
  125   void SetThreadNameByUserId(uptr user_id, const char *name);
  130   void SetThreadUserId(u32 tid, uptr user_id);
  144   uptr alive_threads_;  // Created or running.
  145   uptr max_alive_threads_;
  146   uptr running_threads_;
projects/compiler-rt/lib/sanitizer_common/sanitizer_tls_get_addr.cpp
   24   uptr dso_id;
   25   uptr offset;
   31   uptr size;
   32   uptr start;
   43 static const uptr kDestroyedThread = -1;
   45 static inline void DTLS_Deallocate(DTLS::DTV *dtv, uptr size) {
   52 static inline void DTLS_Resize(uptr new_size) {
   58   uptr num_live_dtls =
   62   uptr old_dtv_size = dtls.dtv_size;
   75   uptr s = dtls.dtv_size;
   86 static const uptr kDtvOffset = 0;
   90                                 uptr static_tls_begin, uptr static_tls_end) {
   90                                 uptr static_tls_begin, uptr static_tls_end) {
   93   uptr dso_id = arg->dso_id;
   97   uptr tls_size = 0;
   98   uptr tls_beg = reinterpret_cast<uptr>(res) - arg->offset - kDtvOffset;
  129 void DTLS_on_libc_memalign(void *ptr, uptr size) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_tls_get_addr.h
   39     uptr beg, size;
   42   uptr dtv_size;
   46   uptr last_memalign_size;
   47   uptr last_memalign_ptr;
   52 DTLS::DTV *DTLS_on_tls_get_addr(void *arg, void *res, uptr static_tls_begin,
   53                                 uptr static_tls_end);
   54 void DTLS_on_libc_memalign(void *ptr, uptr size);
projects/compiler-rt/lib/sanitizer_common/sanitizer_unwind_linux_libcdep.cpp
   35   uptr absolute_pc;
   36   uptr stack_top;
   37   uptr stack_size;
   45     backtrace_frame_t *backtrace, uptr ignore_depth, uptr max_depth);
   45     backtrace_frame_t *backtrace, uptr ignore_depth, uptr max_depth);
   60 uptr Unwind_GetIP(struct _Unwind_Context *ctx) {
   81   uptr pc = Unwind_GetIP(ctx);
   82   const uptr kPageSize = GetPageSizeCached();
  126 void BufferedStackTrace::UnwindSlow(uptr pc, u32 max_depth) {
  132   uptr to_pop = LocatePcInTrace(pc);
  151 void BufferedStackTrace::UnwindSlow(uptr pc, void *context, u32 max_depth) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_vector.h
   41   uptr Size() const {
   45   T &operator[](uptr i) {
   50   const T &operator[](uptr i) const {
   74   void Resize(uptr size) {
   79     uptr old_size = Size();
   86       for (uptr i = old_size; i < size; i++)
   96   void EnsureSize(uptr size) {
  103     uptr cap0 = last_ - begin_;
  104     uptr cap = cap0 * 5 / 4;  // 25% growth
projects/compiler-rt/lib/scudo/scudo_allocator.cpp
   46 INLINE u32 computeCRC32(u32 Crc, uptr Value, uptr *Array, uptr ArraySize) {
   46 INLINE u32 computeCRC32(u32 Crc, uptr Value, uptr *Array, uptr ArraySize) {
   46 INLINE u32 computeCRC32(u32 Crc, uptr Value, uptr *Array, uptr ArraySize) {
   59     for (uptr i = 0; i < ArraySize; i++)
   64   for (uptr i = 0; i < ArraySize; i++)
   97   static INLINE uptr getUsableSize(const void *Ptr, UnpackedHeader *Header) {
   98     const uptr ClassId = Header->ClassId;
  107   static INLINE uptr getSize(const void *Ptr, UnpackedHeader *Header) {
  108     const uptr SizeOrUnusedBytes = Header->SizeOrUnusedBytes;
  119     uptr HeaderHolder[sizeof(UnpackedHeader) / sizeof(uptr)];
  119     uptr HeaderHolder[sizeof(UnpackedHeader) / sizeof(uptr)];
  199   void *Allocate(uptr Size) {
  200     const uptr BatchClassId = SizeClassMap::ClassID(sizeof(QuarantineBatch));
  205     const uptr BatchClassId = SizeClassMap::ClassID(sizeof(QuarantineBatch));
  227   static const uptr MaxAllowedMallocSize =
  240   uptr HardRssLimitMb;
  241   uptr SoftRssLimitMb;
  301   void *allocate(uptr Size, uptr Alignment, AllocType Type,
  301   void *allocate(uptr Size, uptr Alignment, AllocType Type,
  320     const uptr NeededSize = RoundUpTo(Size ? Size : 1, MinAlignment) +
  322     const uptr AlignedSize = (Alignment > MinAlignment) ?
  341     uptr BackendSize;
  368     uptr UserPtr = reinterpret_cast<uptr>(BackendPtr) + Chunk::getHeaderSize();
  374       const uptr AlignedUserPtr = RoundUpTo(UserPtr, Alignment);
  388       const uptr PageSize = GetPageSizeCached();
  389       const uptr TrailingBytes = (UserPtr + Size) & (PageSize - 1);
  404                                    uptr Size) {
  427       const uptr EstimatedSize = Size + (Header->Offset << MinAlignmentLog);
  442   void deallocate(void *Ptr, uptr DeleteSize, uptr DeleteAlignment,
  442   void deallocate(void *Ptr, uptr DeleteSize, uptr DeleteAlignment,
  478     const uptr Size = Chunk::getSize(Ptr, &Header);
  490   void *reallocate(void *OldPtr, uptr NewSize) {
  517     const uptr UsableSize = Chunk::getUsableSize(OldPtr, &OldHeader);
  532       const uptr OldSize = OldHeader.ClassId ? OldHeader.SizeOrUnusedBytes :
  541   uptr getUsableSize(const void *Ptr) {
  559   void *calloc(uptr NMemB, uptr Size) {
  559   void *calloc(uptr NMemB, uptr Size) {
  574   uptr getStats(AllocatorStat StatType) {
  576     uptr stats[AllocatorStatCount];
  586   void setRssLimit(uptr LimitMb, bool HardLimit) {
  610   const uptr MaxPrimaryAlignment =
  612   const uptr MaxOffset =
  622   const uptr MaxSizeOrUnusedBytes = SizeClassMap::kMaxSize - 1;
  627   const uptr LargestClassId = SizeClassMap::kLargestClassID;
  646   const uptr CurrentRssMb = GetRSS() >> 20;
  691 void *scudoAllocate(uptr Size, uptr Alignment, AllocType Type) {
  691 void *scudoAllocate(uptr Size, uptr Alignment, AllocType Type) {
  701 void scudoDeallocate(void *Ptr, uptr Size, uptr Alignment, AllocType Type) {
  701 void scudoDeallocate(void *Ptr, uptr Size, uptr Alignment, AllocType Type) {
  705 void *scudoRealloc(void *Ptr, uptr Size) {
  715 void *scudoCalloc(uptr NMemB, uptr Size) {
  715 void *scudoCalloc(uptr NMemB, uptr Size) {
  719 void *scudoValloc(uptr Size) {
  724 void *scudoPvalloc(uptr Size) {
  725   const uptr PageSize = GetPageSizeCached();
  737 int scudoPosixMemalign(void **MemPtr, uptr Alignment, uptr Size) {
  737 int scudoPosixMemalign(void **MemPtr, uptr Alignment, uptr Size) {
  750 void *scudoAlignedAlloc(uptr Alignment, uptr Size) {
  750 void *scudoAlignedAlloc(uptr Alignment, uptr Size) {
  760 uptr scudoMallocUsableSize(void *Ptr) {
  770 uptr __sanitizer_get_current_allocated_bytes() {
  774 uptr __sanitizer_get_heap_size() {
  778 uptr __sanitizer_get_free_bytes() {
  782 uptr __sanitizer_get_unmapped_bytes() {
  786 uptr __sanitizer_get_estimated_allocated_size(uptr Size) {
  786 uptr __sanitizer_get_estimated_allocated_size(uptr Size) {
  794 uptr __sanitizer_get_allocated_size(const void *Ptr) {
  812 void __scudo_set_rss_limit(uptr LimitMb, s32 HardLimit) {
projects/compiler-rt/lib/scudo/scudo_allocator.h
   56 const uptr MinAlignmentLog = FIRST_32_SECOND_64(3, 4);
   57 const uptr MaxAlignmentLog = 24;  // 16 MB
   58 const uptr MinAlignment = 1 << MinAlignmentLog;
   59 const uptr MaxAlignment = 1 << MaxAlignmentLog;
   63 constexpr uptr RoundUpTo(uptr Size, uptr Boundary) {
   63 constexpr uptr RoundUpTo(uptr Size, uptr Boundary) {
   63 constexpr uptr RoundUpTo(uptr Size, uptr Boundary) {
   68   constexpr uptr getHeaderSize() {
   74 const uptr AllocatorSpace = ~0ULL;
   76   static const uptr kSpaceBeg = AllocatorSpace;
   77   static const uptr kSpaceSize = AllocatorSize;
   78   static const uptr kMetadataSize = 0;
   81   static const uptr kFlags =
  113 void *scudoAllocate(uptr Size, uptr Alignment, AllocType Type);
  113 void *scudoAllocate(uptr Size, uptr Alignment, AllocType Type);
  114 void scudoDeallocate(void *Ptr, uptr Size, uptr Alignment, AllocType Type);
  114 void scudoDeallocate(void *Ptr, uptr Size, uptr Alignment, AllocType Type);
  115 void *scudoRealloc(void *Ptr, uptr Size);
  116 void *scudoCalloc(uptr NMemB, uptr Size);
  116 void *scudoCalloc(uptr NMemB, uptr Size);
  117 void *scudoValloc(uptr Size);
  118 void *scudoPvalloc(uptr Size);
  119 int scudoPosixMemalign(void **MemPtr, uptr Alignment, uptr Size);
  119 int scudoPosixMemalign(void **MemPtr, uptr Alignment, uptr Size);
  120 void *scudoAlignedAlloc(uptr Alignment, uptr Size);
  120 void *scudoAlignedAlloc(uptr Alignment, uptr Size);
  121 uptr scudoMallocUsableSize(void *Ptr);
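
scudo_allocator.h (lines 56-63 above) fixes the chunk alignment bounds (2^3 or 2^4 bytes minimum depending on word size, 2^24 bytes maximum) and declares a constexpr RoundUpTo used throughout the allocator. The sketch below shows how power-of-two rounding interacts with those constants; the body of RoundUpTo is the standard mask-and-add form and is written here as an assumption rather than a quote of the scudo source.

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

// Constants as listed in the index for scudo_allocator.h.
const uptr MinAlignmentLog = sizeof(void *) == 8 ? 4 : 3;  // FIRST_32_SECOND_64(3, 4)
const uptr MaxAlignmentLog = 24;                           // 16 MB
const uptr MinAlignment = 1 << MinAlignmentLog;
const uptr MaxAlignment = 1 << MaxAlignmentLog;

// Assumed body: round Size up to a power-of-two Boundary.
constexpr uptr RoundUpTo(uptr Size, uptr Boundary) {
  return (Size + Boundary - 1) & ~(Boundary - 1);
}

int main() {
  // e.g. a 13-byte request is padded to the minimum 16-byte granule on 64-bit.
  printf("RoundUpTo(13, MinAlignment) = %zu\n", (size_t)RoundUpTo(13, MinAlignment));
  printf("MaxAlignment = %zu bytes\n", (size_t)MaxAlignment);
}

The mask trick only works because Boundary is a power of two, which is exactly what the MinAlignmentLog/MaxAlignmentLog encoding guarantees.
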
projects/compiler-rt/lib/scudo/scudo_allocator_combined.h
   34   void *allocatePrimary(AllocatorCache *Cache, uptr ClassId) {
   40   void *allocateSecondary(uptr Size, uptr Alignment) {
   40   void *allocateSecondary(uptr Size, uptr Alignment) {
   44   void deallocatePrimary(AllocatorCache *Cache, void *Ptr, uptr ClassId) {
projects/compiler-rt/lib/scudo/scudo_allocator_secondary.h
   54   uptr CommittedSize;
   55   uptr Size;
   57 constexpr uptr getHeaderSize() {
   60 static Header *getHeader(uptr Ptr) {
   74   void *Allocate(AllocatorStats *Stats, uptr Size, uptr Alignment) {
   74   void *Allocate(AllocatorStats *Stats, uptr Size, uptr Alignment) {
   75     const uptr UserSize = Size - Chunk::getHeaderSize();
   78     uptr ReservedSize = Size + LargeChunk::getHeaderSize();
   81     const uptr PageSize = GetPageSizeCached();
   87     uptr ReservedBeg = AddressRange.Init(ReservedSize, SecondaryAllocatorName);
   92     uptr ReservedEnd = ReservedBeg + ReservedSize;
   96     uptr CommittedBeg = ReservedBeg + PageSize;
   97     uptr UserBeg = CommittedBeg + HeadersSize;
   98     uptr UserEnd = UserBeg + UserSize;
   99     uptr CommittedEnd = RoundUpTo(UserEnd, PageSize);
  108         const uptr NewReservedBeg = CommittedBeg - PageSize;
  117       const uptr NewReservedEnd = CommittedEnd + PageSize;
  126     const uptr CommittedSize = CommittedEnd - CommittedBeg;
  129     const uptr Ptr = UserBeg - Chunk::getHeaderSize();
  156     const uptr Size = H->CommittedSize;
  168   static uptr GetActuallyAllocatedSize(void *Ptr) {
  181   static constexpr uptr HeadersSize =
  187   uptr AllocatedBytes;
  188   uptr FreedBytes;
  189   uptr LargestSize;
projects/compiler-rt/lib/scudo/scudo_crc32.cpp
   19 u32 computeHardwareCRC32(u32 Crc, uptr Data) {
projects/compiler-rt/lib/scudo/scudo_crc32.h
   88 INLINE u32 computeSoftwareCRC32(u32 Crc, uptr Data) {
   89   for (uptr i = 0; i < sizeof(Data); i++) {
   96 SANITIZER_WEAK_ATTRIBUTE u32 computeHardwareCRC32(u32 Crc, uptr Data);
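
scudo_crc32.h pairs a table-driven computeSoftwareCRC32, which walks the bytes of a uptr (line 89 above), with a weak computeHardwareCRC32 that is typically backed by the SSE4.2/ARMv8 CRC32 instructions. The sketch below computes the same kind of checksum bit-by-bit, assuming the Castagnoli (CRC-32C) polynomial those instructions implement; the bitwise inner loop and the name softwareCRC32C are illustrative, not the scudo code, which uses a precomputed lookup table.

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;
using u32  = uint32_t;

// Bit-at-a-time CRC-32C (reflected polynomial 0x82F63B78), assumed to be
// the polynomial the table-driven and hardware paths use.
static u32 softwareCRC32C(u32 Crc, uptr Data) {
  for (uptr i = 0; i < sizeof(Data); i++) {      // same byte loop as line 89
    Crc ^= static_cast<u32>(Data >> (8 * i)) & 0xff;
    for (int bit = 0; bit < 8; bit++)
      Crc = (Crc >> 1) ^ (0x82F63B78u & (0u - (Crc & 1)));
  }
  return Crc;
}

int main() {
  u32 Crc = 0xFFFFFFFFu;
  Crc = softwareCRC32C(Crc, reinterpret_cast<uptr>(&Crc));  // e.g. hash a pointer
  printf("crc = 0x%08x\n", Crc);
}
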
projects/compiler-rt/lib/scudo/scudo_errors.cpp
   19 void NORETURN reportCallocOverflow(uptr Count, uptr Size) {
   19 void NORETURN reportCallocOverflow(uptr Count, uptr Size) {
   24 void NORETURN reportPvallocOverflow(uptr Size) {
   30 void NORETURN reportAllocationAlignmentTooBig(uptr Alignment,
   31                                               uptr MaxAlignment) {
   36 void NORETURN reportAllocationAlignmentNotPowerOfTwo(uptr Alignment) {
   41 void NORETURN reportInvalidPosixMemalignAlignment(uptr Alignment) {
   48 void NORETURN reportInvalidAlignedAllocAlignment(uptr Size, uptr Alignment) {
   48 void NORETURN reportInvalidAlignedAllocAlignment(uptr Size, uptr Alignment) {
   60 void NORETURN reportAllocationSizeTooBig(uptr UserSize, uptr TotalSize,
   60 void NORETURN reportAllocationSizeTooBig(uptr UserSize, uptr TotalSize,
   61                                          uptr MaxSize) {
   72 void NORETURN reportOutOfMemory(uptr RequestedSize) {
projects/compiler-rt/lib/scudo/scudo_errors.h
   20 void NORETURN reportCallocOverflow(uptr Count, uptr Size);
   20 void NORETURN reportCallocOverflow(uptr Count, uptr Size);
   21 void NORETURN reportPvallocOverflow(uptr Size);
   22 void NORETURN reportAllocationAlignmentTooBig(uptr Alignment,
   23                                               uptr MaxAlignment);
   24 void NORETURN reportAllocationAlignmentNotPowerOfTwo(uptr Alignment);
   25 void NORETURN reportInvalidPosixMemalignAlignment(uptr Alignment);
   26 void NORETURN reportInvalidAlignedAllocAlignment(uptr Size, uptr Alignment);
   26 void NORETURN reportInvalidAlignedAllocAlignment(uptr Size, uptr Alignment);
   27 void NORETURN reportAllocationSizeTooBig(uptr UserSize, uptr TotalSize,
   27 void NORETURN reportAllocationSizeTooBig(uptr UserSize, uptr TotalSize,
   28                                          uptr MaxSize);
   30 void NORETURN reportOutOfMemory(uptr RequestedSize);
projects/compiler-rt/lib/scudo/scudo_interface_internal.h
   26 void __scudo_set_rss_limit(uptr LimitMb, s32 HardLimit);
projects/compiler-rt/lib/scudo/scudo_platform.h
   75 const uptr AllocatorSize = 0x40000000000ULL;  // 4T.
projects/compiler-rt/lib/scudo/scudo_tsd.h
   27   uptr QuarantineCachePlaceHolder[4];
   50   INLINE uptr getPrecedence() { return atomic_load_relaxed(&Precedence); }
projects/compiler-rt/lib/scudo/scudo_tsd_exclusive.cpp
   32   uptr I = reinterpret_cast<uptr>(Ptr);
projects/compiler-rt/lib/scudo/scudo_utils.cpp
   44   static constexpr uptr PrefixSize = sizeof(ScudoError) - 1;
projects/compiler-rt/lib/stats/stats.cpp
   35 void WriteLE(fd_t fd, uptr val) {
   36   char chars[sizeof(uptr)];
   37   for (unsigned i = 0; i != sizeof(uptr); ++i) {
   40   WriteToFile(fd, chars, sizeof(uptr));
   54   char sizeof_uptr = sizeof(uptr);
projects/compiler-rt/lib/stats/stats.h
   26   uptr addr;
   27   uptr data;
   36 inline uptr CountFromData(uptr data) {
   36 inline uptr CountFromData(uptr data) {
   37   return data & ((1ull << (sizeof(uptr) * 8 - kKindBits)) - 1);
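
stats.h (lines 26-37 above) packs a counter and its kind into a single uptr: the kind lives in the top kKindBits bits and CountFromData masks them off, which is why stats_client.cpp can bump the count with a plain fetch-and-add (line 77). A small sketch of that packing follows; the value kKindBits = 3 and the helpers KindFromData/PackData are assumptions for illustration only.

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

const int kKindBits = 3;  // assumed width of the kind field for this sketch

// Low (bits-of-uptr - kKindBits) bits hold the count, as on line 37.
inline uptr CountFromData(uptr data) {
  return data & ((1ull << (sizeof(uptr) * 8 - kKindBits)) - 1);
}

// Illustrative inverses of the packing.
inline uptr KindFromData(uptr data) {
  return data >> (sizeof(uptr) * 8 - kKindBits);
}
inline uptr PackData(uptr kind, uptr count) {
  return (kind << (sizeof(uptr) * 8 - kKindBits)) | count;
}

int main() {
  uptr data = PackData(/*kind=*/2, /*count=*/41);
  data++;  // incrementing the whole word bumps only the count field
  printf("kind=%zu count=%zu\n", (size_t)KindFromData(data),
         (size_t)CountFromData(data));
}
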
projects/compiler-rt/lib/stats/stats_client.cpp
   77   uptr old_data = __sync_fetch_and_add(&s->data, 1);
projects/compiler-rt/lib/tsan/dd/dd_interceptors.cpp
   22 static uptr g_data_start;
   23 static uptr g_data_end;
  163   uptr cond = atomic_load(p, memory_order_acquire);
  241 void __dsan_before_mutex_lock(uptr m, int writelock) {
  247 void __dsan_after_mutex_lock(uptr m, int writelock, int trylock) {
  253 void __dsan_before_mutex_unlock(uptr m, int writelock) {
  259 void __dsan_mutex_destroy(uptr m) {
projects/compiler-rt/lib/tsan/dd/dd_rtl.cpp
   21 static u32 CurrentStackTrace(Thread *thr, uptr skip) {
  101   uptr id = atomic_fetch_add(&id_gen, 1, memory_order_relaxed);
  111 void MutexBeforeLock(Thread *thr, uptr m, bool writelock) {
  124 void MutexAfterLock(Thread *thr, uptr m, bool writelock, bool trylock) {
  137 void MutexBeforeUnlock(Thread *thr, uptr m, bool writelock) {
  148 void MutexDestroy(Thread *thr, uptr m) {
projects/compiler-rt/lib/tsan/dd/dd_rtl.h
   60 void MutexBeforeLock(Thread *thr, uptr m, bool writelock);
   61 void MutexAfterLock(Thread *thr, uptr m, bool writelock, bool trylock);
   62 void MutexBeforeUnlock(Thread *thr, uptr m, bool writelock);
   63 void MutexDestroy(Thread *thr, uptr m);
projects/compiler-rt/lib/tsan/rtl/tsan_clock.cpp
   90 static void UnrefClockBlock(ClockCache *c, u32 idx, uptr blocks) {
  102   for (uptr i = 0; i < blocks; i++)
  135   const uptr nclk = src->size_;
  216   uptr i = 0;
  225   for (uptr i = nclk_; i < dst->size_; i++)
  283   uptr i = 0;
  289   for (uptr i = 0; i < kDirtyTids; i++)
  335   for (uptr i = 0; i < dst->size_; i++)
  368   for (uptr i = 0; i < nclk_; i++)
  398   for (uptr i = 0; i < kDirtyTids; i++)
  402 void SyncClock::Resize(ClockCache *c, uptr nclk) {
  424     uptr top = size_ - blocks_ * ClockBlock::kClockCount;
  426     const uptr move = top * sizeof(tab_->clock[0]);
  515   const uptr block = tid / ClockBlock::kClockCount;
  525 ALWAYS_INLINE uptr SyncClock::capacity() const {
  528   uptr ratio = sizeof(ClockBlock::clock[0]) / sizeof(ClockBlock::table[0]);
  531   uptr top = ClockBlock::kClockCount - RoundUpTo(blocks_ + 1, ratio) / ratio;
  535 ALWAYS_INLINE u32 SyncClock::get_block(uptr bi) const {
  542   uptr bi = blocks_++;
  564   for (uptr i = 0; i < size_; i++)
  567   for (uptr i = 0; i < size_; i++)
projects/compiler-rt/lib/tsan/rtl/tsan_clock.h
   29   uptr size() const;
   35   void Resize(ClockCache *c, uptr nclk);
   65   static const uptr kDirtyTids = 2;
  117   uptr capacity() const;
  118   u32 get_block(uptr bi) const;
  134   uptr size() const;
  146   static const uptr kDirtyTids = SyncClock::kDirtyTids;
  163   uptr nclk_;
  184 ALWAYS_INLINE uptr ThreadClock::size() const {
  196 ALWAYS_INLINE uptr SyncClock::size() const {
projects/compiler-rt/lib/tsan/rtl/tsan_debugging.cpp
   56                       uptr trace_size) {
   57   uptr i = 0;
   76                            uptr trace_size) {
   91 int __tsan_get_report_tag(void *report, uptr *tag) {
   98 int __tsan_get_report_stack(void *report, uptr idx, void **trace,
   99                             uptr trace_size) {
  108 int __tsan_get_report_mop(void *report, uptr idx, int *tid, void **addr,
  110                           uptr trace_size) {
  124 int __tsan_get_report_loc(void *report, uptr idx, const char **type,
  125                           void **addr, uptr *start, uptr *size, int *tid,
  125                           void **addr, uptr *start, uptr *size, int *tid,
  127                           uptr trace_size) {
  143 int __tsan_get_report_loc_object_type(void *report, uptr idx,
  153 int __tsan_get_report_mutex(void *report, uptr idx, uptr *mutex_id, void **addr,
  153 int __tsan_get_report_mutex(void *report, uptr idx, uptr *mutex_id, void **addr,
  154                             int *destroyed, void **trace, uptr trace_size) {
  166 int __tsan_get_report_thread(void *report, uptr idx, int *tid, tid_t *os_id,
  168                              void **trace, uptr trace_size) {
  182 int __tsan_get_report_unique_tid(void *report, uptr idx, int *tid) {
  190 const char *__tsan_locate_address(uptr addr, char *name, uptr name_size,
  190 const char *__tsan_locate_address(uptr addr, char *name, uptr name_size,
  191                                   uptr *region_address_ptr,
  192                                   uptr *region_size_ptr) {
  193   uptr region_address = 0;
  194   uptr region_size = 0;
  242 int __tsan_get_alloc_stack(uptr addr, uptr *trace, uptr size, int *thread_id,
  242 int __tsan_get_alloc_stack(uptr addr, uptr *trace, uptr size, int *thread_id,
  242 int __tsan_get_alloc_stack(uptr addr, uptr *trace, uptr size, int *thread_id,
  260   for (uptr i = 0; i < size; i++) trace[i] = stack.trace[stack.size - i - 1];
projects/compiler-rt/lib/tsan/rtl/tsan_defs.h
   49   static const uptr kSize = 512;
   50   static const uptr kTableSize = kSize / sizeof(u32);
   51   static const uptr kClockCount = kSize / sizeof(ClockElem);
   52   static const uptr kRefIdx = kTableSize - 1;
   53   static const uptr kBlockIdx = kTableSize - 2;
   74 const uptr kShadowStackSize = 64 * 1024;
   77 const uptr kShadowCnt = 4;
   80 const uptr kShadowCell = 8;
   83 const uptr kShadowSize = 8;
   86 const uptr kShadowMultiplier = kShadowSize * kShadowCnt / kShadowCell;
   90 const uptr kMetaShadowCell = 8;
   93 const uptr kMetaShadowSize = 4;
  164 MD5Hash md5_hash(const void *data, uptr size);
  184 enum ExternalTag : uptr {
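
The tsan_defs.h constants above define TSan's shadow geometry: every kShadowCell (8) bytes of application memory are covered by kShadowCnt (4) shadow slots of kShadowSize (8) bytes each, so shadow memory is kShadowMultiplier (4) times the size of the range it tracks. A quick sketch of that arithmetic:

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

const uptr kShadowCnt = 4;   // shadow slots per application cell
const uptr kShadowCell = 8;  // bytes of application memory per cell
const uptr kShadowSize = 8;  // bytes per shadow slot
const uptr kShadowMultiplier = kShadowSize * kShadowCnt / kShadowCell;  // = 4

int main() {
  const uptr app_bytes = 1ull << 20;  // e.g. a 1 MiB application range
  printf("shadow for 1 MiB of app memory: %llu MiB\n",
         (unsigned long long)(app_bytes * kShadowMultiplier >> 20));
}
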
projects/compiler-rt/lib/tsan/rtl/tsan_dense_alloc.h
   28   static const uptr kSize = 128;
   30   uptr pos;
   55     for (uptr i = 0; i < kL1Size; i++) {
   98   uptr fillpos_;
  122     for (uptr i = 0; i < Cache::kSize / 2 && freelist_ != 0; i++) {
  131     for (uptr i = 0; i < Cache::kSize / 2; i++) {
projects/compiler-rt/lib/tsan/rtl/tsan_external.cpp
   29 static TagData *GetTagData(uptr tag) {
   35 const char *GetObjectTypeFromTag(uptr tag) {
   40 const char *GetReportHeaderFromTag(uptr tag) {
   45 void InsertShadowStackFrameForTag(ThreadState *thr, uptr tag) {
   49 uptr TagFromShadowStackFrame(uptr pc) {
   49 uptr TagFromShadowStackFrame(uptr pc) {
   50   uptr tag_count = atomic_load(&used_tags, memory_order_relaxed);
   59 typedef void(*AccessFunc)(ThreadState *, uptr, uptr, int);
   59 typedef void(*AccessFunc)(ThreadState *, uptr, uptr, int);
   76   uptr new_tag = atomic_fetch_add(&used_tags, 1, memory_order_relaxed);
projects/compiler-rt/lib/tsan/rtl/tsan_fd.cpp
   49 static FdSync *allocsync(ThreadState *thr, uptr pc) {
   62 static void unref(ThreadState *thr, uptr pc, FdSync *s) {
   73 static FdDesc *fddesc(ThreadState *thr, uptr pc, int fd) {
   77   uptr l1 = atomic_load(pl1, memory_order_consume);
   79     uptr size = kTableSizeL2 * sizeof(FdDesc);
   94 static void init(ThreadState *thr, uptr pc, int fd, FdSync *s,
  128 void FdOnFork(ThreadState *thr, uptr pc) {
  143 bool FdLocation(uptr addr, int *fd, int *tid, u32 *stack) {
  160 void FdAcquire(ThreadState *thr, uptr pc, int fd) {
  171 void FdRelease(ThreadState *thr, uptr pc, int fd) {
  182 void FdAccess(ThreadState *thr, uptr pc, int fd) {
  190 void FdClose(ThreadState *thr, uptr pc, int fd, bool write) {
  218 void FdFileCreate(ThreadState *thr, uptr pc, int fd) {
  225 void FdDup(ThreadState *thr, uptr pc, int oldfd, int newfd, bool write) {
  236 void FdPipeCreate(ThreadState *thr, uptr pc, int rfd, int wfd) {
  244 void FdEventCreate(ThreadState *thr, uptr pc, int fd) {
  251 void FdSignalCreate(ThreadState *thr, uptr pc, int fd) {
  258 void FdInotifyCreate(ThreadState *thr, uptr pc, int fd) {
  265 void FdPollCreate(ThreadState *thr, uptr pc, int fd) {
  272 void FdSocketCreate(ThreadState *thr, uptr pc, int fd) {
  280 void FdSocketAccept(ThreadState *thr, uptr pc, int fd, int newfd) {
  289 void FdSocketConnecting(ThreadState *thr, uptr pc, int fd) {
  297 void FdSocketConnect(ThreadState *thr, uptr pc, int fd) {
  304 uptr File2addr(const char *path) {
  310 uptr Dir2addr(const char *path) {
projects/compiler-rt/lib/tsan/rtl/tsan_fd.h
   41 void FdAcquire(ThreadState *thr, uptr pc, int fd);
   42 void FdRelease(ThreadState *thr, uptr pc, int fd);
   43 void FdAccess(ThreadState *thr, uptr pc, int fd);
   44 void FdClose(ThreadState *thr, uptr pc, int fd, bool write = true);
   45 void FdFileCreate(ThreadState *thr, uptr pc, int fd);
   46 void FdDup(ThreadState *thr, uptr pc, int oldfd, int newfd, bool write);
   47 void FdPipeCreate(ThreadState *thr, uptr pc, int rfd, int wfd);
   48 void FdEventCreate(ThreadState *thr, uptr pc, int fd);
   49 void FdSignalCreate(ThreadState *thr, uptr pc, int fd);
   50 void FdInotifyCreate(ThreadState *thr, uptr pc, int fd);
   51 void FdPollCreate(ThreadState *thr, uptr pc, int fd);
   52 void FdSocketCreate(ThreadState *thr, uptr pc, int fd);
   53 void FdSocketAccept(ThreadState *thr, uptr pc, int fd, int newfd);
   54 void FdSocketConnecting(ThreadState *thr, uptr pc, int fd);
   55 void FdSocketConnect(ThreadState *thr, uptr pc, int fd);
   56 bool FdLocation(uptr addr, int *fd, int *tid, u32 *stack);
   57 void FdOnFork(ThreadState *thr, uptr pc);
   59 uptr File2addr(const char *path);
   60 uptr Dir2addr(const char *path);
projects/compiler-rt/lib/tsan/rtl/tsan_ignoreset.cpp
   16 const uptr IgnoreSet::kMaxSize;
   25   for (uptr i = 0; i < size_; i++) {
   36 uptr IgnoreSet::Size() const {
   40 u32 IgnoreSet::At(uptr i) const {
projects/compiler-rt/lib/tsan/rtl/tsan_ignoreset.h
   22   static const uptr kMaxSize = 16;
   27   uptr Size() const;
   28   u32 At(uptr i) const;
   31   uptr size_;
projects/compiler-rt/lib/tsan/rtl/tsan_interceptors.h
   11   ScopedInterceptor(ThreadState *thr, const char *fname, uptr pc);
   17   const uptr pc_;
projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp
   85 extern "C" int pthread_attr_setstacksize(void *attr, uptr stacksize);
  216   const uptr n = supp.SuppressionCount();
  217   for (uptr i = 0; i < n; i++) {
  250                                      uptr pc)
  380     uptr element = interceptor_ctx()->AtExitStack.Size() - 1;
  397 static int setup_at_exit_wrapper(ThreadState *thr, uptr pc, void(*f)(),
  418 static int setup_at_exit_wrapper(ThreadState *thr, uptr pc, void(*f)(),
  450   uptr pc = 0;
  478 static void JmpBufGarbageCollect(ThreadState *thr, uptr sp) {
  479   for (uptr i = 0; i < thr->jmp_bufs.Size(); i++) {
  482       uptr sz = thr->jmp_bufs.Size();
  490 static void SetJmp(ThreadState *thr, uptr sp) {
  508 static void LongJmp(ThreadState *thr, uptr *env) {
  509   uptr sp = ExtractLongJmpSp(env);
  511   for (uptr i = 0; i < thr->jmp_bufs.Size(); i++) {
  535 extern "C" void __tsan_setjmp(uptr sp) {
  729   uptr srclen = internal_strlen(src);
  737   uptr srclen = internal_strnlen(src, n);
  766 static void *mmap_interceptor(ThreadState *thr, uptr pc, Mmap real_mmap,
  814     uptr PageSize = GetPageSizeCached();
  906   uptr iter = (uptr)v;
 1106   uptr cond = atomic_load(p, memory_order_acquire);
 1121   uptr pc;
 1143   MemoryAccessRange(thr, pc, (uptr)c, sizeof(uptr), true);
 1147 static int cond_wait(ThreadState *thr, uptr pc, ScopedInterceptor *si,
 1150   MemoryAccessRange(thr, pc, (uptr)c, sizeof(uptr), false);
 1195   MemoryAccessRange(thr, pc, (uptr)c, sizeof(uptr), false);
 1202   MemoryAccessRange(thr, pc, (uptr)c, sizeof(uptr), false);
 1209   MemoryAccessRange(thr, pc, (uptr)c, sizeof(uptr), true);
 1903   volatile uptr pc =
 2139   uptr pc;
 2144 static bool IsAppNotRodata(uptr addr) {
 2188   const uptr caller_pc;
 2189   const uptr pc;
 2193 static void HandleRecvmsg(ThreadState *thr, uptr pc,
 2390     uptr cb = (uptr)old->sigaction;
 2431 static void syscall_access_range(uptr pc, uptr p, uptr s, bool write) {
 2431 static void syscall_access_range(uptr pc, uptr p, uptr s, bool write) {
 2431 static void syscall_access_range(uptr pc, uptr p, uptr s, bool write) {
 2436 static void syscall_acquire(uptr pc, uptr addr) {
 2436 static void syscall_acquire(uptr pc, uptr addr) {
 2442 static void syscall_release(uptr pc, uptr addr) {
 2442 static void syscall_release(uptr pc, uptr addr) {
 2448 static void syscall_fd_close(uptr pc, int fd) {
 2453 static USED void syscall_fd_acquire(uptr pc, int fd) {
 2459 static USED void syscall_fd_release(uptr pc, int fd) {
 2465 static void syscall_pre_fork(uptr pc) {
 2470 static void syscall_post_fork(uptr pc, int pid) {
projects/compiler-rt/lib/tsan/rtl/tsan_interface.h
  122                            uptr trace_size);
  135 int __tsan_get_report_tag(void *report, uptr *tag);
  139 int __tsan_get_report_stack(void *report, uptr idx, void **trace,
  140                             uptr trace_size);
  144 int __tsan_get_report_mop(void *report, uptr idx, int *tid, void **addr,
  146                           uptr trace_size);
  150 int __tsan_get_report_loc(void *report, uptr idx, const char **type,
  151                           void **addr, uptr *start, uptr *size, int *tid,
  151                           void **addr, uptr *start, uptr *size, int *tid,
  153                           uptr trace_size);
  156 int __tsan_get_report_loc_object_type(void *report, uptr idx,
  161 int __tsan_get_report_mutex(void *report, uptr idx, uptr *mutex_id, void **addr,
  161 int __tsan_get_report_mutex(void *report, uptr idx, uptr *mutex_id, void **addr,
  162                             int *destroyed, void **trace, uptr trace_size);
  166 int __tsan_get_report_thread(void *report, uptr idx, int *tid, tid_t *os_id,
  168                              void **trace, uptr trace_size);
  172 int __tsan_get_report_unique_tid(void *report, uptr idx, int *tid);
  177 const char *__tsan_locate_address(uptr addr, char *name, uptr name_size,
  177 const char *__tsan_locate_address(uptr addr, char *name, uptr name_size,
  178                                   uptr *region_address, uptr *region_size);
  178                                   uptr *region_address, uptr *region_size);
  182 int __tsan_get_alloc_stack(uptr addr, uptr *trace, uptr size, int *thread_id,
  182 int __tsan_get_alloc_stack(uptr addr, uptr *trace, uptr size, int *thread_id,
  182 int __tsan_get_alloc_stack(uptr addr, uptr *trace, uptr size, int *thread_id,
  397 void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  397 void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  399 void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  399 void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  401 void __tsan_go_atomic32_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  401 void __tsan_go_atomic32_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  403 void __tsan_go_atomic64_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  403 void __tsan_go_atomic64_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  405 void __tsan_go_atomic32_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  405 void __tsan_go_atomic32_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  407 void __tsan_go_atomic64_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  407 void __tsan_go_atomic64_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  409 void __tsan_go_atomic32_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  409 void __tsan_go_atomic32_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  411 void __tsan_go_atomic64_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  411 void __tsan_go_atomic64_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
  413 void __tsan_go_atomic32_compare_exchange(ThreadState *thr, uptr cpc, uptr pc,
  413 void __tsan_go_atomic32_compare_exchange(ThreadState *thr, uptr cpc, uptr pc,
  416 void __tsan_go_atomic64_compare_exchange(ThreadState *thr, uptr cpc, uptr pc,
  416 void __tsan_go_atomic64_compare_exchange(ThreadState *thr, uptr cpc, uptr pc,
projects/compiler-rt/lib/tsan/rtl/tsan_interface_ann.cpp
   33   ScopedAnnotation(ThreadState *thr, const char *aname, uptr pc)
   68   uptr addr;
   69   uptr size;
   89     char *f, int l, uptr addr, uptr size, char *desc) {
   89     char *f, int l, uptr addr, uptr size, char *desc) {
  118 static ExpectRace *FindRace(ExpectRace *list, uptr addr, uptr size) {
  118 static ExpectRace *FindRace(ExpectRace *list, uptr addr, uptr size) {
  120     uptr maxbegin = max(race->addr, addr);
  121     uptr minend = min(race->addr + race->size, addr + size);
  128 static bool CheckContains(ExpectRace *list, uptr addr, uptr size) {
  128 static bool CheckContains(ExpectRace *list, uptr addr, uptr size) {
  149 bool IsExpectedReport(uptr addr, uptr size) {
  149 bool IsExpectedReport(uptr addr, uptr size) {
  163     const uptr cnt = atomic_load_relaxed(&(race->*counter));
  167     uptr i = 0;
  196     for (uptr i = 0; i < hit_matched.Size(); i++) {
  206     for (uptr i = 0; i < add_matched.Size(); i++) {
  226 void INTERFACE_ATTRIBUTE AnnotateHappensBefore(char *f, int l, uptr addr) {
  231 void INTERFACE_ATTRIBUTE AnnotateHappensAfter(char *f, int l, uptr addr) {
  236 void INTERFACE_ATTRIBUTE AnnotateCondVarSignal(char *f, int l, uptr cv) {
  240 void INTERFACE_ATTRIBUTE AnnotateCondVarSignalAll(char *f, int l, uptr cv) {
  244 void INTERFACE_ATTRIBUTE AnnotateMutexIsNotPHB(char *f, int l, uptr mu) {
  248 void INTERFACE_ATTRIBUTE AnnotateCondVarWait(char *f, int l, uptr cv,
  249                                              uptr lock) {
  253 void INTERFACE_ATTRIBUTE AnnotateRWLockCreate(char *f, int l, uptr m) {
  258 void INTERFACE_ATTRIBUTE AnnotateRWLockCreateStatic(char *f, int l, uptr m) {
  263 void INTERFACE_ATTRIBUTE AnnotateRWLockDestroy(char *f, int l, uptr m) {
  268 void INTERFACE_ATTRIBUTE AnnotateRWLockAcquired(char *f, int l, uptr m,
  269                                                 uptr is_w) {
  277 void INTERFACE_ATTRIBUTE AnnotateRWLockReleased(char *f, int l, uptr m,
  278                                                 uptr is_w) {
  286 void INTERFACE_ATTRIBUTE AnnotateTraceMemory(char *f, int l, uptr mem) {
  294 void INTERFACE_ATTRIBUTE AnnotateNewMemory(char *f, int l, uptr mem,
  295                                            uptr size) {
  299 void INTERFACE_ATTRIBUTE AnnotateNoOp(char *f, int l, uptr mem) {
  325     char *f, int l, uptr mu) {
  330     char *f, int l, uptr pcq) {
  335     char *f, int l, uptr pcq) {
  340     char *f, int l, uptr pcq) {
  345     char *f, int l, uptr pcq) {
  350     char *f, int l, uptr mem, char *desc) {
  359     char *f, int l, uptr mem, uptr size, char *desc) {
  359     char *f, int l, uptr mem, uptr size, char *desc) {
  368     char *f, int l, uptr mem, uptr size, char *desc) {
  368     char *f, int l, uptr mem, uptr size, char *desc) {
  374     char *f, int l, uptr mem, char *desc) {
  410     char *f, int l, uptr addr, uptr size) {
  410     char *f, int l, uptr addr, uptr size) {
  415     char *f, int l, uptr addr, uptr size) {
  415     char *f, int l, uptr addr, uptr size) {
  428 void INTERFACE_ATTRIBUTE WTFAnnotateHappensBefore(char *f, int l, uptr addr) {
  432 void INTERFACE_ATTRIBUTE WTFAnnotateHappensAfter(char *f, int l, uptr addr) {
  437     char *f, int l, uptr mem, uptr sz, char *desc) {
  437     char *f, int l, uptr mem, uptr sz, char *desc) {
  458 AnnotateMemoryIsInitialized(char *f, int l, uptr mem, uptr sz) {}
  458 AnnotateMemoryIsInitialized(char *f, int l, uptr mem, uptr sz) {}
  460 AnnotateMemoryIsUninitialized(char *f, int l, uptr mem, uptr sz) {}
  460 AnnotateMemoryIsUninitialized(char *f, int l, uptr mem, uptr sz) {}
projects/compiler-rt/lib/tsan/rtl/tsan_interface_atomic.cpp
  222 static T AtomicLoad(ThreadState *thr, uptr pc, const volatile T *a, morder mo) {
  258 static void AtomicStore(ThreadState *thr, uptr pc, volatile T *a, T v,
  281 static T AtomicRMW(ThreadState *thr, uptr pc, volatile T *a, T v, morder mo) {
  338 static T AtomicExchange(ThreadState *thr, uptr pc, volatile T *a, T v,
  344 static T AtomicFetchAdd(ThreadState *thr, uptr pc, volatile T *a, T v,
  350 static T AtomicFetchSub(ThreadState *thr, uptr pc, volatile T *a, T v,
  356 static T AtomicFetchAnd(ThreadState *thr, uptr pc, volatile T *a, T v,
  362 static T AtomicFetchOr(ThreadState *thr, uptr pc, volatile T *a, T v,
  368 static T AtomicFetchXor(ThreadState *thr, uptr pc, volatile T *a, T v,
  374 static T AtomicFetchNand(ThreadState *thr, uptr pc, volatile T *a, T v,
  403 static bool AtomicCAS(ThreadState *thr, uptr pc,
  436 static T AtomicCAS(ThreadState *thr, uptr pc,
  447 static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
  491   ScopedAtomic(ThreadState *thr, uptr pc, const volatile void *a,
  505 static void AtomicStatInc(ThreadState *thr, uptr size, morder mo, StatType t) {
projects/compiler-rt/lib/tsan/rtl/tsan_interface_inl.h
  118 void __tsan_read_range(void *addr, uptr size) {
  122 void __tsan_write_range(void *addr, uptr size) {
  126 void __tsan_read_range_pc(void *addr, uptr size, void *pc) {
  130 void __tsan_write_range_pc(void *addr, uptr size, void *pc) {
projects/compiler-rt/lib/tsan/rtl/tsan_interface_java.cpp
   29   const uptr heap_begin;
   30   const uptr heap_size;
   40   ScopedJavaFunc(ThreadState *thr, uptr pc)
  139   uptr inc = 1;
  159   for (uptr from = *from_ptr; from < to; from += kHeapAlignment) {
projects/compiler-rt/lib/tsan/rtl/tsan_md5.cpp
  242 MD5Hash md5_hash(const void *data, uptr size) {
projects/compiler-rt/lib/tsan/rtl/tsan_mman.cpp
   25 void __sanitizer_malloc_hook(void *ptr, uptr size) {
   38   void OnMap(uptr p, uptr size) const { }
   38   void OnMap(uptr p, uptr size) const { }
   39   void OnUnmap(uptr p, uptr size) const {
   39   void OnUnmap(uptr p, uptr size) const {
   46     const uptr kMetaRatio = kMetaShadowCell / kMetaShadowSize;
   47     const uptr kPageSize = GetPageSizeCached() * kMetaRatio;
   51     uptr diff = RoundUp(p, kPageSize) - p;
   59     uptr p_meta = (uptr)MemToMeta(p);
  139 static void SignalUnsafeCall(ThreadState *thr, uptr pc) {
  153 static constexpr uptr kMaxAllowedMallocSize = 1ull << 40;
  155 void *user_alloc_internal(ThreadState *thr, uptr pc, uptr sz, uptr align,
  155 void *user_alloc_internal(ThreadState *thr, uptr pc, uptr sz, uptr align,
  155 void *user_alloc_internal(ThreadState *thr, uptr pc, uptr sz, uptr align,
  178 void user_free(ThreadState *thr, uptr pc, void *p, bool signal) {
  187 void *user_alloc(ThreadState *thr, uptr pc, uptr sz) {
  187 void *user_alloc(ThreadState *thr, uptr pc, uptr sz) {
  191 void *user_calloc(ThreadState *thr, uptr pc, uptr size, uptr n) {
  191 void *user_calloc(ThreadState *thr, uptr pc, uptr size, uptr n) {
  191 void *user_calloc(ThreadState *thr, uptr pc, uptr size, uptr n) {
  204 void *user_reallocarray(ThreadState *thr, uptr pc, void *p, uptr size, uptr n) {
  204 void *user_reallocarray(ThreadState *thr, uptr pc, void *p, uptr size, uptr n) {
  204 void *user_reallocarray(ThreadState *thr, uptr pc, void *p, uptr size, uptr n) {
  214 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
  214 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
  214 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write) {
  223 void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
  223 void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write) {
  225   uptr sz = ctx->metamap.FreeBlock(thr->proc(), p);
  231 void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz) {
  231 void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz) {
  242     uptr old_sz = user_alloc_usable_size(p);
  249 void *user_memalign(ThreadState *thr, uptr pc, uptr align, uptr sz) {
  249 void *user_memalign(ThreadState *thr, uptr pc, uptr align, uptr sz) {
  249 void *user_memalign(ThreadState *thr, uptr pc, uptr align, uptr sz) {
  260 int user_posix_memalign(ThreadState *thr, uptr pc, void **memptr, uptr align,
  260 int user_posix_memalign(ThreadState *thr, uptr pc, void **memptr, uptr align,
  261                         uptr sz) {
  277 void *user_aligned_alloc(ThreadState *thr, uptr pc, uptr align, uptr sz) {
  277 void *user_aligned_alloc(ThreadState *thr, uptr pc, uptr align, uptr sz) {
  277 void *user_aligned_alloc(ThreadState *thr, uptr pc, uptr align, uptr sz) {
  288 void *user_valloc(ThreadState *thr, uptr pc, uptr sz) {
  288 void *user_valloc(ThreadState *thr, uptr pc, uptr sz) {
  292 void *user_pvalloc(ThreadState *thr, uptr pc, uptr sz) {
  292 void *user_pvalloc(ThreadState *thr, uptr pc, uptr sz) {
  293   uptr PageSize = GetPageSizeCached();
  306 uptr user_alloc_usable_size(const void *p) {
  317 void invoke_malloc_hook(void *ptr, uptr size) {
  333 void *internal_alloc(MBlockType typ, uptr sz) {
  356 uptr __sanitizer_get_current_allocated_bytes() {
  357   uptr stats[AllocatorStatCount];
  362 uptr __sanitizer_get_heap_size() {
  363   uptr stats[AllocatorStatCount];
  368 uptr __sanitizer_get_free_bytes() {
  372 uptr __sanitizer_get_unmapped_bytes() {
  376 uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  376 uptr __sanitizer_get_estimated_allocated_size(uptr size) {
  384 uptr __sanitizer_get_allocated_size(const void *p) {
projects/compiler-rt/lib/tsan/rtl/tsan_mman.h
   19 const uptr kDefaultAlignment = 16;
   29 void *user_alloc_internal(ThreadState *thr, uptr pc, uptr sz,
   29 void *user_alloc_internal(ThreadState *thr, uptr pc, uptr sz,
   30                           uptr align = kDefaultAlignment, bool signal = true);
   32 void user_free(ThreadState *thr, uptr pc, void *p, bool signal = true);
   34 void *user_alloc(ThreadState *thr, uptr pc, uptr sz);
   34 void *user_alloc(ThreadState *thr, uptr pc, uptr sz);
   35 void *user_calloc(ThreadState *thr, uptr pc, uptr sz, uptr n);
   35 void *user_calloc(ThreadState *thr, uptr pc, uptr sz, uptr n);
   35 void *user_calloc(ThreadState *thr, uptr pc, uptr sz, uptr n);
   36 void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz);
   36 void *user_realloc(ThreadState *thr, uptr pc, void *p, uptr sz);
   37 void *user_reallocarray(ThreadState *thr, uptr pc, void *p, uptr sz, uptr n);
   37 void *user_reallocarray(ThreadState *thr, uptr pc, void *p, uptr sz, uptr n);
   37 void *user_reallocarray(ThreadState *thr, uptr pc, void *p, uptr sz, uptr n);
   38 void *user_memalign(ThreadState *thr, uptr pc, uptr align, uptr sz);
   38 void *user_memalign(ThreadState *thr, uptr pc, uptr align, uptr sz);
   38 void *user_memalign(ThreadState *thr, uptr pc, uptr align, uptr sz);
   39 int user_posix_memalign(ThreadState *thr, uptr pc, void **memptr, uptr align,
   39 int user_posix_memalign(ThreadState *thr, uptr pc, void **memptr, uptr align,
   40                         uptr sz);
   41 void *user_aligned_alloc(ThreadState *thr, uptr pc, uptr align, uptr sz);
   41 void *user_aligned_alloc(ThreadState *thr, uptr pc, uptr align, uptr sz);
   41 void *user_aligned_alloc(ThreadState *thr, uptr pc, uptr align, uptr sz);
   42 void *user_valloc(ThreadState *thr, uptr pc, uptr sz);
   42 void *user_valloc(ThreadState *thr, uptr pc, uptr sz);
   43 void *user_pvalloc(ThreadState *thr, uptr pc, uptr sz);
   43 void *user_pvalloc(ThreadState *thr, uptr pc, uptr sz);
   44 uptr user_alloc_usable_size(const void *p);
   47 void invoke_malloc_hook(void *ptr, uptr size);
   79 void *internal_alloc(MBlockType typ, uptr sz);
projects/compiler-rt/lib/tsan/rtl/tsan_mutex.cpp
  180 const uptr kUnlocked = 0;
  181 const uptr kWriteLock = 1;
  182 const uptr kReadLock = 2;
  230   uptr cmp = kUnlocked;
  249   uptr prev = atomic_fetch_sub(&state_, kWriteLock, memory_order_release);
  261   uptr prev = atomic_fetch_add(&state_, kReadLock, memory_order_acquire);
  276   uptr prev = atomic_fetch_sub(&state_, kReadLock, memory_order_release);
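
tsan_mutex.cpp (lines 180-276 above) encodes its internal reader-writer lock state in one word: bit 0 (kWriteLock) marks a writer and each reader adds kReadLock (2), so writers acquire with a compare-and-swap from kUnlocked while readers use fetch-add/fetch-sub. The sketch below rebuilds that encoding on std::atomic; the spin-and-yield policy is a simplification, not TSan's actual backoff or deadlock-detection logic.

#include <atomic>
#include <cstdint>
#include <thread>

using uptr = uintptr_t;

// State encoding as listed for tsan_mutex.cpp: bit 0 is the writer,
// every reader adds kReadLock (2) to the word.
const uptr kUnlocked  = 0;
const uptr kWriteLock = 1;
const uptr kReadLock  = 2;

class RWSpinLock {
 public:
  void Lock() {  // writer: CAS from kUnlocked, as on line 230
    uptr cmp = kUnlocked;
    while (!state_.compare_exchange_weak(cmp, kWriteLock,
                                         std::memory_order_acquire)) {
      cmp = kUnlocked;
      std::this_thread::yield();
    }
  }
  void Unlock() {  // cf. fetch_sub(kWriteLock) on line 249
    state_.fetch_sub(kWriteLock, std::memory_order_release);
  }
  void ReadLock() {  // cf. fetch_add(kReadLock) on line 261
    uptr prev = state_.fetch_add(kReadLock, std::memory_order_acquire);
    while (prev & kWriteLock) {  // a writer holds it: wait for bit 0 to clear
      std::this_thread::yield();
      prev = state_.load(std::memory_order_acquire);
    }
  }
  void ReadUnlock() {  // cf. fetch_sub(kReadLock) on line 276
    state_.fetch_sub(kReadLock, std::memory_order_release);
  }

 private:
  std::atomic<uptr> state_{kUnlocked};
};

int main() {
  RWSpinLock mu;
  mu.ReadLock(); mu.ReadUnlock();
  mu.Lock(); mu.Unlock();
}
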
projects/compiler-rt/lib/tsan/rtl/tsan_mutexset.cpp
   17 const uptr MutexSet::kMaxSize;
   26   for (uptr i = 0; i < size_; i++) {
   37     for (uptr i = 0; i < size_; i++) {
   55   for (uptr i = 0; i < size_; i++) {
   65   for (uptr i = 0; i < size_; i++) {
   73 void MutexSet::RemovePos(uptr i) {
   79 uptr MutexSet::Size() const {
   83 MutexSet::Desc MutexSet::Get(uptr i) const {
projects/compiler-rt/lib/tsan/rtl/tsan_mutexset.h
   24   static const uptr kMaxSize = 16;
   37   uptr Size() const;
   38   Desc Get(uptr i) const;
   46   uptr size_;
   50   void RemovePos(uptr i);
projects/compiler-rt/lib/tsan/rtl/tsan_new_delete.cpp
   24 enum class align_val_t: __sanitizer::uptr {};
   62 void *operator new(__sanitizer::uptr size);
   63 void *operator new(__sanitizer::uptr size) {
   68 void *operator new[](__sanitizer::uptr size);
   69 void *operator new[](__sanitizer::uptr size) {
   74 void *operator new(__sanitizer::uptr size, std::nothrow_t const&);
   75 void *operator new(__sanitizer::uptr size, std::nothrow_t const&) {
   80 void *operator new[](__sanitizer::uptr size, std::nothrow_t const&);
   81 void *operator new[](__sanitizer::uptr size, std::nothrow_t const&) {
   86 void *operator new(__sanitizer::uptr size, std::align_val_t align);
   87 void *operator new(__sanitizer::uptr size, std::align_val_t align) {
   92 void *operator new[](__sanitizer::uptr size, std::align_val_t align);
   93 void *operator new[](__sanitizer::uptr size, std::align_val_t align) {
   98 void *operator new(__sanitizer::uptr size, std::align_val_t align,
  100 void *operator new(__sanitizer::uptr size, std::align_val_t align,
  107 void *operator new[](__sanitizer::uptr size, std::align_val_t align,
  109 void *operator new[](__sanitizer::uptr size, std::align_val_t align,
  148 void operator delete(void *ptr, __sanitizer::uptr size) NOEXCEPT;
  149 void operator delete(void *ptr, __sanitizer::uptr size) NOEXCEPT {
  154 void operator delete[](void *ptr, __sanitizer::uptr size) NOEXCEPT;
  155 void operator delete[](void *ptr, __sanitizer::uptr size) NOEXCEPT {
  186 void operator delete(void *ptr, __sanitizer::uptr size,
  188 void operator delete(void *ptr, __sanitizer::uptr size,
  194 void operator delete[](void *ptr, __sanitizer::uptr size,
  196 void operator delete[](void *ptr, __sanitizer::uptr size,
projects/compiler-rt/lib/tsan/rtl/tsan_platform.h
   59   static const uptr kMetaShadowBeg = 0x300000000000ull;
   60   static const uptr kMetaShadowEnd = 0x340000000000ull;
   61   static const uptr kTraceMemBeg   = 0x600000000000ull;
   62   static const uptr kTraceMemEnd   = 0x620000000000ull;
   63   static const uptr kShadowBeg     = 0x010000000000ull;
   64   static const uptr kShadowEnd     = 0x200000000000ull;
   65   static const uptr kHeapMemBeg    = 0x7b0000000000ull;
   66   static const uptr kHeapMemEnd    = 0x7c0000000000ull;
   67   static const uptr kLoAppMemBeg   = 0x000000001000ull;
   68   static const uptr kLoAppMemEnd   = 0x008000000000ull;
   69   static const uptr kMidAppMemBeg  = 0x550000000000ull;
   70   static const uptr kMidAppMemEnd  = 0x568000000000ull;
   71   static const uptr kHiAppMemBeg   = 0x7e8000000000ull;
   72   static const uptr kHiAppMemEnd   = 0x800000000000ull;
   73   static const uptr kAppMemMsk     = 0x780000000000ull;
   74   static const uptr kAppMemXor     = 0x040000000000ull;
   75   static const uptr kVdsoBeg       = 0xf000000000000000ull;
  524 uptr MappingImpl(void) {
  552 uptr MappingArchImpl(void) {
  578 uptr LoAppMemBeg(void) {
  582 uptr LoAppMemEnd(void) {
  588 uptr MidAppMemBeg(void) {
  592 uptr MidAppMemEnd(void) {
  598 uptr HeapMemBeg(void) {
  602 uptr HeapMemEnd(void) {
  607 uptr HiAppMemBeg(void) {
  611 uptr HiAppMemEnd(void) {
  616 uptr VdsoBeg(void) {
  634 bool GetUserRegion(int i, uptr *start, uptr *end) {
  634 bool GetUserRegion(int i, uptr *start, uptr *end) {
  667 uptr ShadowBeg(void) {
  671 uptr ShadowEnd(void) {
  676 uptr MetaShadowBeg(void) {
  680 uptr MetaShadowEnd(void) {
  685 uptr TraceMemBeg(void) {
  689 uptr TraceMemEnd(void) {
  695 bool IsAppMemImpl(uptr mem) {
  709 bool IsAppMem(uptr mem) {
  735 bool IsShadowMemImpl(uptr mem) {
  740 bool IsShadowMem(uptr mem) {
  766 bool IsMetaMemImpl(uptr mem) {
  771 bool IsMetaMem(uptr mem) {
  797 uptr MemToShadowImpl(uptr x) {
  797 uptr MemToShadowImpl(uptr x) {
  812 uptr MemToShadow(uptr x) {
  812 uptr MemToShadow(uptr x) {
  838 u32 *MemToMetaImpl(uptr x) {
  855 u32 *MemToMeta(uptr x) {
  881 uptr ShadowToMemImpl(uptr s) {
  881 uptr ShadowToMemImpl(uptr s) {
  889   uptr p = (s / kShadowCnt) ^ Mapping::kAppMemXor;
  911 uptr ShadowToMem(uptr s) {
  911 uptr ShadowToMem(uptr s) {
  939 const uptr kTotalTraceSize = (kTraceSize * sizeof(Event) + sizeof(Trace)
  943 uptr GetThreadTraceImpl(int tid) {
  944   uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize;
  950 uptr GetThreadTrace(int tid) {
  976 uptr GetThreadTraceHeaderImpl(int tid) {
  977   uptr p = Mapping::kTraceMemBeg + (uptr)tid * kTotalTraceSize
  984 uptr GetThreadTraceHeader(int tid) {
 1013 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
 1013 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
 1013 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive);
 1016 uptr ExtractLongJmpSp(uptr *env);
 1016 uptr ExtractLongJmpSp(uptr *env);
 1017 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);
 1017 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size);
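
The Mapping constants and the MemToShadow/ShadowToMem pair indexed above implement TSan's direct application-to-shadow address translation for the Linux/x86_64 layout. The stand-alone sketch below reconstructs that translation from the inverse visible on line 889 ((s / kShadowCnt) ^ Mapping::kAppMemXor); the exact mask expression is an assumption, not a quote of MemToShadowImpl.

#include <cstdint>
#include <cstdio>

using uptr = uintptr_t;

// Linux/x86_64 Mapping constants as listed above (lines 59-75).
const uptr kShadowBeg  = 0x010000000000ull;
const uptr kShadowEnd  = 0x200000000000ull;
const uptr kAppMemMsk  = 0x780000000000ull;
const uptr kAppMemXor  = 0x040000000000ull;
const uptr kShadowCell = 8;          // from tsan_defs.h
const uptr kShadowMultiplier = 4;    // kShadowSize * kShadowCnt / kShadowCell

// Sketch of the app->shadow translation; the masking is reconstructed from
// the inverse on line 889 and should be read as an assumption.
uptr MemToShadowSketch(uptr x) {
  return ((x & ~(kAppMemMsk | (kShadowCell - 1))) ^ kAppMemXor) * kShadowMultiplier;
}

int main() {
  uptr heap_addr = 0x7b0000001000ull;  // an address in kHeapMemBeg..kHeapMemEnd
  uptr s = MemToShadowSketch(heap_addr);
  printf("app 0x%llx -> shadow 0x%llx (in shadow range: %d)\n",
         (unsigned long long)heap_addr, (unsigned long long)s,
         (int)(s >= kShadowBeg && s < kShadowEnd));
}
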
projects/compiler-rt/lib/tsan/rtl/tsan_platform_linux.cpp
  106 void FillProfileCallback(uptr p, uptr rss, bool file,
  106 void FillProfileCallback(uptr p, uptr rss, bool file,
  107                          uptr *mem, uptr stats_size) {
  107                          uptr *mem, uptr stats_size) {
  130 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive) {
  130 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive) {
  130 void WriteMemoryProfile(char *buf, uptr buf_size, uptr nthread, uptr nlive) {
  131   uptr mem[MemCount];
  176   uptr openrv = internal_open(name, O_RDWR | O_CREAT | O_EXCL, 0600);
  182   const uptr kMarkerSize = 512 * 1024 / sizeof(u64);
  189   uptr page = internal_mmap(0, GetPageSizeCached(), PROT_READ | PROT_WRITE,
  208         internal_mmap(p, Min<uptr>(marker.size() * sizeof(u64), shadow_end - p),
  271       const uptr kMaxStackSize = 32 * 1024 * 1024;
  351 static uptr UnmangleLongJmpSp(uptr mangled_sp) {
  351 static uptr UnmangleLongJmpSp(uptr mangled_sp) {
  357   uptr sp;
  402 uptr ExtractLongJmpSp(uptr *env) {
  402 uptr ExtractLongJmpSp(uptr *env) {
  403   uptr mangled_sp = env[LONG_JMP_SP_ENV_SLOT];
  427 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size) {
  427 void ImitateTlsWrite(ThreadState *thr, uptr tls_addr, uptr tls_size) {
  429   const uptr thr_beg = (uptr)thr;
  430   const uptr thr_end = (uptr)thr + sizeof(*thr);
projects/compiler-rt/lib/tsan/rtl/tsan_platform_posix.cpp
   32 static void NoHugePagesInShadow(uptr addr, uptr size) {
   32 static void NoHugePagesInShadow(uptr addr, uptr size) {
   36 static void DontDumpShadow(uptr addr, uptr size) {
   36 static void DontDumpShadow(uptr addr, uptr size) {
   60   const uptr kMadviseRangeBeg  = 0x7f0000000000ull;
   61   const uptr kMadviseRangeSize = 0x010000000000ull;
  101   const uptr meta = MetaShadowBeg();
  102   const uptr meta_size = MetaShadowEnd() - meta;
  116 static void ProtectRange(uptr beg, uptr end) {
  116 static void ProtectRange(uptr beg, uptr end) {
projects/compiler-rt/lib/tsan/rtl/tsan_report.cpp
   78 static const char *ReportTypeString(ReportType typ, uptr tag) {
  140   for (uptr i = 0; i < mset.Size(); i++) {
  328     for (uptr i = 0; i < rep->mutexes.Size(); i++)
  334     for (uptr i = 0; i < rep->mutexes.Size(); i++) {
  356     for (uptr i = 0; i < rep->stacks.Size(); i++) {
  363   for (uptr i = 0; i < rep->mops.Size(); i++)
  369   for (uptr i = 0; i < rep->locs.Size(); i++)
  373     for (uptr i = 0; i < rep->mutexes.Size(); i++)
  377   for (uptr i = 0; i < rep->threads.Size(); i++)
projects/compiler-rt/lib/tsan/rtl/tsan_report.h
   56   uptr addr;
   60   uptr external_tag;
   78   uptr heap_chunk_start;
   79   uptr heap_chunk_size;
   80   uptr external_tag;
  103   uptr addr;
  111   uptr tag;
projects/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp
   74   const uptr hdr = GetThreadTraceHeader(tid);
   81   uptr hdr_end = hdr + sizeof(Trace);
  114                          uptr stk_addr, uptr stk_size,
  114                          uptr stk_addr, uptr stk_size,
  115                          uptr tls_addr, uptr tls_size)
  115                          uptr tls_addr, uptr tls_size)
  139   uptr n_threads;
  140   uptr n_running_threads;
  176   uptr last_rss = 0;
  193       uptr rss = GetRSS();
  194       uptr limit = uptr(flags()->memory_limit_mb) << 20;
  238 void DontNeedShadowFor(uptr addr, uptr size) {
  238 void DontNeedShadowFor(uptr addr, uptr size) {
  243 void UnmapShadow(ThreadState *thr, uptr addr, uptr size) {
  243 void UnmapShadow(ThreadState *thr, uptr addr, uptr size) {
  251 void MapShadow(uptr addr, uptr size) {
  251 void MapShadow(uptr addr, uptr size) {
  255   const uptr kPageSize = GetPageSizeCached();
  256   uptr shadow_begin = RoundDownTo((uptr)MemToShadow(addr), kPageSize);
  257   uptr shadow_end = RoundUpTo((uptr)MemToShadow(addr + size), kPageSize);
  263   static uptr mapped_meta_end = 0;
  264   uptr meta_begin = (uptr)MemToMeta(addr);
  265   uptr meta_end = (uptr)MemToMeta(addr + size);
  290 void MapThreadTrace(uptr addr, uptr size, const char *name) {
  290 void MapThreadTrace(uptr addr, uptr size, const char *name) {
  303   uptr beg, end;
  310     uptr prev = 0;
  311     for (uptr p0 = beg; p0 <= end; p0 += (end - beg) / 4) {
  313         const uptr p = RoundDown(p0 + x, kShadowCell);
  316         const uptr s = MemToShadow(p);
  317         const uptr m = (uptr)MemToMeta(p);
  326           const uptr prev_s = MemToShadow(prev);
  327           const uptr prev_m = (uptr)MemToMeta(prev);
  494 void ForkBefore(ThreadState *thr, uptr pc) {
  499 void ForkParentAfter(ThreadState *thr, uptr pc) {
  504 void ForkChildAfter(ThreadState *thr, uptr pc) {
  508   uptr nthread = 0;
  541 u32 CurrentStackId(ThreadState *thr, uptr pc) {
  581 uptr TraceTopPC(ThreadState *thr) {
  583   uptr pc = events[thr->fast_state.GetTracePos()];
  587 uptr TraceSize() {
  591 uptr TraceParts() {
  640 void MemoryAccessImpl1(ThreadState *thr, uptr addr,
  709 void UnalignedMemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  709 void UnalignedMemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  733   for (uptr i = 0; i < kShadowCnt; i++) {
  818 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  818 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  884 void MemoryAccessImpl(ThreadState *thr, uptr addr,
  900 static void MemoryRangeSet(ThreadState *thr, uptr pc, uptr addr, uptr size,
  900 static void MemoryRangeSet(ThreadState *thr, uptr pc, uptr addr, uptr size,
  900 static void MemoryRangeSet(ThreadState *thr, uptr pc, uptr addr, uptr size,
  907   uptr offset = addr % kShadowCell;
  929     for (uptr i = 0; i < size / kShadowCell * kShadowCnt;) {
  931       for (uptr j = 1; j < kShadowCnt; j++)
  936     const uptr kPageSize = GetPageSizeCached();
  943       for (uptr j = 1; j < kShadowCnt; j++)
  955       for (uptr j = 1; j < kShadowCnt; j++)
  961 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  961 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  961 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  965 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  965 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  965 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  987 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  987 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  987 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size) {
  999 void MemoryRangeImitateWriteOrResetRange(ThreadState *thr, uptr pc, uptr addr,
  999 void MemoryRangeImitateWriteOrResetRange(ThreadState *thr, uptr pc, uptr addr,
 1000                                          uptr size) {
 1008 void FuncEntry(ThreadState *thr, uptr pc) {
 1045 void ThreadIgnoreBegin(ThreadState *thr, uptr pc, bool save_stack) {
 1056 void ThreadIgnoreEnd(ThreadState *thr, uptr pc) {
 1070 uptr __tsan_testonly_shadow_stack_current_size() {
 1076 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc, bool save_stack) {
 1086 void ThreadIgnoreSyncEnd(ThreadState *thr, uptr pc) {
projects/compiler-rt/lib/tsan/rtl/tsan_rtl.h
   72   static const uptr kSpaceBeg = Mapping::kHeapMemBeg;
   73   static const uptr kSpaceSize = Mapping::kHeapMemEnd - Mapping::kHeapMemBeg;
   74   static const uptr kMetadataSize = 0;
   77   static const uptr kFlags = 0;
  327   uptr sp;
  330   uptr in_signal_handler;
  331   uptr *shadow_stack_pos;
  395   uptr *shadow_stack;
  396   uptr *shadow_stack_end;
  397   uptr *shadow_stack_pos;
  417   const uptr stk_addr;
  418   const uptr stk_size;
  419   const uptr tls_addr;
  420   const uptr tls_size;
  452                        uptr stk_addr, uptr stk_size,
  452                        uptr stk_addr, uptr stk_size,
  453                        uptr tls_addr, uptr tls_size);
  453                        uptr tls_addr, uptr tls_size);
  515   uptr addr_min;
  516   uptr addr_max;
  521   uptr pc_or_addr;
  582 const char *GetObjectTypeFromTag(uptr tag);
  583 const char *GetReportHeaderFromTag(uptr tag);
  584 uptr TagFromShadowStackFrame(uptr pc);
  584 uptr TagFromShadowStackFrame(uptr pc);
  588   void AddMemoryAccess(uptr addr, uptr external_tag, Shadow s, StackTrace stack,
  588   void AddMemoryAccess(uptr addr, uptr external_tag, Shadow s, StackTrace stack,
  596   void AddLocation(uptr addr, uptr size);
  596   void AddLocation(uptr addr, uptr size);
  603   ScopedReportBase(ReportType typ, uptr tag);
  620   explicit ScopedReport(ReportType typ, uptr tag = kExternalTagNone);
  627 ThreadContext *IsThreadStackOrTls(uptr addr, bool *is_stack);
  629                   MutexSet *mset, uptr *tag = nullptr);
  636 void ExtractTagFromStack(StackTraceTy *stack, uptr *tag = nullptr) {
  638   uptr possible_tag_pc = stack->trace[stack->size - 2];
  639   uptr possible_tag = TagFromShadowStackFrame(possible_tag_pc);
  647 void ObtainCurrentStack(ThreadState *thr, uptr toppc, StackTraceTy *stack,
  648                         uptr *tag = nullptr) {
  649   uptr size = thr->shadow_stack_pos - thr->shadow_stack;
  650   uptr start = 0;
  680 void MapShadow(uptr addr, uptr size);
  680 void MapShadow(uptr addr, uptr size);
  681 void MapThreadTrace(uptr addr, uptr size, const char *name);
  681 void MapThreadTrace(uptr addr, uptr size, const char *name);
  682 void DontNeedShadowFor(uptr addr, uptr size);
  682 void DontNeedShadowFor(uptr addr, uptr size);
  683 void UnmapShadow(ThreadState *thr, uptr addr, uptr size);
  683 void UnmapShadow(ThreadState *thr, uptr addr, uptr size);
  689 void ForkBefore(ThreadState *thr, uptr pc);
  690 void ForkParentAfter(ThreadState *thr, uptr pc);
  691 void ForkChildAfter(ThreadState *thr, uptr pc);
  696 bool IsExpectedReport(uptr addr, uptr size);
  696 bool IsExpectedReport(uptr addr, uptr size);
  711 u32 CurrentStackId(ThreadState *thr, uptr pc);
  713 void PrintCurrentStack(ThreadState *thr, uptr pc);
  714 void PrintCurrentStackSlow(uptr pc);  // uses libunwind
  720 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write);
  720 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write);
  720 void OnUserAlloc(ThreadState *thr, uptr pc, uptr p, uptr sz, bool write);
  721 void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write);
  721 void OnUserFree(ThreadState *thr, uptr pc, uptr p, bool write);
  723 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  723 void MemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  725 void MemoryAccessImpl(ThreadState *thr, uptr addr,
  728 void MemoryAccessRange(ThreadState *thr, uptr pc, uptr addr,
  728 void MemoryAccessRange(ThreadState *thr, uptr pc, uptr addr,
  729     uptr size, bool is_write);
  730 void MemoryAccessRangeStep(ThreadState *thr, uptr pc, uptr addr,
  730 void MemoryAccessRangeStep(ThreadState *thr, uptr pc, uptr addr,
  731     uptr size, uptr step, bool is_write);
  731     uptr size, uptr step, bool is_write);
  732 void UnalignedMemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  732 void UnalignedMemoryAccess(ThreadState *thr, uptr pc, uptr addr,
  740 void ALWAYS_INLINE MemoryRead(ThreadState *thr, uptr pc,
  741                                      uptr addr, int kAccessSizeLog) {
  745 void ALWAYS_INLINE MemoryWrite(ThreadState *thr, uptr pc,
  746                                       uptr addr, int kAccessSizeLog) {
  750 void ALWAYS_INLINE MemoryReadAtomic(ThreadState *thr, uptr pc,
  751                                            uptr addr, int kAccessSizeLog) {
  755 void ALWAYS_INLINE MemoryWriteAtomic(ThreadState *thr, uptr pc,
  756                                             uptr addr, int kAccessSizeLog) {
  760 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size);
  760 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size);
  760 void MemoryResetRange(ThreadState *thr, uptr pc, uptr addr, uptr size);
  761 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size);
  761 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size);
  761 void MemoryRangeFreed(ThreadState *thr, uptr pc, uptr addr, uptr size);
  762 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size);
  762 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size);
  762 void MemoryRangeImitateWrite(ThreadState *thr, uptr pc, uptr addr, uptr size);
  763 void MemoryRangeImitateWriteOrResetRange(ThreadState *thr, uptr pc, uptr addr,
  763 void MemoryRangeImitateWriteOrResetRange(ThreadState *thr, uptr pc, uptr addr,
  764                                          uptr size);
  766 void ThreadIgnoreBegin(ThreadState *thr, uptr pc, bool save_stack = true);
  767 void ThreadIgnoreEnd(ThreadState *thr, uptr pc);
  768 void ThreadIgnoreSyncBegin(ThreadState *thr, uptr pc, bool save_stack = true);
  769 void ThreadIgnoreSyncEnd(ThreadState *thr, uptr pc);
  771 void FuncEntry(ThreadState *thr, uptr pc);
  774 int ThreadCreate(ThreadState *thr, uptr pc, uptr uid, bool detached);
  774 int ThreadCreate(ThreadState *thr, uptr pc, uptr uid, bool detached);
  778 int ThreadTid(ThreadState *thr, uptr pc, uptr uid);
  778 int ThreadTid(ThreadState *thr, uptr pc, uptr uid);
  779 void ThreadJoin(ThreadState *thr, uptr pc, int tid);
  780 void ThreadDetach(ThreadState *thr, uptr pc, int tid);
  785 void ThreadNotJoined(ThreadState *thr, uptr pc, int tid, uptr uid);
  785 void ThreadNotJoined(ThreadState *thr, uptr pc, int tid, uptr uid);
  794 void MutexCreate(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  794 void MutexCreate(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  795 void MutexDestroy(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  795 void MutexDestroy(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  796 void MutexPreLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  796 void MutexPreLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  797 void MutexPostLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0,
  797 void MutexPostLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0,
  799 int  MutexUnlock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  799 int  MutexUnlock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  800 void MutexPreReadLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  801 void MutexPostReadLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz = 0);
  802 void MutexReadUnlock(ThreadState *thr, uptr pc, uptr addr);
  803 void MutexReadOrWriteUnlock(ThreadState *thr, uptr pc, uptr addr);
  804 void MutexRepair(ThreadState *thr, uptr pc, uptr addr);  // call on EOWNERDEAD
  805 void MutexInvalidAccess(ThreadState *thr, uptr pc, uptr addr);
  807 void Acquire(ThreadState *thr, uptr pc, uptr addr);
  814 void AcquireGlobal(ThreadState *thr, uptr pc);
  815 void Release(ThreadState *thr, uptr pc, uptr addr);
  816 void ReleaseStore(ThreadState *thr, uptr pc, uptr addr);
  817 void AfterSleep(ThreadState *thr, uptr pc);
  818 void AcquireImpl(ThreadState *thr, uptr pc, SyncClock *c);
  819 void ReleaseImpl(ThreadState *thr, uptr pc, SyncClock *c);
  820 void ReleaseStoreImpl(ThreadState *thr, uptr pc, SyncClock *c);
  821 void AcquireReleaseImpl(ThreadState *thr, uptr pc, SyncClock *c);
  845 uptr TraceTopPC(ThreadState *thr);
  846 uptr TraceSize();
  847 uptr TraceParts();
  874 uptr ALWAYS_INLINE HeapEnd() {
  879 ThreadState *FiberCreate(ThreadState *thr, uptr pc, unsigned flags);
  880 void FiberDestroy(ThreadState *thr, uptr pc, ThreadState *fiber);
  881 void FiberSwitch(ThreadState *thr, uptr pc, ThreadState *fiber, unsigned flags);
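
The MemoryRead/MemoryWrite entries above take an int kAccessSizeLog rather than a byte count. Below is a minimal standalone sketch of the assumed log2 encoding (1/2/4/8-byte accesses map to 0/1/2/3); this mapping is an assumption for illustration, not quoted from the header.

  // Standalone illustration only; the 1/2/4/8 -> 0/1/2/3 mapping is assumed.
  #include <cassert>

  static int AccessSizeLog(unsigned size_in_bytes) {
    switch (size_in_bytes) {
      case 1: return 0;
      case 2: return 1;
      case 4: return 2;
      case 8: return 3;
    }
    return -1;  // unsupported access width
  }

  int main() {
    assert(AccessSizeLog(1) == 0);
    assert(AccessSizeLog(8) == 3);
    return 0;
  }
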
projects/compiler-rt/lib/tsan/rtl/tsan_rtl_mutex.cpp
   25 void ReportDeadlock(ThreadState *thr, uptr pc, DDReport *r);
   29   uptr pc;
   31   Callback(ThreadState *thr, uptr pc)
   42 void DDMutexInit(ThreadState *thr, uptr pc, SyncVar *s) {
   48 static void ReportMutexMisuse(ThreadState *thr, uptr pc, ReportType typ,
   49     uptr addr, u64 mid) {
   64 void MutexCreate(ThreadState *thr, uptr pc, uptr addr, u32 flagz) {
   80 void MutexDestroy(ThreadState *thr, uptr pc, uptr addr, u32 flagz) {
  142 void MutexPreLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz) {
  158 void MutexPostLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz, int rec) {
  213 int MutexUnlock(ThreadState *thr, uptr pc, uptr addr, u32 flagz) {
  256 void MutexPreReadLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz) {
  268 void MutexPostReadLock(ThreadState *thr, uptr pc, uptr addr, u32 flagz) {
  308 void MutexReadUnlock(ThreadState *thr, uptr pc, uptr addr) {
  340 void MutexReadOrWriteUnlock(ThreadState *thr, uptr pc, uptr addr) {
  387 void MutexRepair(ThreadState *thr, uptr pc, uptr addr) {
  395 void MutexInvalidAccess(ThreadState *thr, uptr pc, uptr addr) {
  403 void Acquire(ThreadState *thr, uptr pc, uptr addr) {
  423 void AcquireGlobal(ThreadState *thr, uptr pc) {
  432 void Release(ThreadState *thr, uptr pc, uptr addr) {
  444 void ReleaseStore(ThreadState *thr, uptr pc, uptr addr) {
  466 void AfterSleep(ThreadState *thr, uptr pc) {
  477 void AcquireImpl(ThreadState *thr, uptr pc, SyncClock *c) {
  485 void ReleaseImpl(ThreadState *thr, uptr pc, SyncClock *c) {
  494 void ReleaseStoreImpl(ThreadState *thr, uptr pc, SyncClock *c) {
  503 void AcquireReleaseImpl(ThreadState *thr, uptr pc, SyncClock *c) {
  513 void ReportDeadlock(ThreadState *thr, uptr pc, DDReport *r) {
  523   uptr dummy_pc = 0x42;
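
The MutexPreLock/MutexPostLock/MutexUnlock entries above suggest a pre/post annotation pattern around the real lock operation. The sketch below is a self-contained stand-in: the real entry points also take ThreadState*, pc and flag arguments (omitted here), and the printing stubs are hypothetical, defined only for this example.

  #include <cstdint>
  #include <cstdio>
  #include <mutex>
  typedef uintptr_t uptr;

  // Hypothetical no-op stand-ins for the runtime entry points listed above.
  static void MutexPreLock(uptr addr)  { std::printf("pre-lock  %p\n", (void *)addr); }
  static void MutexPostLock(uptr addr) { std::printf("post-lock %p\n", (void *)addr); }
  static void MutexUnlock(uptr addr)   { std::printf("unlock    %p\n", (void *)addr); }

  static std::mutex g_mu;

  int main() {
    MutexPreLock((uptr)&g_mu);    // announce the intent to acquire before blocking
    g_mu.lock();
    MutexPostLock((uptr)&g_mu);   // record the acquisition once the lock is held
    // ... critical section ...
    MutexUnlock((uptr)&g_mu);     // annotate the release before the real unlock
    g_mu.unlock();
    return 0;
  }
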
projects/compiler-rt/lib/tsan/rtl/tsan_rtl_report.cpp
  120   for (uptr si = 0; si < trace.size; si++) {
  121     const uptr pc = trace.trace[si];
  122     uptr pc1 = pc;
  145 ScopedReportBase::ScopedReportBase(ReportType typ, uptr tag) {
  166 void ScopedReportBase::AddMemoryAccess(uptr addr, uptr external_tag, Shadow s,
  180   for (uptr i = 0; i < mset->Size(); i++) {
  193   for (uptr i = 0; i < rep_->threads.Size(); i++) {
  232   uptr addr = (uptr)arg;
  242 ThreadContext *IsThreadStackOrTls(uptr addr, bool *is_stack) {
  264   for (uptr i = 0; i < rep_->mutexes.Size(); i++) {
  280   uptr addr = SyncVar::SplitId(id, &uid);
  297   for (uptr i = 0; i < rep_->mutexes.Size(); i++) {
  310 void ScopedReportBase::AddLocation(uptr addr, uptr size) {
  374 ScopedReport::ScopedReport(ReportType typ, uptr tag)
  380                   MutexSet *mset, uptr *tag) {
  396   Vector<uptr> stack;
  398   for (uptr i = 0; i < hdr->stack0.size; i++) {
  404   uptr pos = hdr->stack0.size;
  406   for (uptr i = ebegin; i <= eend; i++) {
  409     uptr pc = (uptr)(ev & ((1ull << kEventPCBits) - 1));
  432     for (uptr j = 0; j <= pos; j++)
  443                              uptr addr_min, uptr addr_max) {
  451       hash.hash[0] = md5_hash(traces[0].trace, traces[0].size * sizeof(uptr));
  452       hash.hash[1] = md5_hash(traces[1].trace, traces[1].size * sizeof(uptr));
  453       for (uptr i = 0; i < ctx->racy_stacks.Size(); i++) {
  463       for (uptr i = 0; i < ctx->racy_addresses.Size(); i++) {
  465         uptr maxbeg = max(ra0.addr_min, ra2.addr_min);
  466         uptr minend = min(ra0.addr_max, ra2.addr_max);
  489                           uptr addr_min, uptr addr_max) {
  493     hash.hash[0] = md5_hash(traces[0].trace, traces[0].size * sizeof(uptr));
  494     hash.hash[1] = md5_hash(traces[1].trace, traces[1].size * sizeof(uptr));
  511   uptr pc_or_addr = 0;
  512   for (uptr i = 0; pc_or_addr == 0 && i < rep->mops.Size(); i++)
  514   for (uptr i = 0; pc_or_addr == 0 && i < rep->stacks.Size(); i++)
  516   for (uptr i = 0; pc_or_addr == 0 && i < rep->threads.Size(); i++)
  518   for (uptr i = 0; pc_or_addr == 0 && i < rep->locs.Size(); i++)
  546   for (uptr k = 0; k < ctx->fired_suppressions.size(); k++) {
  549     for (uptr j = 0; j < trace.size; j++) {
  561 static bool IsFiredSuppression(Context *ctx, ReportType type, uptr addr) {
  563   for (uptr k = 0; k < ctx->fired_suppressions.size(); k++) {
  608   uptr addr = ShadowToMem((uptr)thr->racy_shadow_addr);
  609   uptr addr_min = 0;
  610   uptr addr_max = 0;
  612     uptr a0 = addr + Shadow(thr->racy_state[0]).addr0();
  613     uptr a1 = addr + Shadow(thr->racy_state[1]).addr0();
  614     uptr e0 = a0 + Shadow(thr->racy_state[0]).size();
  615     uptr e1 = a1 + Shadow(thr->racy_state[1]).size();
  633   const uptr kMop = 2;
  635   uptr tags[kMop] = {kExternalTagNone};
  636   uptr toppc = TraceTopPC(thr);
  675   uptr tag = kExternalTagNone;
  676   for (uptr i = 0; i < kMop; i++) {
  686   for (uptr i = 0; i < kMop; i++) {
  692   for (uptr i = 0; i < kMop; i++) {
  717 void PrintCurrentStack(ThreadState *thr, uptr pc) {
  731 void PrintCurrentStackSlow(uptr pc) {
  733   uptr bp = GET_CURRENT_FRAME();
  739   for (uptr i = 0; i < ptrace->size / 2; i++) {
  740     uptr tmp = ptrace->trace_buffer[i];
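
Line 409 above extracts a PC from a trace event with ev & ((1ull << kEventPCBits) - 1), and tsan_trace.h further down defines kEventPCBits = 61. A self-contained sketch of that packing; the event-type field occupying the high bits is assumed for illustration.

  #include <cassert>
  #include <cstdint>
  typedef unsigned long long u64;
  typedef uintptr_t uptr;

  const uptr kEventPCBits = 61;  // low 61 bits of an event hold the PC

  static u64 PackEvent(u64 type, uptr pc) {
    // Assumed layout: event type in the high bits, PC in the low kEventPCBits bits.
    return (type << kEventPCBits) | ((u64)pc & ((1ull << kEventPCBits) - 1));
  }

  static uptr EventPC(u64 ev) {
    return (uptr)(ev & ((1ull << kEventPCBits) - 1));  // same mask as line 409 above
  }

  int main() {
    u64 ev = PackEvent(2, 0x401234);
    assert(EventPC(ev) == 0x401234);
    return 0;
  }
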
projects/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cpp
   49   uptr pc;
   70   uptr trace_p = GetThreadTrace(tid);
   82   uptr stk_addr;
   83   uptr stk_size;
   84   uptr tls_addr;
   85   uptr tls_size;
  166   for (uptr i = 0; i < leaks.Size(); i++) {
  188   for (uptr i = 0; i < set->Size(); i++) {
  216   for (uptr i = 0; i < leaks.Size(); i++) {
  226   uptr result;
  231 int ThreadCreate(ThreadState *thr, uptr pc, uptr uid, bool detached) {
  244   uptr stk_addr = 0;
  245   uptr stk_size = 0;
  246   uptr tls_addr = 0;
  247   uptr tls_size = 0;
  289   uptr uid = (uptr)arg;
  297 int ThreadTid(ThreadState *thr, uptr pc, uptr uid) {
  303 void ThreadJoin(ThreadState *thr, uptr pc, int tid) {
  310 void ThreadDetach(ThreadState *thr, uptr pc, int tid) {
  316 void ThreadNotJoined(ThreadState *thr, uptr pc, int tid, uptr uid) {
  326 void MemoryAccessRange(ThreadState *thr, uptr pc, uptr addr,
  327                        uptr size, bool is_write) {
  416 ThreadState *FiberCreate(ThreadState *thr, uptr pc, unsigned flags) {
  427 void FiberDestroy(ThreadState *thr, uptr pc, ThreadState *fiber) {
  434 void FiberSwitch(ThreadState *thr, uptr pc,
projects/compiler-rt/lib/tsan/rtl/tsan_stack_trace.cpp
   25 void VarSizeStackTrace::ResizeBuffer(uptr new_size) {
   38 void VarSizeStackTrace::Init(const uptr *pcs, uptr cnt, uptr extra_top_pc) {
   54     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
   55   uptr top = 0;
   56   uptr bottom = 0;
projects/compiler-rt/lib/tsan/rtl/tsan_stack_trace.h
   23   uptr *trace_buffer;  // Owned.
   27   void Init(const uptr *pcs, uptr cnt, uptr extra_top_pc = 0);
   34   void ResizeBuffer(uptr new_size);
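
The VarSizeStackTrace entries above pair an owned trace_buffer with Init(pcs, cnt, extra_top_pc). Below is a hedged analog of what that interface suggests, not the verbatim implementation: copy cnt PCs into an owned buffer and store a non-zero extra_top_pc as one additional entry.

  #include <cassert>
  #include <cstdint>
  #include <cstring>
  typedef uintptr_t uptr;

  struct OwnedTrace {               // analog of VarSizeStackTrace, for illustration only
    uptr *trace_buffer = nullptr;   // owned
    uptr size = 0;

    void Init(const uptr *pcs, uptr cnt, uptr extra_top_pc = 0) {
      delete[] trace_buffer;
      size = cnt + (extra_top_pc != 0 ? 1 : 0);
      trace_buffer = new uptr[size];
      if (cnt)
        std::memcpy(trace_buffer, pcs, cnt * sizeof(uptr));
      if (extra_top_pc)
        trace_buffer[cnt] = extra_top_pc;
    }
    ~OwnedTrace() { delete[] trace_buffer; }
  };

  int main() {
    uptr pcs[] = {0x1000, 0x2000};
    OwnedTrace t;
    t.Init(pcs, 2, 0x3000);
    assert(t.size == 3 && t.trace_buffer[2] == 0x3000);
    return 0;
  }
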
projects/compiler-rt/lib/tsan/rtl/tsan_suppressions.cpp
   94 static uptr IsSuppressed(const char *stype, const AddressInfo &info,
  106 uptr IsSuppressed(ReportType typ, const ReportStack *stack, Suppression **sp) {
  116     uptr pc = IsSuppressed(stype, frame->info, sp);
  125 uptr IsSuppressed(ReportType typ, const ReportLocation *loc, Suppression **sp) {
  152   for (uptr i = 0; i < matched.size(); i++)
  156   for (uptr i = 0; i < matched.size(); i++) {
projects/compiler-rt/lib/tsan/rtl/tsan_suppressions.h
   32 uptr IsSuppressed(ReportType typ, const ReportStack *stack, Suppression **sp);
   33 uptr IsSuppressed(ReportType typ, const ReportLocation *loc, Suppression **sp);
projects/compiler-rt/lib/tsan/rtl/tsan_symbolize.cpp
   42 bool __tsan_symbolize_external(uptr pc, char *func_buf, uptr func_siz,
   43                                char *file_buf, uptr file_siz, int *line,
   51 void __tsan_symbolize_external_ex(uptr pc,
   59   uptr addr;
   82 SymbolizedStack *SymbolizeCode(uptr addr) {
  109 ReportLocation *SymbolizeData(uptr addr) {
projects/compiler-rt/lib/tsan/rtl/tsan_symbolize.h
   22 SymbolizedStack *SymbolizeCode(uptr addr);
   23 ReportLocation *SymbolizeData(uptr addr);
   26 ReportStack *NewReportStackEntry(uptr addr);
projects/compiler-rt/lib/tsan/rtl/tsan_sync.cpp
   19 void DDMutexInit(ThreadState *thr, uptr pc, SyncVar *s);
   26 void SyncVar::Init(ThreadState *thr, uptr pc, uptr addr, u64 uid) {
   61 void MetaMap::AllocBlock(ThreadState *thr, uptr pc, uptr p, uptr sz) {
   73 uptr MetaMap::FreeBlock(Processor *proc, uptr p) {
   77   uptr sz = RoundUpTo(b->siz, kMetaShadowCell);
   82 bool MetaMap::FreeRange(Processor *proc, uptr p, uptr sz) {
  121 void MetaMap::ResetRange(Processor *proc, uptr p, uptr sz) {
  128   const uptr kMetaRatio = kMetaShadowCell / kMetaShadowSize;
  129   const uptr kPageSize = GetPageSizeCached() * kMetaRatio;
  136   uptr diff = RoundUp(p, kPageSize) - p;
  151   const uptr p0 = p;
  152   const uptr sz0 = sz;
  154   for (uptr checked = 0; sz > 0; checked += kPageSize) {
  162   for (uptr checked = 0; sz > 0; checked += kPageSize) {
  175   uptr metap = (uptr)MemToMeta(p0);
  176   uptr metasz = sz0 / kMetaRatio;
  182 MBlock* MetaMap::GetBlock(uptr p) {
  196 SyncVar* MetaMap::GetOrCreateAndLock(ThreadState *thr, uptr pc,
  197                               uptr addr, bool write_lock) {
  201 SyncVar* MetaMap::GetIfExistsAndLock(uptr addr, bool write_lock) {
  205 SyncVar* MetaMap::GetAndLock(ThreadState *thr, uptr pc,
  206                              uptr addr, bool write_lock, bool create) {
  258 void MetaMap::MoveMemory(uptr src, uptr dst, uptr sz) {
  263   uptr diff = dst - src;
  267   uptr inc = 1;
projects/compiler-rt/lib/tsan/rtl/tsan_sync.h
   55   uptr addr;  // overwritten by DenseSlabAlloc freelist
   70   void Init(ThreadState *thr, uptr pc, uptr addr, u64 uid);
   81   static uptr SplitId(u64 id, u64 *uid) {
  115   void AllocBlock(ThreadState *thr, uptr pc, uptr p, uptr sz);
  116   uptr FreeBlock(Processor *proc, uptr p);
  117   bool FreeRange(Processor *proc, uptr p, uptr sz);
  118   void ResetRange(Processor *proc, uptr p, uptr sz);
  119   MBlock* GetBlock(uptr p);
  121   SyncVar* GetOrCreateAndLock(ThreadState *thr, uptr pc,
  122                               uptr addr, bool write_lock);
  123   SyncVar* GetIfExistsAndLock(uptr addr, bool write_lock);
  125   void MoveMemory(uptr src, uptr dst, uptr sz);
  139   SyncVar* GetAndLock(ThreadState *thr, uptr pc, uptr addr, bool write_lock,
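
SyncVar::SplitId above recovers both an address and a uid from a single u64 id, which implies a bit-packed id. The sketch below uses an assumed 48-bit address field purely for illustration; the actual split is not shown in this listing.

  #include <cassert>
  #include <cstdint>
  typedef unsigned long long u64;
  typedef uintptr_t uptr;

  static const int kAddrBits = 48;  // assumed field width, for the example only

  static u64 MakeId(uptr addr, u64 uid) {
    return (uid << kAddrBits) | ((u64)addr & ((1ull << kAddrBits) - 1));
  }

  static uptr SplitId(u64 id, u64 *uid) {
    *uid = id >> kAddrBits;
    return (uptr)(id & ((1ull << kAddrBits) - 1));
  }

  int main() {
    u64 uid = 0;
    uptr addr = SplitId(MakeId(0x12345678, 5), &uid);
    assert(addr == 0x12345678 && uid == 5);
    return 0;
  }
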
projects/compiler-rt/lib/tsan/rtl/tsan_trace.h
   43 const uptr kEventPCBits = 61;
   62   uptr shadow_stack[kShadowStackSize];
projects/compiler-rt/lib/ubsan/ubsan_diag.cpp
   35 void ubsan_GetStackTrace(BufferedStackTrace *stack, uptr max_depth,
   36                          uptr pc, uptr bp, void *context, bool fast) {
   37   uptr top = 0;
   38   uptr bottom = 0;
   46 static void MaybePrintStackTrace(uptr pc, uptr bp) {
  115 SymbolizedStack *__ubsan::getSymbolizedLocation(uptr PC) {
  246 static inline uptr subtractNoOverflow(uptr LHS, uptr RHS) {
  250 static inline uptr addNoOverflow(uptr LHS, uptr RHS) {
  251   const uptr Limit = (uptr)-1;
  282   for (uptr P = Min; P != Max; ++P) {
  291   for (uptr P = Min; P != Max; ++P) {
  312   for (uptr P = Min; P != Max; ++P) {
  419 bool __ubsan::IsPCSuppressed(ErrorType ET, uptr PC, const char *Filename) {
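
The subtractNoOverflow/addNoOverflow entries above (lines 246/250), together with the Limit = (uptr)-1 constant, point at saturating range arithmetic for diagnostics. Plausible bodies consistent with those names, given here as a sketch rather than a quotation:

  #include <cassert>
  #include <cstdint>
  typedef uintptr_t uptr;

  static inline uptr subtractNoOverflow(uptr LHS, uptr RHS) {
    return (LHS < RHS) ? 0 : LHS - RHS;               // clamp at 0 instead of wrapping
  }

  static inline uptr addNoOverflow(uptr LHS, uptr RHS) {
    const uptr Limit = (uptr)-1;
    return (LHS > Limit - RHS) ? Limit : LHS + RHS;   // clamp at the maximum uptr
  }

  int main() {
    assert(subtractNoOverflow(1, 5) == 0);
    assert(addNoOverflow((uptr)-2, 7) == (uptr)-1);
    return 0;
  }
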
projects/compiler-rt/lib/ubsan/ubsan_diag.h
   41 SymbolizedStack *getSymbolizedLocation(uptr PC);
   43 inline SymbolizedStack *getCallerLocation(uptr CallerPC) {
   45   uptr PC = StackTrace::GetPreviousInstructionPc(CallerPC);
   50 typedef uptr MemoryLocation;
  227   uptr pc;
  228   uptr bp;
  262 bool IsPCSuppressed(ErrorType ET, uptr PC, const char *Filename);
projects/compiler-rt/lib/ubsan/ubsan_diag_standalone.cpp
   20     uptr pc, uptr bp, void *context, bool request_fast, u32 max_depth) {
   21   uptr top = 0;
   22   uptr bottom = 0;
projects/compiler-rt/lib/ubsan/ubsan_handlers.cpp
   50   uptr Alignment = (uptr)1 << Data->LogAlignment;
  123   uptr RealPointer = Pointer - Offset;
  124   uptr LSB = LeastSignificantSetBitIndex(RealPointer);
  125   uptr ActualAlignment = uptr(1) << LSB;
  127   uptr Mask = Alignment - 1;
  128   uptr MisAlignmentOffset = RealPointer & Mask;
  824                                             uptr ValidVtable) {
  834                                                   uptr ValidVtable) {
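
The alignment-check lines above (123-128) rely on two power-of-two facts: the misalignment of a pointer with respect to Alignment is Pointer & (Alignment - 1), and its actual alignment is 1 << (index of its least significant set bit). A standalone sketch; LeastSignificantSetBitIndex is redefined here as a local stand-in for the helper referenced above.

  #include <cassert>
  #include <cstdint>
  typedef uintptr_t uptr;

  // Local stand-in for the helper referenced above; assumes x != 0.
  static uptr LeastSignificantSetBitIndex(uptr x) {
    uptr i = 0;
    while (((x >> i) & 1) == 0) ++i;
    return i;
  }

  int main() {
    uptr Pointer = 0x1004;                   // example address
    uptr Alignment = (uptr)1 << 3;           // required 8-byte alignment
    uptr Mask = Alignment - 1;
    uptr MisAlignmentOffset = Pointer & Mask;                                // 4
    uptr ActualAlignment = (uptr)1 << LeastSignificantSetBitIndex(Pointer);  // 4
    assert(MisAlignmentOffset == 4 && ActualAlignment == 4);
    return 0;
  }
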
projects/compiler-rt/lib/ubsan/ubsan_handlers_cxx.cpp
   62           << Range(Pointer, Pointer + sizeof(uptr), "possibly invalid vptr");
   66           << Range(Pointer, Pointer + sizeof(uptr), "invalid vptr");
   71         << Range(Pointer, Pointer + sizeof(uptr), "vptr for %0");
   79         << Range(Pointer, Pointer + sizeof(uptr),
projects/compiler-rt/lib/ubsan/ubsan_signals_standalone.cpp
   41 void ubsan_GetStackTrace(BufferedStackTrace *stack, uptr max_depth,
   42                          uptr pc, uptr bp, void *context, bool fast);
projects/compiler-rt/lib/ubsan/ubsan_type_hash.h
   19 typedef uptr HashValue;
projects/compiler-rt/lib/ubsan/ubsan_value.h
  136 typedef uptr ValueHandle;
projects/compiler-rt/lib/xray/xray_allocator.h
   40   uptr RoundedSize = RoundUpTo(sizeof(T), GetPageSizeCached());
   63   uptr B = internal_mmap(NULL, RoundedSize, PROT_READ | PROT_WRITE,
   80   uptr RoundedSize = RoundUpTo(sizeof(T), GetPageSizeCached());
   91   uptr RoundedSize = RoundUpTo(S * sizeof(T), GetPageSizeCached());
  112   uptr B = internal_mmap(NULL, RoundedSize, PROT_READ | PROT_WRITE,
  129   uptr RoundedSize = RoundUpTo(S * sizeof(T), GetPageSizeCached());
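
The allocator lines above round every request up to whole pages before calling internal_mmap. RoundUpTo is a sanitizer_common helper; for a power-of-two boundary it is the usual (size + boundary - 1) & ~(boundary - 1) computation, reconstructed here as a sketch with an assumed 4 KiB page size.

  #include <cassert>
  #include <cstdint>
  typedef uintptr_t uptr;

  static uptr RoundUpTo(uptr size, uptr boundary) {
    return (size + boundary - 1) & ~(boundary - 1);  // boundary must be a power of two
  }

  int main() {
    const uptr kPageSize = 4096;                 // assumed page size for the example
    assert(RoundUpTo(1, kPageSize) == 4096);     // a small object still takes one page
    assert(RoundUpTo(5000, kPageSize) == 8192);  // spills into a second page
    assert(RoundUpTo(8192, kPageSize) == 8192);  // already page-aligned
    return 0;
  }
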
projects/compiler-rt/lib/xray/xray_function_call_trie.h
  193     explicit Allocators(uptr Max) XRAY_NEVER_INSTRUMENT {
  310   static Allocators InitAllocatorsCustom(uptr Max) XRAY_NEVER_INSTRUMENT {
projects/compiler-rt/lib/xray/xray_profiling.cpp
   88   uptr Allocators = 0;
  138   uptr FCT = 0;
  172   uptr P = atomic_exchange(&T.FCT, 0, memory_order_acq_rel);
  180   uptr A = atomic_exchange(&T.Allocators, 0, memory_order_acq_rel);
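
Lines 172/180 above tear state down by atomically exchanging a pointer-sized uptr slot with 0 under memory_order_acq_rel, so exactly one caller claims the stored object. A self-contained analog using std::atomic in place of the runtime's own atomic wrappers; the type and function names below are stand-ins for the example.

  #include <atomic>
  #include <cstdint>
  typedef uintptr_t uptr;

  struct Allocators { int placeholder = 0; };   // stand-in payload for the example

  static std::atomic<uptr> g_allocators{0};

  static void Install(Allocators *a) {
    g_allocators.store((uptr)a, std::memory_order_release);
  }

  static void Teardown() {
    uptr A = g_allocators.exchange(0, std::memory_order_acq_rel);  // claim ownership
    if (A)
      delete (Allocators *)A;   // only the caller that saw a non-zero value frees it
  }

  int main() {
    Install(new Allocators);
    Teardown();
    Teardown();   // second call observes 0 and does nothing
    return 0;
  }
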