reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References
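All entries below reference the __sanitizer::u64 type. For orientation, a minimal sketch of the definitions being indexed, assuming the usual typedefs in sanitizer_common/sanitizer_internal_defs.h (exact macros and neighboring declarations may differ):

    namespace __sanitizer {
    // 64-bit unsigned integer used throughout the sanitizer runtimes.
    typedef unsigned long long u64;
    // Unaligned variant (line 260 of sanitizer_internal_defs.h, listed below);
    // relaxing the alignment to 1 lets helpers such as
    // __sanitizer_unaligned_load64(const uu64 *p) read unaligned memory.
    typedef __attribute__((aligned(1))) u64 uu64;  // written as ALIGNED(1) in the source
    }  // namespace __sanitizer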

projects/compiler-rt/lib/asan/asan_fake_stack.cpp
   20 static const u64 kMagic1 = kAsanStackAfterReturnMagic;
   21 static const u64 kMagic2 = (kMagic1 << 8) | kMagic1;
   22 static const u64 kMagic4 = (kMagic2 << 16) | kMagic2;
   23 static const u64 kMagic8 = (kMagic4 << 32) | kMagic4;
   25 static const u64 kAllocaRedzoneSize = 32UL;
   26 static const u64 kAllocaRedzoneMask = 31UL;
   29 ALWAYS_INLINE void SetShadow(uptr ptr, uptr size, uptr class_id, u64 magic) {
   30   u64 *shadow = reinterpret_cast<u64*>(MemToShadow(ptr));
projects/compiler-rt/lib/asan/asan_mapping.h
  156 static const u64 kDefaultShadowScale = SANITIZER_MYRIAD2 ? 5 : 3;
  158 static const u64 kDefaultShadowSentinel = ~(uptr)0;
  159 static const u64 kDefaultShadowOffset32 = 1ULL << 29;  // 0x20000000
  160 static const u64 kDefaultShadowOffset64 = 1ULL << 44;
  161 static const u64 kDefaultShort64bitShadowOffset =
  163 static const u64 kAArch64_ShadowOffset64 = 1ULL << 36;
  164 static const u64 kMIPS32_ShadowOffset32 = 0x0aaa0000;
  165 static const u64 kMIPS64_ShadowOffset64 = 1ULL << 37;
  166 static const u64 kPPC64_ShadowOffset64 = 1ULL << 44;
  167 static const u64 kSystemZ_ShadowOffset64 = 1ULL << 52;
  168 static const u64 kSPARC64_ShadowOffset64 = 1ULL << 43;  // 0x80000000000
  169 static const u64 kFreeBSD_ShadowOffset32 = 1ULL << 30;  // 0x40000000
  170 static const u64 kFreeBSD_ShadowOffset64 = 1ULL << 46;  // 0x400000000000
  171 static const u64 kNetBSD_ShadowOffset32 = 1ULL << 30;  // 0x40000000
  172 static const u64 kNetBSD_ShadowOffset64 = 1ULL << 46;  // 0x400000000000
  173 static const u64 kWindowsShadowOffset32 = 3ULL << 28;  // 0x30000000
  175 static const u64 kMyriadMemoryOffset32 = 0x80000000ULL;
  176 static const u64 kMyriadMemorySize32 = 0x20000000ULL;
  177 static const u64 kMyriadMemoryEnd32 =
  179 static const u64 kMyriadShadowOffset32 =
  182 static const u64 kMyriadCacheBitMask32 = 0x40000000ULL;
projects/compiler-rt/lib/asan/asan_poisoning.cpp
  242 u64 __sanitizer_unaligned_load64(const uu64 *p) {
  260 void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
projects/compiler-rt/lib/asan/asan_rtl.cpp
   65                             u64 v1, u64 v2) {
projects/compiler-rt/lib/cfi/cfi.cpp
   94 typedef int (*CFICheckFn)(u64, void *, void *);
  341 ALWAYS_INLINE void CfiSlowPathCommon(u64 CallSiteTypeId, void *Ptr,
  402 __cfi_slowpath(u64 CallSiteTypeId, void *Ptr) {
  408 __cfi_slowpath_diag(u64 CallSiteTypeId, void *Ptr, void *DiagData) {
projects/compiler-rt/lib/hwasan/hwasan.cpp
  145                               u64 v1, u64 v2) {
  452 u64 __sanitizer_unaligned_load64(const uu64 *p) {
  461 void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
projects/compiler-rt/lib/hwasan/hwasan.h
  144   u64 va_arg_overflow_size_tls;
projects/compiler-rt/lib/hwasan/hwasan_allocator.cpp
  261   return HwasanAllocate(stack, nmemb * size, sizeof(u64), true);
  289   return SetErrnoOnNull(HwasanAllocate(stack, size, sizeof(u64), false));
  298     return SetErrnoOnNull(HwasanAllocate(stack, size, sizeof(u64), false));
  303   return SetErrnoOnNull(HwasanReallocate(stack, ptr, size, sizeof(u64)));
projects/compiler-rt/lib/hwasan/hwasan_interface_internal.h
  145 u64 __sanitizer_unaligned_load64(const uu64 *p);
  154 void __sanitizer_unaligned_store64(uu64 *p, u64 x);
projects/compiler-rt/lib/hwasan/hwasan_thread.cpp
   38   static u64 unique_id;
projects/compiler-rt/lib/hwasan/hwasan_thread.h
   59   u64 unique_id() const { return unique_id_; }
   93   u64 unique_id_;  // counting from zero.
projects/compiler-rt/lib/interception/interception.h
   31 typedef __sanitizer::u64     UINTMAX_T;
projects/compiler-rt/lib/msan/msan.cpp
   43 THREADLOCAL u64 __msan_param_tls[kMsanParamTlsSize / sizeof(u64)];
   52 THREADLOCAL u64 __msan_retval_tls[kMsanRetvalTlsSize / sizeof(u64)];
   58 ALIGNED(16) THREADLOCAL u64 __msan_va_arg_tls[kMsanParamTlsSize / sizeof(u64)];
   65 THREADLOCAL u64 __msan_va_arg_overflow_size_tls;
  390                             u64 v1, u64 v2) {
  627 u64 __sanitizer_unaligned_load64(const uu64 *p) {
  649 void __sanitizer_unaligned_store64(uu64 *p, u64 x) {
  650   u64 s = __msan_param_tls[1];
projects/compiler-rt/lib/msan/msan.h
  371   u64 va_arg_overflow_size_tls;
projects/compiler-rt/lib/msan/msan_allocator.cpp
  233   return MsanAllocate(stack, nmemb * size, sizeof(u64), true);
  245   return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), false));
  254     return SetErrnoOnNull(MsanAllocate(stack, size, sizeof(u64), false));
  259   return SetErrnoOnNull(MsanReallocate(stack, ptr, size, sizeof(u64)));
projects/compiler-rt/lib/msan/msan_interface_internal.h
   56 void __msan_maybe_warning_8(u64 s, u32 o);
   65 void __msan_maybe_store_origin_8(u64 s, void *p, u32 o);
  162 u64 __sanitizer_unaligned_load64(const uu64 *p);
  171 void __sanitizer_unaligned_store64(uu64 *p, u64 x);
projects/compiler-rt/lib/msan/msan_poisoning.cpp
  154   u64 origin64 = ((u64)origin << 32) | origin;
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator.cpp
  140 const u64 kBlockMagic = 0x6A6CB03ABCEBC041ull;
  150   if (size + sizeof(u64) < size)
  152   void *p = RawInternalAlloc(size + sizeof(u64), cache, alignment);
  154     ReportInternalAllocatorOutOfMemory(size + sizeof(u64));
  156   return (char*)p + sizeof(u64);
  162   if (size + sizeof(u64) < size)
  164   addr = (char*)addr - sizeof(u64);
  165   size = size + sizeof(u64);
  170   return (char*)p + sizeof(u64);
  201   addr = (char*)addr - sizeof(u64);
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_internal.h
   27   static const u64 kSpaceSize = SANITIZER_MMAP_RANGE_SIZE;
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
   50   static const u64 kTwoLevelByteMapSize1 =
   52   static const u64 kMinFirstMapSizeTwoLevelByteMap = 4;
   57   static const u64 kSpaceSize = Params::kSpaceSize;
projects/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary64.h
  332     PackedCounterArray(u64 num_counters, u64 max_value, MemoryMapperT *mapper)
  336       constexpr u64 kMaxCounterBits = sizeof(*buffer) * 8ULL;
  367     u64 GetCount() const {
  392     const u64 n;
  393     u64 counter_size_bits_log;
  394     u64 counter_mask;
  395     u64 packing_ratio_log;
  396     u64 bit_offset_mask;
  399     u64 buffer_size;
  400     u64* buffer;
  592     u64 last_release_at_ns;
  593     u64 last_released_bytes;
projects/compiler-rt/lib/sanitizer_common/sanitizer_atomic.h
   50   typedef u64 Type;
projects/compiler-rt/lib/sanitizer_common/sanitizer_common.h
   51 const u64 kExternalPCBit = 1ULL << 60;
  258 u64 NanoTime();
  259 u64 MonotonicNanoTime();
  267 CheckFailed(const char *file, int line, const char *cond, u64 v1, u64 v2);
  286                                        u64, u64);
projects/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors.inc
 6168 static int wrapped_seek(void *cookie, u64 *offset, int whence) {
 9628   static u64 metadata_mem[sizeof(MetadataHashMap) / sizeof(u64) + 1];
projects/compiler-rt/lib/sanitizer_common/sanitizer_common_syscalls.inc
 1706 PRE_SYSCALL(lookup_dcookie)(u64 cookie64, void *buf, long len) {}
 1708 POST_SYSCALL(lookup_dcookie)(long res, u64 cookie64, void *buf, long len) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_coverage_libcdep_new.cpp
   26 static const u64 Magic64 = 0xC0BFFFFFFFFFFF64ULL;
   27 static const u64 Magic32 = 0xC0BFFFFFFFFFFF32ULL;
   28 static const u64 Magic = SANITIZER_WORDSIZE == 64 ? Magic64 : Magic32;
projects/compiler-rt/lib/sanitizer_common/sanitizer_deadlock_detector1.cpp
   29   u64 ctx;
   45   DDLogicalThread *CreateLogicalThread(u64 ctx) override;
   79 DDLogicalThread* DD::CreateLogicalThread(u64 ctx) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_deadlock_detector_interface.h
   45   u64  ctx;
   56     u64 thr_ctx;   // user thread context
   57     u64 mtx_ctx0;  // user mutex context, start of the edge
   58     u64 mtx_ctx1;  // user mutex context, end of the edge
   77   virtual DDLogicalThread* CreateLogicalThread(u64 ctx) { return nullptr; }
projects/compiler-rt/lib/sanitizer_common/sanitizer_internal_defs.h
  176 typedef u64 OFF_T;
  180 typedef u64  OFF64_T;
  193 typedef u64 tid_t;
  260 typedef ALIGNED(1) u64 uu64;
  281                           u64 v1, u64 v2);
projects/compiler-rt/lib/sanitizer_common/sanitizer_lfstack.h
   36     u64 cmp = atomic_load(&head_, memory_order_relaxed);
   38       u64 cnt = (cmp & kCounterMask) + kCounterInc;
   39       u64 xch = (u64)(uptr)p | cnt;
   48     u64 cmp = atomic_load(&head_, memory_order_acquire);
   54       u64 cnt = (cmp & kCounterMask);
   55       u64 xch = (u64)(uptr)nxt | cnt;
   64   static const u64 kPtrMask = ((u64)-1) >> kCounterBits;
   65   static const u64 kCounterMask = ~kPtrMask;
   66   static const u64 kCounterInc = kPtrMask + 1;
projects/compiler-rt/lib/sanitizer_common/sanitizer_libc.cpp
   78     u64 *p = reinterpret_cast<u64*>(s);
   79     u64 *e = p + n / 8;
   80     u64 v = c;
  234   u64 res = 0;
projects/compiler-rt/lib/sanitizer_common/sanitizer_linux.cpp
  496 u64 NanoTime() {
projects/compiler-rt/lib/sanitizer_common/sanitizer_linux_libcdep.cpp
  790 u64 MonotonicNanoTime() {
projects/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h
  135   u64 aio_data;
  141   u64 aio_buf;
  142   u64 aio_nbytes;
  144   u64 aio_reserved2;
  145   u64 aio_reserved3;
  149   u64 data;
  150   u64 obj;
  151   u64 res;
  152   u64 res2;
  576   u64 opaque[128 / sizeof(u64)];
  884 typedef int (*__sanitizer_cookie_io_seek)(void *cookie, u64 *offset,
projects/compiler-rt/lib/sanitizer_common/sanitizer_printf.cpp
   41 static int AppendNumber(char **buff, const char *buff_end, u64 absolute_value,
   82 static int AppendUnsigned(char **buff, const char *buff_end, u64 num, u8 base,
  119 static int AppendPointer(char **buff, const char *buff_end, u64 ptr_value) {
  166     u64 uval;
projects/compiler-rt/lib/sanitizer_common/sanitizer_symbolizer_posix_libcdep.cpp
  315 bool __sanitizer_symbolize_code(const char *ModuleName, u64 ModuleOffset,
  318 bool __sanitizer_symbolize_data(const char *ModuleName, u64 ModuleOffset,
projects/compiler-rt/lib/sanitizer_common/sanitizer_syscall_linux_x86_64.inc
   15 static uptr internal_syscall(u64 nr) {
   16   u64 retval;
   23 static uptr internal_syscall(u64 nr, T1 arg1) {
   24   u64 retval;
   31 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2) {
   32   u64 retval;
   39 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3) {
   40   u64 retval;
   47 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3, T4 arg4) {
   48   u64 retval;
   57 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3, T4 arg4,
   59   u64 retval;
   70 static uptr internal_syscall(u64 nr, T1 arg1, T2 arg2, T3 arg3, T4 arg4,
   72   u64 retval;
projects/compiler-rt/lib/sanitizer_common/sanitizer_termination.cpp
   70                           u64 v1, u64 v2) {
projects/compiler-rt/lib/sanitizer_common/sanitizer_thread_registry.cpp
   81 void ThreadContextBase::SetCreated(uptr _user_id, u64 _unique_id,
projects/compiler-rt/lib/sanitizer_common/sanitizer_thread_registry.h
   45   u64 unique_id;  // Unique thread ID.
   66   void SetCreated(uptr _user_id, u64 _unique_id, bool _detached,
  142   u64 total_threads_;   // Total number of created threads. May be greater than
projects/compiler-rt/lib/scudo/scudo_allocator.cpp
  636   u64 LastCheck = atomic_load_relaxed(&RssLastCheckedAtNS);
  637   const u64 CurrentCheck = MonotonicNanoTime();
projects/compiler-rt/lib/scudo/scudo_allocator.h
   38 typedef u64 PackedHeader;
   40   u64 Checksum          : 16;
   41   u64 ClassId           : 8;
   42   u64 SizeOrUnusedBytes : 20;  // Size for Primary backed allocations, amount of
   44   u64 State             : 2;   // available, allocated, or quarantined
   45   u64 AllocType         : 2;   // malloc, new, new[], or memalign
   46   u64 Offset            : 16;  // Offset from the beginning of the backend
projects/compiler-rt/lib/scudo/scudo_termination.cpp
   36                           u64 Value1, u64 Value2) {
projects/compiler-rt/lib/tsan/dd/dd_rtl.cpp
   91   static u64 ctx_mem[sizeof(Context) / sizeof(u64) + 1];
projects/compiler-rt/lib/tsan/rtl/tsan_clock.cpp
  158     u64 *dst_pos = &clk_[0];
  160       u64 epoch = src_elem.epoch;
  356 void ThreadClock::set(ClockCache *c, unsigned tid, u64 v) {
  548 u64 SyncClock::get(unsigned tid) const {
  558 u64 SyncClock::get_clean(unsigned tid) const {
projects/compiler-rt/lib/tsan/rtl/tsan_clock.h
   32   u64 get(unsigned tid) const;
   33   u64 get_clean(unsigned tid) const;
   68     u64 epoch  : kClkBits;
   69     u64 tid : 64 - kClkBits;  // kInvalidId if not active
  130   u64 get(unsigned tid) const;
  131   void set(ClockCache *c, unsigned tid, u64 v);
  132   void set(u64 v);
  151   u64 last_acquire_;
  164   u64 clk_[kMaxTidInClock];  // Fixed size vector clock.
  170 ALWAYS_INLINE u64 ThreadClock::get(unsigned tid) const {
  175 ALWAYS_INLINE void ThreadClock::set(u64 v) {
projects/compiler-rt/lib/tsan/rtl/tsan_defs.h
   44   u64 epoch  : kClkBits;
   45   u64 reused : 64 - kClkBits;  // tid reuse count
  142 T RoundUp(T p, u64 align) {
  148 T RoundDown(T p, u64 align) {
  160   u64 hash[2];
  176   u64  siz : 48;
  177   u64  tag : 16;
projects/compiler-rt/lib/tsan/rtl/tsan_fd.cpp
   39   u64 connectsync;
  306   static u64 addr;
  312   static u64 addr;
projects/compiler-rt/lib/tsan/rtl/tsan_interceptors_posix.cpp
   72   u64 opaque[936 / sizeof(u64) + 1];
 2830 void __tsan_testonly_barrier_init(u64 *barrier, u32 count) {
 2840 void __tsan_testonly_barrier_wait(u64 *barrier) {
projects/compiler-rt/lib/tsan/rtl/tsan_interface.cpp
   24 typedef u64 uint64_t;
  105 u64 __sanitizer_unaligned_load64(const uu64 *addr) {
  123 void __sanitizer_unaligned_store64(uu64 *addr, u64 v) {
projects/compiler-rt/lib/tsan/rtl/tsan_interface_java.cpp
   55 static u64 jctx_buf[sizeof(JavaContext) / sizeof(u64) + 1];
  136   u64 *s = (u64*)MemToShadow(src);
  137   u64 *d = (u64*)MemToShadow(dst);
  138   u64 *send = (u64*)MemToShadow(src + size);
projects/compiler-rt/lib/tsan/rtl/tsan_mutex.cpp
  198   u64 Contention() const {
  199     u64 active = iter_ % kActiveSpinIters;
  200     u64 passive = iter_ - active;
projects/compiler-rt/lib/tsan/rtl/tsan_mutex.h
   78   u64 seq_;
   79   u64 locked_[MutexTypeCount];
projects/compiler-rt/lib/tsan/rtl/tsan_mutexset.cpp
   24 void MutexSet::Add(u64 id, bool write, u64 epoch) {
   35     u64 minepoch = (u64)-1;
   36     u64 mini = (u64)-1;
   54 void MutexSet::Del(u64 id, bool write) {
   64 void MutexSet::Remove(u64 id) {
projects/compiler-rt/lib/tsan/rtl/tsan_mutexset.h
   26     u64 id;
   27     u64 epoch;
   34   void Add(u64 id, bool write, u64 epoch);
   35   void Del(u64 id, bool write);
   36   void Remove(u64 id);  // Removes the mutex completely (if it's destroyed).
projects/compiler-rt/lib/tsan/rtl/tsan_platform_linux.cpp
  182   const uptr kMarkerSize = 512 * 1024 / sizeof(u64);
  183   InternalMmapVector<u64> marker(kMarkerSize);
  185   for (volatile u64 *p = marker.data(); p < marker.data() + kMarkerSize; p++)
  187   internal_write(fd, marker.data(), marker.size() * sizeof(u64));
  207            p += marker.size() * sizeof(u64)) {
  208         internal_mmap(p, Min<uptr>(marker.size() * sizeof(u64), shadow_end - p),
projects/compiler-rt/lib/tsan/rtl/tsan_report.h
   50   u64 id;
  102   u64 id;
projects/compiler-rt/lib/tsan/rtl/tsan_rtl.cpp
  112 ThreadState::ThreadState(Context *ctx, int tid, int unique_id, u64 epoch,
  154   const u64 kMs2Ns = 1000 * 1000;
  175   u64 last_flush = NanoTime();
  181     u64 now = NanoTime();
  213       u64 last = atomic_load(&ctx->last_symbolize_time_ns,
  606 Shadow LoadShadow(u64 *p) {
  607   u64 raw = atomic_load((atomic_uint64_t*)p, memory_order_relaxed);
  612 void StoreShadow(u64 *sp, u64 s) {
  617 void StoreIfNotYetStored(u64 *sp, u64 *s) {
  623 void HandleRace(ThreadState *thr, u64 *shadow_mem,
  642     u64 *shadow_mem, Shadow cur) {
  651   u64 store_word = cur.raw();
  731 bool ContainsSameAccessSlow(u64 *s, u64 a, u64 sync_epoch, bool is_write) {
  750 bool ContainsSameAccessFast(u64 *s, u64 a, u64 sync_epoch, bool is_write) {
  804 bool ContainsSameAccess(u64 *s, u64 a, u64 sync_epoch, bool is_write) {
  820   u64 *shadow_mem = (u64*)MemToShadow(addr);
  886     u64 *shadow_mem, Shadow cur) {
  901                            u64 val) {
  925     u64 *p = (u64*)MemToShadow(addr);
  937     u64 *begin = (u64*)MemToShadow(addr);
  938     u64 *end = begin + size / kShadowCell * kShadowCnt;
  939     u64 *p = begin;
  947     u64 *p1 = p;
projects/compiler-rt/lib/tsan/rtl/tsan_rtl.h
   88                      u64 v1, u64 v2);
   90 const u64 kShadowRodata = (u64)-1;  // .rodata shadow marker
  100   FastState(u64 tid, u64 epoch) {
  108   explicit FastState(u64 x)
  112   u64 raw() const {
  116   u64 tid() const {
  117     u64 res = (x_ & ~kIgnoreBit) >> kTidShift;
  121   u64 TidWithIgnore() const {
  122     u64 res = x_ >> kTidShift;
  126   u64 epoch() const {
  127     u64 res = x_ & ((1ull << kClkBits) - 1);
  132     u64 old_epoch = epoch();
  158   u64 GetTracePos() const {
  161     const u64 mask = (1ull << (kTracePartSizeBits + hs + 1)) - 1;
  168   static const u64 kIgnoreBit = 1ull << 63;
  169   static const u64 kFreedBit = 1ull << 63;
  170   static const u64 kHistoryShift = kClkBits;
  171   static const u64 kHistoryMask = 7;
  172   u64 x_;
  185   explicit Shadow(u64 x)
  194   void SetAddr0AndSizeLog(u64 addr0, unsigned kAccessSizeLog) {
  226     u64 shifted_xor = (s1.x_ ^ s2.x_) >> kTidShift;
  233     u64 masked_xor = ((s1.x_ ^ s2.x_) >> kClkBits) & 31;
  240     u64 diff = s1.addr0() - s2.addr0();
  255   u64 ALWAYS_INLINE addr0() const { return (x_ >> kClkBits) & 7; }
  256   u64 ALWAYS_INLINE size() const { return 1ull << size_log(); }
  307   static const u64 kReadShift   = 5 + kClkBits;
  308   static const u64 kReadBit     = 1ull << kReadShift;
  309   static const u64 kAtomicShift = 6 + kClkBits;
  310   static const u64 kAtomicBit   = 1ull << kAtomicShift;
  312   u64 size_log() const { return (x_ >> (3 + kClkBits)) & 3; }
  378   u64 fast_synch_epoch;
  398   u64 *racy_shadow_addr;
  399   u64 racy_state[2];
  450   explicit ThreadState(Context *ctx, int tid, int unique_id, u64 epoch,
  490   u64 epoch0;
  491   u64 epoch1;
  557   u64 stat[StatCnt];
  558   u64 int_alloc_cnt[MBlockTypeCount];
  559   u64 int_alloc_siz[MBlockTypeCount];
  595   u64 AddMutex(u64 id);
  612   void AddDeadMutex(u64 id);
  628 void RestoreStack(int tid, const u64 epoch, VarSizeStackTrace *stk,
  669 void ALWAYS_INLINE StatInc(ThreadState *thr, StatType typ, u64 n = 1) {
  674 void ALWAYS_INLINE StatSet(ThreadState *thr, StatType typ, u64 n) {
  727     u64 *shadow_mem, Shadow cur);
  852                                         EventType typ, u64 addr) {
  859   u64 pos = fs.GetTracePos();
projects/compiler-rt/lib/tsan/rtl/tsan_rtl_mutex.cpp
   49     uptr addr, u64 mid) {
  105   u64 mid = s->GetId();
  106   u64 last_lock = s->last_lock;
  201   u64 mid = s->GetId();
  244   u64 mid = s->GetId();
  296   u64 mid = s->GetId();
  328   u64 mid = s->GetId();
  376   u64 mid = s->GetId();
  398   u64 mid = s->GetId();
  417   u64 epoch = tctx->epoch1;
  460   u64 epoch = tctx->epoch1;
projects/compiler-rt/lib/tsan/rtl/tsan_rtl_report.cpp
   35                      u64 v1, u64 v2) {
  182     u64 mid = this->AddMutex(d.id);
  277 u64 ScopedReportBase::AddMutex(u64 id) {
  278   u64 uid = 0;
  279   u64 mid = id;
  296 void ScopedReportBase::AddDeadMutex(u64 id) {
  379 void RestoreStack(int tid, const u64 epoch, VarSizeStackTrace *stk,
  391   const u64 epoch0 = RoundDown(epoch, TraceSize());
  392   const u64 eend = epoch % TraceSize();
  393   const u64 ebegin = RoundDown(eend, kTracePartSize);
  662   Vector<u64> mset_buffer;
  663   mset_buffer.Resize(sizeof(MutexSet) / sizeof(u64) + 1);
projects/compiler-rt/lib/tsan/rtl/tsan_rtl_thread.cpp
  331   u64 *shadow_mem = (u64*)MemToShadow(addr);
projects/compiler-rt/lib/tsan/rtl/tsan_sync.cpp
   26 void SyncVar::Init(ThreadState *thr, uptr pc, uptr addr, u64 uid) {
  241       const u64 uid = atomic_fetch_add(&uid_gen_, 1, memory_order_relaxed);
projects/compiler-rt/lib/tsan/rtl/tsan_sync.h
   57   u64 uid;  // Globally unique id.
   60   u64 last_lock;
   70   void Init(ThreadState *thr, uptr pc, uptr addr, u64 uid);
   73   u64 GetId() const {
   77   bool CheckId(u64 uid) const {
   81   static uptr SplitId(u64 id, u64 *uid) {
projects/compiler-rt/lib/tsan/rtl/tsan_trace.h
   41 typedef u64 Event;
   51   u64        epoch0;  // Start epoch for the trace.
projects/compiler-rt/lib/tsan/rtl/tsan_update_shadow_word_inl.h
   18   u64 *sp = &shadow_mem[idx];
projects/compiler-rt/lib/xray/xray_profile_collector.cpp
   41 constexpr u64 XRayProfilingVersion = 0x20180424;
   44 constexpr u64 XRayMagicBytes = 0x7872617970726f66;
   47   const u64 MagicBytes = XRayMagicBytes;
   48   const u64 Version = XRayProfilingVersion;
   49   u64 Timestamp = 0; // System time in nanoseconds.
   50   u64 PID = 0;       // Process ID.
   56   u64 ThreadId;