Diffstat (limited to 'gnu/llvm/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h')
-rw-r--r-- | gnu/llvm/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h | 21 |
1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/gnu/llvm/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h b/gnu/llvm/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
index 38d2a7d117f..f2471efced6 100644
--- a/gnu/llvm/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
+++ b/gnu/llvm/compiler-rt/lib/sanitizer_common/sanitizer_allocator_primary32.h
@@ -189,7 +189,7 @@ class SizeClassAllocator32 {
     sci->free_list.push_front(b);
   }
 
-  bool PointerIsMine(const void *p) {
+  bool PointerIsMine(const void *p) const {
     uptr mem = reinterpret_cast<uptr>(p);
     if (SANITIZER_SIGN_EXTENDED_ADDRESSES)
       mem &= (kSpaceSize - 1);
@@ -198,8 +198,9 @@ class SizeClassAllocator32 {
     return GetSizeClass(p) != 0;
   }
 
-  uptr GetSizeClass(const void *p) {
-    return possible_regions[ComputeRegionId(reinterpret_cast<uptr>(p))];
+  uptr GetSizeClass(const void *p) const {
+    uptr id = ComputeRegionId(reinterpret_cast<uptr>(p));
+    return possible_regions.contains(id) ? possible_regions[id] : 0;
   }
 
   void *GetBlockBegin(const void *p) {
@@ -237,13 +238,13 @@ class SizeClassAllocator32 {
 
   // ForceLock() and ForceUnlock() are needed to implement Darwin malloc zone
   // introspection API.
-  void ForceLock() NO_THREAD_SAFETY_ANALYSIS {
+  void ForceLock() SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
     for (uptr i = 0; i < kNumClasses; i++) {
       GetSizeClassInfo(i)->mutex.Lock();
     }
   }
 
-  void ForceUnlock() NO_THREAD_SAFETY_ANALYSIS {
+  void ForceUnlock() SANITIZER_NO_THREAD_SAFETY_ANALYSIS {
     for (int i = kNumClasses - 1; i >= 0; i--) {
       GetSizeClassInfo(i)->mutex.Unlock();
     }
@@ -251,9 +252,9 @@ class SizeClassAllocator32 {
 
   // Iterate over all existing chunks.
   // The allocator must be locked when calling this function.
-  void ForEachChunk(ForEachChunkCallback callback, void *arg) {
+  void ForEachChunk(ForEachChunkCallback callback, void *arg) const {
     for (uptr region = 0; region < kNumPossibleRegions; region++)
-      if (possible_regions[region]) {
+      if (possible_regions.contains(region) && possible_regions[region]) {
         uptr chunk_size = ClassIdToSize(possible_regions[region]);
         uptr max_chunks_in_region = kRegionSize / (chunk_size + kMetadataSize);
         uptr region_beg = region * kRegionSize;
@@ -292,9 +293,7 @@ class SizeClassAllocator32 {
     return res;
   }
 
-  uptr ComputeRegionBeg(uptr mem) {
-    return mem & ~(kRegionSize - 1);
-  }
+  uptr ComputeRegionBeg(uptr mem) const { return mem & ~(kRegionSize - 1); }
 
   uptr AllocateRegion(AllocatorStats *stat, uptr class_id) {
     DCHECK_LT(class_id, kNumClasses);
@@ -305,7 +304,7 @@ class SizeClassAllocator32 {
     MapUnmapCallback().OnMap(res, kRegionSize);
     stat->Add(AllocatorStatMapped, kRegionSize);
     CHECK(IsAligned(res, kRegionSize));
-    possible_regions.set(ComputeRegionId(res), static_cast<u8>(class_id));
+    possible_regions[ComputeRegionId(res)] = class_id;
     return res;
   }
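For readers skimming the patch: the behavioral core is that possible_regions lookups are now guarded by contains() before operator[], with an absent region id treated as size class 0 ("not mine"), and stores go through operator[] instead of set(). Below is a minimal, self-contained C++ sketch of that guarded-lookup pattern; RegionMap, kRegionSizeLog, and the sample values are hypothetical stand-ins for illustration, not the actual sanitizer_common types.

// Minimal sketch of the guarded possible_regions lookup (hypothetical types).
#include <cstdint>
#include <cstdio>
#include <unordered_map>

using uptr = std::uintptr_t;

constexpr uptr kRegionSizeLog = 20;                  // assumed: 1 MiB regions
constexpr uptr kRegionSize = uptr(1) << kRegionSizeLog;

// Hypothetical stand-in for the allocator's region map: region id -> class.
// An absent id means "this region does not belong to the allocator".
struct RegionMap {
  bool contains(uptr id) const { return classes_.count(id) != 0; }
  uptr operator[](uptr id) const { return classes_.at(id); }
  uptr &operator[](uptr id) { return classes_[id]; }

 private:
  std::unordered_map<uptr, uptr> classes_;
};

uptr ComputeRegionId(uptr mem) { return mem >> kRegionSizeLog; }

// Mirrors the patched GetSizeClass(): guard with contains() and treat an
// unknown region as size class 0 rather than indexing unconditionally.
uptr GetSizeClass(const RegionMap &possible_regions, const void *p) {
  uptr id = ComputeRegionId(reinterpret_cast<uptr>(p));
  return possible_regions.contains(id) ? possible_regions[id] : 0;
}

int main() {
  RegionMap possible_regions;
  uptr region_beg = 42 * kRegionSize;
  possible_regions[ComputeRegionId(region_beg)] = 7;  // as in AllocateRegion()

  void *inside = reinterpret_cast<void *>(region_beg + 128);
  void *outside = reinterpret_cast<void *>(kRegionSize + 128);
  std::printf("inside -> class %llu\n",
              (unsigned long long)GetSizeClass(possible_regions, inside));
  std::printf("outside -> class %llu\n",
              (unsigned long long)GetSizeClass(possible_regions, outside));
  return 0;
}

The guard matters once possible_regions is backed by a sparse container (for example a two-level map) where unpopulated index ranges may not be addressable at all; the old flat byte map could be indexed unconditionally, which is presumably why the previous code skipped the check.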