60 {
62
63
// Sweeping requires exclusive access to the page's freelist for the whole
// pass (FreeLocked below assumes the lock is already held).
65 DEBUG_ASSERT(freelist->mutex()->IsOwnedByCurrentThread());
66
67
// Running total of bytes occupied by surviving (marked) objects on this page.
68 intptr_t used_in_bytes = 0;
69
// Executable pages are handled specially when freeing ranges — see the
// is_executable branch below.
70 bool is_executable =
      page->is_executable();
74 const bool dontneed_on_sweep = FLAG_dontneed_on_sweep;
76
// Walk every object on the page.
// NOTE(review): the declarations of 'current', 'start', 'end' and 'raw_obj'
// are on lines elided from this view — presumably the page's object span;
// confirm against the full source.
77 while (current < end) {
80
81
82
// Acquire-load of the header tags; presumably pairs with a release store by
// the marker so mark bits are visible here — TODO confirm.
83 uword tags = raw_obj->untag()->tags_.load(std::memory_order_acquire);
84 intptr_t obj_size = raw_obj->untag()->HeapSize(tags);
86
// Live object (the guarding condition is on an elided line): reset its mark
// bit for the next GC cycle and account for its size.
87 raw_obj->untag()->ClearMarkBit();
88 used_in_bytes += obj_size;
89
91 } else {
// Dead object: coalesce it with any immediately-following dead objects into
// one contiguous free block [current, free_end).
92 uword free_end = current + obj_size;
93 while (free_end < end) {
// 'next_obj' is derived from 'free_end' on an elided line (94).
95 tags = next_obj->untag()->tags_.load(std::memory_order_acquire);
97
// Elided condition (96-97) — presumably "next object is marked":
// stop growing the free block.
98 break;
99 }
100
// Extend the free block over this dead neighbor.
101 free_end += next_obj->untag()->HeapSize(tags);
102 }
103
// Entire page is free: report zero live bytes and return false —
// presumably signaling the caller that the whole page can be released;
// confirm at the call site.
104 if ((current == start) && (free_end == end)) {
105 page->set_live_bytes(0);
106 return false;
107 }
// Size of the coalesced free block.
108 obj_size = free_end - current;
109 if (is_executable) {
// Overwrite the freed executable range word-by-word; the filler store is
// on an elided line (kBreakInstructionFiller/kWordSize are in scope).
110 uword cursor = current;
// NOTE(review): 'end' here may be a local shadowing the page end,
// declared on an elided line (111) as current + obj_size — confirm.
112 while (cursor < end) {
115 }
116 }
// Return whole OS pages inside the free block to the kernel.
// page_aligned_start/page_aligned_end are computed on elided lines
// (presumably RoundUp/RoundDown to the OS page size — confirm).
 else if (
 UNLIKELY(dontneed_on_sweep)) {
120 if (
 UNLIKELY(page_aligned_start < page_aligned_end)) {
// The DontNeed call head is on an elided line (121).
122 page_aligned_end - page_aligned_start);
123 }
// Debug builds: zap the freed range so use-after-free is easier to spot.
124 } else {
125#if defined(DEBUG)
126 memset(
 reinterpret_cast<void*
 >(current),
 Heap::kZapByte, obj_size);
127#endif
128 }
// Make the coalesced block available for future allocations.
129 freelist->FreeLocked(current, obj_size);
130 }
131 current += obj_size;
132 }
// At least one object survived — the all-free case returned early above.
134 ASSERT(used_in_bytes != 0);
135 page->set_live_bytes(used_in_bytes);
136 return true;
137}
#define DEBUG_ASSERT(cond)
static constexpr intptr_t kLargeHeaderSize
static constexpr T RoundUp(T x, uintptr_t alignment, uintptr_t offset=0)
static constexpr T RoundDown(T x, intptr_t alignment)
static intptr_t PageSize()
static void DontNeed(void *address, intptr_t size)
constexpr uword kBreakInstructionFiller
constexpr intptr_t kWordSize
bool IsAllocatableViaFreeLists(intptr_t size)