      reduce_memory_footprint_(false),
      abort_incremental_marking_(false),
      was_marked_incrementally_(false),
      sweeping_in_progress_(false),
      pending_sweeper_jobs_semaphore_(0),
      sequential_sweeping_(false),
      migration_slots_buffer_(NULL),
      have_code_to_deoptimize_(false) {
  explicit VerifyMarkingVisitor(Heap* heap) : heap_(heap) {}

    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        CHECK(heap_->mark_compact_collector()->IsMarked(object));

  void VisitEmbeddedPointer(RelocInfo* rinfo) {
    if (!rinfo->host()->IsWeakObject(rinfo->target_object())) {
      Object* p = rinfo->target_object();

  void VisitCell(RelocInfo* rinfo) {
    Code* code = rinfo->host();
    if (!code->IsWeakObject(rinfo->target_cell())) {
      ObjectVisitor::VisitCell(rinfo);
static void VerifyMarking(Heap* heap, Address bottom, Address top) {
  VerifyMarkingVisitor visitor(heap);

  Address next_object_must_be_here_or_later = bottom;

      CHECK(current >= next_object_must_be_here_or_later);
      object->Iterate(&visitor);
      next_object_must_be_here_or_later = current + object->Size();

static void VerifyMarking(NewSpace* space) {

  NewSpacePageIterator it(space->bottom(), end);

  while (it.has_next()) {
    NewSpacePage* page = it.next();
    Address limit = it.has_next() ? page->area_end() : end;
    CHECK(limit == end || !page->Contains(end));
    VerifyMarking(space->heap(), page->area_start(), limit);

static void VerifyMarking(PagedSpace* space) {
  PageIterator it(space);

  while (it.has_next()) {

    VerifyMarking(space->heap(), p->area_start(), p->area_end());

static void VerifyMarking(Heap* heap) {
  VerifyMarking(heap->old_pointer_space());
  VerifyMarking(heap->old_data_space());
  VerifyMarking(heap->code_space());
  VerifyMarking(heap->cell_space());
  VerifyMarking(heap->property_cell_space());
  VerifyMarking(heap->map_space());
  VerifyMarking(heap->new_space());

  VerifyMarkingVisitor visitor(heap);

  LargeObjectIterator it(heap->lo_space());
  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {

    obj->Iterate(&visitor);
class VerifyEvacuationVisitor : public ObjectVisitor {

    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object));

static void VerifyEvacuation(Page* page) {
  VerifyEvacuationVisitor visitor;
  HeapObjectIterator iterator(page, NULL);
  for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
       heap_object = iterator.Next()) {

    if (!heap_object->IsFiller()) {
      heap_object->Iterate(&visitor);

static void VerifyEvacuation(NewSpace* space) {
  NewSpacePageIterator it(space->bottom(), space->top());
  VerifyEvacuationVisitor visitor;

  while (it.has_next()) {
    NewSpacePage* page = it.next();
    Address current = page->area_start();
    Address limit = it.has_next() ? page->area_end() : space->top();

    while (current < limit) {

      object->Iterate(&visitor);
      current += object->Size();

static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
  if (FLAG_use_allocation_folding &&
      (space == heap->old_pointer_space() || space == heap->old_data_space())) {

  PageIterator it(space);

  while (it.has_next()) {

    if (p->IsEvacuationCandidate()) continue;

static void VerifyEvacuation(Heap* heap) {
  VerifyEvacuation(heap, heap->old_pointer_space());
  VerifyEvacuation(heap, heap->old_data_space());
  VerifyEvacuation(heap, heap->code_space());
  VerifyEvacuation(heap, heap->cell_space());
  VerifyEvacuation(heap, heap->property_cell_space());
  VerifyEvacuation(heap, heap->map_space());
  VerifyEvacuation(heap->new_space());

  VerifyEvacuationVisitor visitor;
  heap->IterateStrongRoots(&visitor, VISIT_ALL);
class VerifyNativeContextSeparationVisitor : public ObjectVisitor {

  VerifyNativeContextSeparationVisitor() : current_native_context_(NULL) {}

    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        HeapObject* object = HeapObject::cast(*current);
        if (object->IsString()) continue;
        switch (object->map()->instance_type()) {

            CheckContext(JSFunction::cast(object)->context());

            CheckContext(JSGlobalProxy::cast(object)->native_context());

            CheckContext(GlobalObject::cast(object)->native_context());

            if (object->IsContext()) {
              CheckContext(object);

              FixedArray* array = FixedArray::cast(object);
              int length = array->length();

              array->set_length(0);

              array->set_length(length);

            object->Iterate(this);

  void CheckContext(Object* context) {
    if (!context->IsContext()) return;

    if (current_native_context_ == NULL) {
      current_native_context_ = native_context;

      CHECK_EQ(current_native_context_, native_context);

  Context* current_native_context_;

static void VerifyNativeContextSeparation(Heap* heap) {
  HeapObjectIterator it(heap->code_space());

  for (Object* object = it.Next(); object != NULL; object = it.Next()) {
    VerifyNativeContextSeparationVisitor visitor;
    Code::cast(object)->CodeIterateBody(&visitor);
  int number_of_pages = space->CountTotalPages();
  intptr_t reserved = (number_of_pages * space->AreaSize());
  intptr_t free = reserved - space->SizeOfObjects();
  PrintF("[%s]: %d pages, %d (%.1f%%) free\n",
         static_cast<int>(free), static_cast<double>(free) * 100 / reserved);

#ifdef ENABLE_GDB_JIT_INTERFACE

  if (FLAG_gdbjit) return false;

        FLAG_incremental_code_compaction)) {

  } else if (FLAG_trace_fragmentation) {

  if (FLAG_trace_fragmentation) {
  DCHECK(state_ == PREPARE_GC);

  if (FLAG_verify_heap) {
    VerifyMarking(heap_);

  if (FLAG_verify_native_context_separation) {
    VerifyNativeContextSeparation(heap_);

  if (heap()->weak_embedded_objects_verification_enabled()) {
    VerifyWeakEmbeddedObjectsInCode();

  if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
    VerifyOmittedMapChecks();
  PageIterator it(space);

  while (it.has_next()) {

void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
  NewSpacePageIterator it(space->bottom(), space->top());

  while (it.has_next()) {
    NewSpacePage* p = it.next();
    CHECK(p->markbits()->IsClean());

void MarkCompactCollector::VerifyMarkbitsAreClean() {

  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
    MarkBit mark_bit = Marking::MarkBitFrom(obj);
    CHECK(Marking::IsWhite(mark_bit));

void MarkCompactCollector::VerifyWeakEmbeddedObjectsInCode() {
  HeapObjectIterator code_iterator(heap()->code_space());
  for (HeapObject* obj = code_iterator.Next(); obj != NULL;
       obj = code_iterator.Next()) {
    Code* code = Code::cast(obj);
    if (!code->is_optimized_code() && !code->is_weak_stub()) continue;

    code->VerifyEmbeddedObjectsDependency();

void MarkCompactCollector::VerifyOmittedMapChecks() {
  HeapObjectIterator iterator(heap()->map_space());
  for (HeapObject* obj = iterator.Next(); obj != NULL; obj = iterator.Next()) {
    Map* map = Map::cast(obj);
    map->VerifyOmittedMapChecks();
  PageIterator it(space);

  while (it.has_next()) {

  NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());

  while (it.has_next()) {

    MarkBit mark_bit = Marking::MarkBitFrom(obj);

  if (FLAG_job_based_sweeping) {

  if (FLAG_job_based_sweeping) {

  if (FLAG_verify_heap) {
    VerifyEvacuation(heap_);

  if (FLAG_job_based_sweeping) {

          base::TimeDelta::FromSeconds(0))) {
  if (space == heap()->old_pointer_space()) {

  } else if (space == heap()->old_data_space()) {

  intptr_t freed_bytes = space->free_list()->Concatenate(free_list);
  space->AddToAccountingStats(freed_bytes);
  space->DecrementUnsweptFreeBytes(freed_bytes);
  if (old_start == new_start) return;

  MarkBit new_mark_bit = MarkBitFrom(new_start);
  MarkBit old_mark_bit = MarkBitFrom(old_start);

  ObjectColor old_color = Color(old_mark_bit);

  if (Marking::IsBlack(old_mark_bit)) {
    old_mark_bit.Clear();
    DCHECK(IsWhite(old_mark_bit));
    Marking::MarkBlack(new_mark_bit);

  } else if (Marking::IsGrey(old_mark_bit)) {
    old_mark_bit.Clear();

    DCHECK(IsWhite(old_mark_bit));

  ObjectColor new_color = Color(new_mark_bit);
  DCHECK(new_color == old_color);

      return "OLD_POINTER_SPACE";

      return "OLD_DATA_SPACE";

      return "PROPERTY_CELL_SPACE";
  if (FLAG_trace_fragmentation) {
    PrintF("%p [%s]: %d bytes live (unswept)\n", reinterpret_cast<void*>(p),

  space->ObtainFreeListStatistics(p, &sizes);

  intptr_t ratio_threshold;
  intptr_t area_size = space->AreaSize();

    ratio_threshold = 10;

    ratio_threshold = 15;

  if (FLAG_trace_fragmentation) {
    PrintF("%p [%s]: %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %d (%.2f%%) %s\n",
           static_cast<double>(sizes.small_size_ * 100) / area_size,
           static_cast<double>(sizes.medium_size_ * 100) / area_size,
           static_cast<double>(sizes.large_size_ * 100) / area_size,
           static_cast<double>(sizes.huge_size_ * 100) / area_size,
           (ratio > ratio_threshold) ? "[fragmented]" : "");

  if (FLAG_always_compact && sizes.Total() != area_size) {

  if (ratio <= ratio_threshold) return 0;

  return static_cast<int>(ratio - ratio_threshold);
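// FreeListFragmentation above turns a page's free-list statistics into a
// single "how fragmented is this page" number: free bytes as a percentage of
// the page area, compared against a threshold. A minimal standalone version
// of that arithmetic (the threshold value here is illustrative, not V8's
// exact tuning):
#include <cstdint>
#include <cstdio>

// Returns 0 for pages that are not worth evacuating, otherwise a positive
// score that grows with the free percentage above the threshold.
static int FragmentationScore(intptr_t free_bytes, intptr_t area_size,
                              intptr_t ratio_threshold) {
  intptr_t ratio = free_bytes * 100 / area_size;  // percent free
  if (ratio <= ratio_threshold) return 0;
  return static_cast<int>(ratio - ratio_threshold);
}

int main() {
  const intptr_t kAreaSize = 1 << 20;  // pretend 1 MB of usable page area
  std::printf("mostly full page: %d\n",
              FragmentationScore(kAreaSize / 20, kAreaSize, 15));  // prints 0
  std::printf("half empty page:  %d\n",
              FragmentationScore(kAreaSize / 2, kAreaSize, 15));   // prints 35
}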
  static const int kMaxMaxEvacuationCandidates = 1000;
  int number_of_pages = space->CountTotalPages();
  int max_evacuation_candidates =
      static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1);

  if (FLAG_stress_compaction || FLAG_always_compact) {
    max_evacuation_candidates = kMaxMaxEvacuationCandidates;

    Candidate() : fragmentation_(0), page_(NULL) {}
    Candidate(int f, Page* p) : fragmentation_(f), page_(p) {}

    int fragmentation() { return fragmentation_; }
    Page* page() { return page_; }

  enum CompactionMode { COMPACT_FREE_LISTS, REDUCE_MEMORY_FOOTPRINT };

  intptr_t reserved = number_of_pages * space->AreaSize();
  intptr_t over_reserved = reserved - space->SizeOfObjects();
  static const intptr_t kFreenessThreshold = 50;

    mode = REDUCE_MEMORY_FOOTPRINT;
    max_evacuation_candidates += 2;

  if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) {

    mode = REDUCE_MEMORY_FOOTPRINT;
    max_evacuation_candidates *= 2;

  if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
        "Estimated over reserved memory: %.1f / %.1f MB (threshold %d), "
        "evacuation candidate limit: %d\n",
        static_cast<double>(over_reserved) / MB,
        static_cast<double>(reserved) / MB,
        static_cast<int>(kFreenessThreshold), max_evacuation_candidates);

  intptr_t estimated_release = 0;

  Candidate candidates[kMaxMaxEvacuationCandidates];

  max_evacuation_candidates =
      Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);

  int fragmentation = 0;
  Candidate* least = NULL;

  PageIterator it(space);
  if (it.has_next()) it.next();

  while (it.has_next()) {

    if (FLAG_stress_compaction) {
      unsigned int counter = space->heap()->ms_count();

      if ((counter & 1) == (page_number & 1)) fragmentation = 1;
    } else if (mode == REDUCE_MEMORY_FOOTPRINT) {

      if (estimated_release >= over_reserved) {

      intptr_t free_bytes = 0;

        space->ObtainFreeListStatistics(p, &sizes);
        free_bytes = sizes.Total();

      int free_pct = static_cast<int>(free_bytes * 100) / p->area_size();

      if (free_pct >= kFreenessThreshold) {
        estimated_release += free_bytes;
        fragmentation = free_pct;

      if (FLAG_trace_fragmentation) {
        PrintF("%p [%s]: %d (%.2f%%) free %s\n", reinterpret_cast<void*>(p),
               static_cast<int>(free_bytes),
               static_cast<double>(free_bytes * 100) / p->area_size(),
               (fragmentation > 0) ? "[fragmented]" : "");

    if (fragmentation != 0) {
      if (count < max_evacuation_candidates) {
        candidates[count++] = Candidate(fragmentation, p);

          for (int i = 0; i < max_evacuation_candidates; i++) {
                candidates[i].fragmentation() < least->fragmentation()) {
              least = candidates + i;

        if (least->fragmentation() < fragmentation) {
          *least = Candidate(fragmentation, p);

  for (int i = 0; i < count; i++) {

  if (count > 0 && FLAG_trace_fragmentation) {
    PrintF("Collected %d evacuation candidates for space %s\n", count,

  for (int i = 0; i < npages; i++) {
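// CollectEvacuationCandidates caps the number of candidate pages at roughly
// sqrt(number_of_pages / 2) + 1 and, once the candidate array is full, evicts
// the least fragmented entry. A standalone sketch of that "keep the top-N
// most fragmented pages" policy (PageInfo and the inputs are made up for the
// example):
#include <cmath>
#include <cstdio>
#include <vector>

struct PageInfo {
  int id;
  int fragmentation;  // 0 means "do not evacuate this page"
};

static std::vector<PageInfo> SelectCandidates(const std::vector<PageInfo>& pages) {
  int max_candidates = static_cast<int>(std::sqrt(pages.size() / 2.0) + 1);
  std::vector<PageInfo> candidates;
  for (const PageInfo& p : pages) {
    if (p.fragmentation == 0) continue;
    if (static_cast<int>(candidates.size()) < max_candidates) {
      candidates.push_back(p);
      continue;
    }
    // Array is full: replace the least fragmented candidate if this page
    // is more fragmented than it.
    size_t least = 0;
    for (size_t i = 1; i < candidates.size(); i++) {
      if (candidates[i].fragmentation < candidates[least].fragmentation) least = i;
    }
    if (candidates[least].fragmentation < p.fragmentation) candidates[least] = p;
  }
  return candidates;
}

int main() {
  std::vector<PageInfo> pages;
  for (int i = 0; i < 32; i++) pages.push_back({i, (i * 7) % 50});
  for (const PageInfo& c : SelectCandidates(pages))
    std::printf("candidate page %d (fragmentation %d)\n", c.id, c.fragmentation);
}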
  DCHECK(!FLAG_never_compact || !FLAG_always_compact);

  PagedSpaces spaces(heap());
       space = spaces.next()) {
    space->PrepareForMarkCompact();

    VerifyMarkbitsAreClean();

  DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
  while (candidate != NULL) {

    Code* code = shared->code();
    MarkBit code_mark = Marking::MarkBitFrom(code);
    if (!code_mark.Get()) {
      if (FLAG_trace_code_flushing && shared->is_compiled()) {
        PrintF("[code-flushing clears: ");

      shared->set_code(lazy_compile);

    Object** shared_code_slot =
        shared_code_slot, shared_code_slot, *shared_code_slot);

    candidate = next_candidate;

  while (candidate != NULL) {

    Code* code = candidate->code();
    MarkBit code_mark = Marking::MarkBitFrom(code);
    if (!code_mark.Get()) {
      if (FLAG_trace_code_flushing && candidate->is_compiled()) {
        PrintF("[code-flushing clears: ");

      candidate->set_code(lazy_compile);

    candidate = next_candidate;
  while (holder != NULL) {

    FixedArray* code_map = FixedArray::cast(holder->optimized_code_map());

    int old_length = code_map->length();

      if (!Marking::MarkBitFrom(code).Get()) continue;

      int dst_index = new_length++;

      code_map->set(dst_index, object);

             Marking::IsBlack(Marking::MarkBitFrom(HeapObject::cast(*slot))));

    if (new_length < old_length) {

    holder = next_holder;

  if (FLAG_trace_code_flushing) {
    PrintF("[code-flushing abandons function-info: ");

  if (candidate == shared_info) {

    while (candidate != NULL) {

      if (next_candidate == shared_info) {

      candidate = next_candidate;

  DCHECK(!function->next_function_link()->IsUndefined());

  if (FLAG_trace_code_flushing) {
    PrintF("[code-flushing abandons closure: ");
    function->shared()->ShortPrint();

  if (candidate == function) {

    while (candidate != NULL) {

      if (next_candidate == function) {

      candidate = next_candidate;

  DCHECK(!FixedArray::cast(code_map_holder->optimized_code_map())

  if (FLAG_trace_code_flushing) {
    PrintF("[code-flushing abandons code-map: ");

  if (holder == code_map_holder) {

    while (holder != NULL) {

      if (next_holder == code_map_holder) {

      holder = next_holder;

  while (candidate != NULL) {

    candidate = next_candidate;

  while (candidate != NULL) {

    candidate = next_candidate;

  while (holder != NULL) {

    holder = next_holder;

  while (candidate != NULL) {

    v->VisitPointer(reinterpret_cast<Object**>(slot));
  if (!FLAG_clever_optimizations) return object;
  Map* map = object->map();

  if (second != heap->empty_string()) {

  return HeapObject::cast(first);
template <MarkCompactMarkingVisitor::VisitorId id>

    const int kMinRangeForMarkingRecursion = 64;
    if (end - start >= kMinRangeForMarkingRecursion) {
      if (VisitUnmarkedObjects(heap, start, end)) return;

    for (Object** p = start; p < end; p++) {
      MarkObjectByPointer(collector, start, p);

    MarkBit mark = Marking::MarkBitFrom(object);

    MarkBit mark_bit = Marking::MarkBitFrom(object);
    if (!mark_bit.Get()) {

    if (!(*p)->IsHeapObject()) return;

    collector->RecordSlot(anchor_slot, p, object);
    MarkBit mark = Marking::MarkBitFrom(object);
    collector->MarkObject(object, mark);

    MarkBit mark = Marking::MarkBitFrom(obj);

    MarkBit map_mark = Marking::MarkBitFrom(map);

    IterateBody(map, obj);

    StackLimitCheck check(heap->isolate());
    if (check.HasOverflowed()) return false;

    for (Object** p = start; p < end; p++) {

      if (!o->IsHeapObject()) continue;
      collector->RecordSlot(start, p, o);

      MarkBit mark = Marking::MarkBitFrom(obj);
      if (mark.Get()) continue;
      VisitUnmarkedObject(collector, obj);

    if (HeapObject::cast(re->data())->map()->instance_type() !=

    if (!code->IsSmi() &&
        HeapObject::cast(code)->map()->instance_type() == CODE_TYPE) {
      FixedArray* data = FixedArray::cast(re->data());

    } else if (code->IsSmi()) {
      int value = Smi::cast(code)->value();

      VisitJSRegExp(map, object);

      VisitJSRegExp(map, object);

    if (fixed_array->map() != heap->fixed_cow_array_map() &&
        fixed_array->map() != heap->fixed_double_array_map() &&
        fixed_array != heap->empty_fixed_array()) {
      if (fixed_array->IsDictionary()) {

      int object_size = obj->Size();

      if (obj->IsJSObject()) {
        JSObject* object = JSObject::cast(obj);
                                    FAST_ELEMENTS_SUB_TYPE);
                                    DICTIONARY_PROPERTIES_SUB_TYPE,
                                    FAST_PROPERTIES_SUB_TYPE);

template <MarkCompactMarkingVisitor::VisitorId id>

      ObjectStatsVisitBase(id, map, obj);

      Map* map_obj = Map::cast(obj);

          array != heap->empty_descriptor_array()) {
        int fixed_array_size = array->Size();

        int fixed_array_size = map_obj->transitions()->Size();

        CodeCache* cache = CodeCache::cast(map_obj->code_cache());
                                         cache->default_cache()->Size());
        if (!cache->normal_type_cache()->IsUndefined()) {
              MAP_CODE_CACHE_SUB_TYPE,
              FixedArray::cast(cache->normal_type_cache())->Size());

      int object_size = obj->Size();

      Code* code_obj = Code::cast(obj);

      if (sfi->scope_info() != heap->empty_fixed_array()) {
            SCOPE_INFO_SUB_TYPE, FixedArray::cast(sfi->scope_info())->Size());

      FixedArray* fixed_array = FixedArray::cast(obj);
      if (fixed_array == heap->string_table()) {
                                          fixed_array->Size());

  if (FLAG_track_gc_object_stats) {

#define VISITOR_ID_COUNT_FUNCTION(id) \
  table_.Register(kVisit##id, ObjectStatsTracker<kVisit##id>::Visit);

#undef VISITOR_ID_COUNT_FUNCTION

    if (obj->IsSharedFunctionInfo()) {

      MarkBit shared_mark = Marking::MarkBitFrom(shared);
      MarkBit code_mark = Marking::MarkBitFrom(shared->code());
      collector_->MarkObject(shared->code(), code_mark);
1707 collector_->MarkObject(shared->code(), code_mark);
1718 ThreadLocalTop* top) {
1725 Code* code = frame->unchecked_code();
1726 MarkBit code_mark = Marking::MarkBitFrom(code);
1727 MarkObject(code, code_mark);
1728 if (frame->is_optimized()) {
1730 frame->LookupCode());
1738 if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
1748 MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array);
1749 MarkObject(descriptor_array, descriptor_array_mark);
1752 DCHECK(
this ==
heap()->mark_compact_collector());
1760 &code_marking_visitor);
1774 :
collector_(heap->mark_compact_collector()) {}
1788 if (!(*p)->IsHeapObject())
return;
1792 MarkBit mark_bit = Marking::MarkBitFrom(
object);
1793 if (mark_bit.
Get())
return;
1795 Map*
map =
object->map();
1800 MarkBit map_mark = Marking::MarkBitFrom(
map);
1802 MarkCompactMarkingVisitor::IterateBody(
map,
object);
1814 template <
bool finalize_external_
strings>
1821 for (
Object** p = start; p < end; p++) {
1823 if (o->IsHeapObject() &&
1824 !Marking::MarkBitFrom(HeapObject::cast(o)).Get()) {
1825 if (finalize_external_strings) {
1826 DCHECK(o->IsExternalString());
1832 *p =
heap_->the_hole_value();
1838 DCHECK(!finalize_external_strings);
1857 if (Marking::MarkBitFrom(HeapObject::cast(
object)).Get()) {
1859 }
else if (object->IsAllocationSite() &&
1860 !(AllocationSite::cast(
object)->IsZombie())) {
  Map* filler_map = heap->one_pointer_filler_map();
  for (HeapObject* object = it->Next(); object != NULL; object = it->Next()) {
    MarkBit markbit = Marking::MarkBitFrom(object);
    if ((object->map() != filler_map) && Marking::IsGrey(markbit)) {
      Marking::GreyToBlack(markbit);

      marking_deque->PushBlack(object);
      if (marking_deque->IsFull()) return;

  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
    Address cell_base = it.CurrentCellBase();

    if (current_cell == 0) continue;

      grey_objects = current_cell & ((current_cell >> 1) |

      grey_objects = current_cell & (current_cell >> 1);

    while (grey_objects != 0) {

      grey_objects >>= trailing_zeros;
      offset += trailing_zeros;
      MarkBit markbit(cell, 1 << offset, false);
      DCHECK(Marking::IsGrey(markbit));
      Marking::GreyToBlack(markbit);

      marking_deque->PushBlack(object);
      if (marking_deque->IsFull()) return;

  int survivors_size = 0;

  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
    Address cell_base = it.CurrentCellBase();

    if (current_cell == 0) continue;

    while (current_cell != 0) {

      current_cell >>= trailing_zeros;
      offset += trailing_zeros;

      int size = object->Size();
      survivors_size += size;

        allocation = new_space->AllocateRaw(size);

  return survivors_size;
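// DiscoverGreyObjectsOnPage walks the mark bitmap one 32-bit cell at a time.
// With two adjacent bits per object (grey encoded as both bits set), the
// expression current_cell & (current_cell >> 1) leaves exactly one set bit
// per grey object, and repeated CountTrailingZeros extracts their offsets.
// Standalone sketch of that bit scan (the encoding details are simplified):
#include <cstdint>
#include <cstdio>
#include <vector>

static int CountTrailingZeros32(uint32_t value) {
  if (value == 0) return 32;
  int n = 0;
  while ((value & 1) == 0) {
    value >>= 1;
    n++;
  }
  return n;
}

// Returns the bit offsets (within the cell) at which grey objects start.
static std::vector<int> GreyOffsets(uint32_t current_cell) {
  std::vector<int> offsets;
  uint32_t grey_objects = current_cell & (current_cell >> 1);
  int offset = 0;
  while (grey_objects != 0) {
    int trailing_zeros = CountTrailingZeros32(grey_objects);
    grey_objects >>= trailing_zeros;
    offset += trailing_zeros;
    offsets.push_back(offset);
    grey_objects &= ~1u;  // consume the bit we just reported
  }
  return offsets;
}

int main() {
  // Bits 2-3 and bits 9-10 set: two grey objects, at offsets 2 and 9.
  uint32_t cell = (0x3u << 2) | (0x3u << 9);
  for (int offset : GreyOffsets(cell)) std::printf("grey object at bit %d\n", offset);
}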
  PageIterator it(space);
  while (it.has_next()) {
    Page* p = it.next();

    if (marking_deque->IsFull()) return;

  NewSpacePageIterator it(space->bottom(), space->top());
  while (it.has_next()) {

    if (marking_deque->IsFull()) return;

  if (!o->IsHeapObject()) return false;

  HeapObject* heap_object = HeapObject::cast(o);
  MarkBit mark = Marking::MarkBitFrom(heap_object);

  DCHECK(o->IsHeapObject());
  HeapObject* heap_object = HeapObject::cast(o);
  MarkBit mark = Marking::MarkBitFrom(heap_object);

  MarkBit string_table_mark = Marking::MarkBitFrom(string_table);
  if (!string_table_mark.Get()) {

    SetMark(string_table, string_table_mark);

  MarkBit mark_bit = Marking::MarkBitFrom(site);
  SetMark(site, mark_bit);

  for (int i = 0; i < ref_groups->length(); i++) {

      (*ref_groups)[last++] = entry;

    for (size_t j = 0; j < entry->length; ++j) {
      if ((*children[j])->IsHeapObject()) {
        HeapObject* child = HeapObject::cast(*children[j]);
        MarkBit mark = Marking::MarkBitFrom(child);
        MarkObject(child, mark);

  ref_groups->Rewind(last);

      HeapObject::cast(heap()->weak_object_to_code_table());
  if (!IsMarked(weak_object_to_code_table)) {
    MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
    SetMark(weak_object_to_code_table, mark);
    DCHECK(object->IsHeapObject());

    DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));

    Map* map = object->map();
    MarkBit map_mark = Marking::MarkBitFrom(map);
    MarkObject(map, map_mark);

    MarkCompactMarkingVisitor::IterateBody(map, object);

                                   heap()->old_pointer_space());

                                   heap()->property_cell_space());

  bool work_to_do = true;

  while (work_to_do) {

    if (it.frame()->type() == StackFrame::JAVA_SCRIPT) {

      if (it.frame()->type() == StackFrame::OPTIMIZED) {
        Code* code = it.frame()->LookupCode();

  double start_time = 0.0;
  if (FLAG_print_cumulative_gc_stat) {

  PostponeInterruptsScope postpone(isolate());

  bool incremental_marking_overflowed = false;

    incremental_marking_overflowed =

    incremental_marking->Abort();
  DCHECK(state_ == PREPARE_GC);
  state_ = MARK_LIVE_OBJECTS;

  if (FLAG_force_marking_deque_overflows) {
    marking_deque_end = marking_deque_start + 64 * kPointerSize;

  if (incremental_marking_overflowed) {

    while ((cell = cell_iterator.Next()) != NULL) {

      MarkCompactMarkingVisitor::VisitPointer(

        heap()->property_cell_space());

    while ((cell = js_global_property_cell_iterator.Next()) != NULL) {
      DCHECK(cell->IsPropertyCell());

      MarkCompactMarkingVisitor::VisitPropertyCell(cell->map(), cell);

  if (FLAG_print_cumulative_gc_stat) {

  if (FLAG_flush_code && !FLAG_flush_code_incrementally) {

  if (FLAG_track_gc_object_stats) {

  while (raw_context != heap()->undefined_value()) {

        raw_map_cache != heap()->undefined_value()) {

      int used_elements = 0;

        if (raw_key == heap()->undefined_value() ||
            raw_key == heap()->the_hole_value())

        if (raw_map->IsHeapObject() && IsMarked(raw_map)) {

          DCHECK(raw_map->IsMap());

      if (used_elements == 0) {

        MarkBit map_cache_markbit = Marking::MarkBitFrom(map_cache);
        MarkObject(map_cache, map_cache_markbit);
       obj = map_iterator.Next()) {
    Map* map = Map::cast(obj);

    if (!map->CanTransition()) continue;

    MarkBit map_mark = Marking::MarkBitFrom(map);

    if (map_mark.Get()) {

      map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));

       cell = cell_iterator.Next()) {

  Object* undefined = heap()->undefined_value();
  for (Object* site = heap()->allocation_sites_list(); site != undefined;
       site = AllocationSite::cast(site)->weak_next()) {

    if (!table->IsKey(key)) continue;

    Object* value = table->get(value_index);
    if (key->IsCell() && !IsMarked(key)) {
      Cell* cell = Cell::cast(key);
      Object* object = cell->value();

        MarkBit mark = Marking::MarkBitFrom(cell);
        SetMark(cell, mark);

        RecordSlot(value_slot, value_slot, *value_slot);

      MarkBit mark = Marking::MarkBitFrom(obj);

      table->set(key_index, heap_->the_hole_value());
      table->set(value_index, heap_->the_hole_value());
  int number_of_transitions = map->NumberOfProtoTransitions();
  FixedArray* prototype_transitions = map->GetPrototypeTransitions();

  int new_number_of_transitions = 0;

  for (int i = 0; i < number_of_transitions; i++) {
    Object* prototype = prototype_transitions->get(proto_offset + i * step);
    Object* cached_map = prototype_transitions->get(map_offset + i * step);

      DCHECK(!prototype->IsUndefined());
      int proto_index = proto_offset + new_number_of_transitions * step;
      int map_index = map_offset + new_number_of_transitions * step;
      if (new_number_of_transitions != i) {
        prototype_transitions->set(proto_index, prototype,

      RecordSlot(slot, slot, prototype);
      new_number_of_transitions++;

  if (new_number_of_transitions != number_of_transitions) {
    map->SetNumberOfProtoTransitions(new_number_of_transitions);

  for (int i = new_number_of_transitions * step;
       i < number_of_transitions * step; i++) {

  Object* potential_parent = map->GetBackPointer();
  if (!potential_parent->IsMap()) return;
  Map* parent = Map::cast(potential_parent);

  bool current_is_alive = map_mark.Get();
  bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
  if (!current_is_alive && parent_is_alive) {

  if (Marking::MarkBitFrom(target).Get()) return false;

  if (!map->HasTransitionArray()) return;

  int transition_index = 0;

  bool descriptors_owner_died = false;

    if (target->instance_descriptors() == descriptors) {
      descriptors_owner_died = true;

    if (i != transition_index) {

      t->SetKey(transition_index, key);

      RecordSlot(key_slot, key_slot, key);

  int number_of_own_descriptors = map->NumberOfOwnDescriptors();

  if (descriptors_owner_died) {
    if (number_of_own_descriptors > 0) {

      map->set_owns_descriptors(true);

      DCHECK(descriptors == heap_->empty_descriptor_array());

                                               int number_of_own_descriptors) {

  int to_trim = number_of_descriptors - number_of_own_descriptors;
  if (to_trim == 0) return;

  descriptors->Sort();

  int live_enum = map->EnumLength();

  int to_trim = enum_cache->length() - live_enum;
  if (to_trim <= 0) return;

  Object* undefined = heap()->undefined_value();
  while (current != undefined) {
    Code* code = Code::cast(current);

    current = code->next_code_link();
    code->set_next_code_link(undefined);
  if (number_of_entries == 0) return;

    if (starts.at(g) != starts.at(g + 1)) {
      int i = starts.at(g);

      for (int i = starts.at(g); i < starts.at(g + 1); i++) {

  for (int i = 0; i < number_of_entries; i++) {

    DependentCode* entries, int group, int start, int end, int new_start) {

    DCHECK(start + 1 == end);

      RecordSlot(slot, slot, head);

  for (int i = start; i < end; i++) {

      if (new_start + survived != i) {

      RecordSlot(slot, slot, obj);

  if (number_of_entries == 0) return;

  int new_number_of_entries = 0;

        entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
    new_number_of_entries += survived;

  for (int i = new_number_of_entries; i < number_of_entries; i++) {

    if (weak_collection->table()->IsHashTable()) {
      ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());

          RecordSlot(anchor, key_slot, *key_slot);

          MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor,

    weak_collection_obj = weak_collection->next();

    if (weak_collection->table()->IsHashTable()) {
      ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());

    weak_collection_obj = weak_collection->next();
    weak_collection->set_next(heap()->undefined_value());

    weak_collection_obj = weak_collection->next();
    weak_collection->set_next(heap()->undefined_value());
  } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
                     reinterpret_cast<Object**>(slot),

  DCHECK(heap()->AllowedToBeMigrated(src, dest));

  } else if (dst->IsConstantPoolArray()) {

    while (!code_iter.is_finished()) {

    while (!heap_iter.is_finished()) {

    Code::cast(dst)->Relocate(dst_addr - src_addr);

    Object* target = rinfo->target_object();
    Object* old_target = target;

    if (target != old_target) {
      rinfo->set_target_object(target);

    Object* old_target = target;

    if (target != old_target) {
      rinfo->set_target_address(Code::cast(target)->instruction_start());

    Object* stub = rinfo->code_age_stub();

    if (stub != rinfo->code_age_stub()) {
      rinfo->set_code_age_stub(Code::cast(stub));

            rinfo->IsPatchedReturnSequence()) ||
            rinfo->IsPatchedDebugBreakSlotSequence()));

    rinfo->set_call_address(Code::cast(target)->instruction_start());
  if (!obj->IsHeapObject()) return;

  HeapObject* heap_obj = HeapObject::cast(obj);

  MapWord map_word = heap_obj->map_word();
  if (map_word.IsForwardingAddress()) {
           MarkCompactCollector::IsOnEvacuationCandidate(heap_obj));
    HeapObject* target = map_word.ToForwardingAddress();
           !MarkCompactCollector::IsOnEvacuationCandidate(target));

  if (new_addr != NULL) {

  MapWord map_word = HeapObject::cast(*p)->map_word();

  if (map_word.IsForwardingAddress()) {
    return String::cast(map_word.ToForwardingAddress());

  return String::cast(*p);

  DCHECK(target_space == heap()->old_pointer_space() ||
         target_space == heap()->old_data_space());

  if (allocation.To(&target)) {
  int survivors_size = 0;

  NewSpacePageIterator it(from_bottom, from_top);
  while (it.has_next()) {

  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
    Address cell_base = it.CurrentCellBase();

    if (*cell == 0) continue;

    for (int i = 0; i < live_objects; i++) {

      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));

      int size = object->Size();

      if (!allocation.To(&target_object)) {

        space->UseEmergencyMemory();

        if (!allocation.To(&target_object)) {

  for (int i = 0; i < npages; i++) {

      if (!space->HasEmergencyMemory()) {
        space->CreateEmergencyMemory();

      if (space->HasEmergencyMemory() && space->CanExpand()) {

        for (int j = i; j < npages; j++) {

  PagedSpaces spaces(heap());
       space = spaces.next()) {
    if (space->HasEmergencyMemory()) {
      space->FreeEmergencyMemory();
  if (object->IsHeapObject()) {
    HeapObject* heap_object = HeapObject::cast(object);
    MapWord map_word = heap_object->map_word();
    if (map_word.IsForwardingAddress()) {
      return map_word.ToForwardingAddress();

  switch (slot_type) {

      rinfo.Visit(isolate, v);

      v->VisitCodeEntry(addr);

      Code::cast(obj)->CodeIterateBody(v);

      if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);

      if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(isolate, v);

      rinfo.Visit(isolate, v);
template <MarkCompactCollector::SweepingParallelism mode>

  int curr_region = -1;

  intptr_t freed_bytes = 0;
  intptr_t max_freed_bytes = 0;

  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
    Address cell_base = it.CurrentCellBase();

    for (; live_objects != 0; live_objects--) {

      if (free_end != free_start) {
        int size = static_cast<int>(free_end - free_start);

          memset(free_start, 0xcc, size);

        freed_bytes = Free<parallelism>(space, free_list, free_start, size);
        max_freed_bytes = Max(freed_bytes, max_freed_bytes);
#ifdef ENABLE_GDB_JIT_INTERFACE
          GDBJITInterface::RemoveCodeRange(free_start, free_end);

      DCHECK(Marking::IsBlack(Marking::MarkBitFrom(live_object)));

        int new_region_end =

        if (new_region_start != curr_region || new_region_end != curr_region) {

          curr_region = new_region_end;

      free_start = free_end + size;

    int size = static_cast<int>(p->area_end() - free_start);

      memset(free_start, 0xcc, size);

    freed_bytes = Free<parallelism>(space, free_list, free_start, size);
    max_freed_bytes = Max(freed_bytes, max_freed_bytes);
#ifdef ENABLE_GDB_JIT_INTERFACE
      GDBJITInterface::RemoveCodeRange(free_start, p->area_end());
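// The Sweep template above walks a page's live objects in address order and
// turns every gap between them (and the tail of the page) into free-list
// entries, tracking the largest block freed. A standalone sketch of that scan
// over a sorted list of live [start, size) intervals (the types here are
// illustrative, not V8's):
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <vector>

struct LiveObject {
  uintptr_t start;
  int size;
};

// Returns the size of the largest free block discovered while "sweeping"
// the range [area_start, area_end).
static int SweepFreeGaps(uintptr_t area_start, uintptr_t area_end,
                         const std::vector<LiveObject>& live) {
  uintptr_t free_start = area_start;
  int max_freed_bytes = 0;
  for (const LiveObject& object : live) {
    if (object.start != free_start) {
      int size = static_cast<int>(object.start - free_start);
      max_freed_bytes = std::max(max_freed_bytes, size);
      std::printf("free block at %#lx, %d bytes\n",
                  static_cast<unsigned long>(free_start), size);
    }
    free_start = object.start + object.size;
  }
  if (free_start != area_end) {
    int size = static_cast<int>(area_end - free_start);
    max_freed_bytes = std::max(max_freed_bytes, size);
    std::printf("tail free block at %#lx, %d bytes\n",
                static_cast<unsigned long>(free_start), size);
  }
  return max_freed_bytes;
}

int main() {
  std::vector<LiveObject> live = {{0x1020, 32}, {0x1100, 64}};
  std::printf("largest freed block: %d bytes\n",
              SweepFreeGaps(0x1000, 0x1200, live));
}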
  if (start_cell == end_cell) {
    *start_cell |= start_mask & end_mask;

    *start_cell |= start_mask;

    *end_cell |= end_mask;

  return mark_bit.Get();

      !ShouldSkipEvacuationSlotRecording(code)) {

    MarkBit mark_bit = Marking::MarkBitFrom(code);
    if (Marking::IsWhite(mark_bit)) return;

  bool code_marked = false;

  for (int i = 0; i < length; i++) {

  for (int i = 0; i < length; i++) {

  for (int i = 0; i < length; i++) {
  bool code_slots_filtering_required;

                               heap()->new_space()->top());
       object = to_it.Next()) {
    Map* map = object->map();
    object->IterateBody(map->instance_type(), object->SizeFromMap(map),

                                       code_slots_filtering_required);
  if (FLAG_trace_fragmentation) {
    PrintF("  migration slots buffer: %d\n",

    obj->Iterate(&updating_visitor);

  for (int i = 0; i < npages; i++) {

                                         code_slots_filtering_required);
      if (FLAG_trace_fragmentation) {
        PrintF("  page %p slots buffer: %d\n", reinterpret_cast<void*>(p),

      if (FLAG_gc_verbose) {
               reinterpret_cast<intptr_t>(p));

      switch (space->identity()) {

          if (FLAG_zap_code_space) {

       cell = cell_iterator.Next()) {
    if (cell->IsCell()) {

       cell = js_global_property_cell_iterator.Next()) {
    if (cell->IsPropertyCell()) {

  heap_->string_table()->Iterate(&updating_visitor);

    table->Iterate(&updating_visitor);

  for (int i = 0; i < npages; i++) {

  for (int i = 0; i < npages; i++) {

    space->ReleasePage(p);
#define _ kStartTableUnusedEntry
#define X kStartTableInvalidLine

  DCHECK((mark_bits & 0x180) != 0x180);
  DCHECK((mark_bits & 0x18000) != 0x18000);
  DCHECK((mark_bits & 0x1800000) != 0x1800000);

  while (mark_bits != 0) {
    int byte = (mark_bits & 0xff);

      int objects_in_these_8_words = table[0];

      for (int i = 0; i < objects_in_these_8_words; i++) {
        starts[objects++] = offset + table[1 + i];
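// MarkWordToObjectStarts expands a 32-bit mark word into the list of object
// start offsets using a lookup table indexed by one byte of mark bits at a
// time. A simplified standalone version follows; here every set bit is
// treated as an object start, whereas V8's real kStartTable also accounts for
// the two-bit colour encoding, so this is only an illustration of the
// byte-at-a-time table lookup.
#include <cstdint>
#include <cstdio>

// table[b][0] is the number of starts in byte pattern b, followed by offsets.
static int table[256][9];

static void BuildStartTable() {
  for (int b = 0; b < 256; b++) {
    int count = 0;
    for (int bit = 0; bit < 8; bit++) {
      if (b & (1 << bit)) table[b][1 + count++] = bit;
    }
    table[b][0] = count;
  }
}

static int MarkWordToObjectStarts(uint32_t mark_bits, int* starts) {
  int objects = 0;
  int offset = 0;
  while (mark_bits != 0) {
    int byte = (mark_bits & 0xff);
    mark_bits >>= 8;
    if (byte != 0) {
      int objects_in_these_8_words = table[byte][0];
      for (int i = 0; i < objects_in_these_8_words; i++) {
        starts[objects++] = offset + table[byte][1 + i];
      }
    }
    offset += 8;
  }
  return objects;
}

int main() {
  BuildStartTable();
  int starts[32];
  int n = MarkWordToObjectStarts((1u << 3) | (1u << 17), starts);
  for (int i = 0; i < n; i++) std::printf("object starts at word %d\n", starts[i]);
}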
                                          int required_freed_bytes) {

  int max_freed_overall = 0;
  PageIterator it(space);
  while (it.has_next()) {
    Page* p = it.next();

      if (required_freed_bytes > 0 && max_freed >= required_freed_bytes) {

      max_freed_overall = Max(max_freed, max_freed_overall);
    if (p == space->end_of_unswept_pages()) break;

  return max_freed_overall;
  space->ClearStats();

  space->set_end_of_unswept_pages(space->FirstPage());

  PageIterator it(space);

  int pages_swept = 0;
  bool unused_page_present = false;
  bool parallel_sweeping_active = false;

  while (it.has_next()) {
    Page* p = it.next();

    if (unused_page_present) {
      if (FLAG_gc_verbose) {
               reinterpret_cast<intptr_t>(p));

        space->IncreaseUnsweptFreeBytes(p);
        space->ReleasePage(p);

      unused_page_present = true;

        if (!parallel_sweeping_active) {
          if (FLAG_gc_verbose) {
                   reinterpret_cast<intptr_t>(p));

          parallel_sweeping_active = true;

          if (FLAG_gc_verbose) {
                   reinterpret_cast<intptr_t>(p));

          space->IncreaseUnsweptFreeBytes(p);

        space->set_end_of_unswept_pages(p);

        if (FLAG_gc_verbose) {
          PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p));

  if (FLAG_gc_verbose) {
    PrintF("SweepSpace: %s (%d pages swept)\n",
  double start_time = 0.0;
  if (FLAG_print_cumulative_gc_stat) {

  state_ = SWEEP_SPACES;

    SequentialSweepingScope scope(this);

  if (FLAG_print_cumulative_gc_stat) {

  PageIterator it(space);
  while (it.has_next()) {
    Page* p = it.next();

  if (isolate()->debug()->is_loaded() ||
      isolate()->debug()->has_break_points()) {

  if (FLAG_trace_code_flushing) {
    PrintF("[code-flushing is now %s]\n", enable ? "on" : "off");
  if (obj->IsCode()) {

    *buffer_address = buffer;

        !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {

      Object** target_pointer =
          reinterpret_cast<Object**>(rinfo->constant_pool_entry_address());

        EvictEvacuationCandidate(target_page);

      !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {

      EvictEvacuationCandidate(target_page);

  MarkBit mark_bit = Marking::MarkBitFrom(host);
  if (Marking::IsBlack(mark_bit)) {

  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {

  for (int slot_idx = 0; slot_idx < idx_; ++slot_idx) {

  while (buffer != NULL) {

    buffer = next_buffer;

  *buffer_address = NULL;
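// The SlotsBuffer fragments above record evacuation-candidate slots in
// fixed-size buffers chained through a "next" pointer; when a buffer fills
// up, a new one is prepended and the chain is walked (or deallocated) later.
// Standalone sketch of that chain structure; kNumberOfElements and the helper
// names here are illustrative, not V8's exact definitions.
#include <cstddef>
#include <cstdio>

typedef void** ObjectSlot;

struct SlotsBuffer {
  static const int kNumberOfElements = 1021;

  explicit SlotsBuffer(SlotsBuffer* next) : idx_(0), next_(next) {}

  bool HasSpaceForSlot() const { return idx_ < kNumberOfElements; }
  void Add(ObjectSlot slot) { slots_[idx_++] = slot; }

  int idx_;
  SlotsBuffer* next_;
  ObjectSlot slots_[kNumberOfElements];
};

// Mirrors the AddTo idea: allocate a new buffer when the head is full.
static void AddSlot(SlotsBuffer** buffer_address, ObjectSlot slot) {
  SlotsBuffer* buffer = *buffer_address;
  if (buffer == NULL || !buffer->HasSpaceForSlot()) {
    buffer = new SlotsBuffer(buffer);
    *buffer_address = buffer;
  }
  buffer->Add(slot);
}

// Mirrors DeallocateChain: free every buffer and clear the head pointer.
static void DeallocateChain(SlotsBuffer** buffer_address) {
  SlotsBuffer* buffer = *buffer_address;
  while (buffer != NULL) {
    SlotsBuffer* next_buffer = buffer->next_;
    delete buffer;
    buffer = next_buffer;
  }
  *buffer_address = NULL;
}

int main() {
  SlotsBuffer* chain = NULL;
  void* objects[4000];
  for (int i = 0; i < 4000; i++) AddSlot(&chain, &objects[i]);
  int buffers = 0;
  for (SlotsBuffer* b = chain; b != NULL; b = b->next_) buffers++;
  std::printf("%d buffers in chain\n", buffers);
  DeallocateChain(&chain);
}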