V8 Project
v8::internal::MarkCompactCollector Class Reference

#include <mark-compact.h>


Classes

class  SweeperTask
 

Public Types

enum  CompactionMode { INCREMENTAL_COMPACTION , NON_INCREMENTAL_COMPACTION }
 
enum  SweeperType { PARALLEL_SWEEPING , CONCURRENT_SWEEPING , SEQUENTIAL_SWEEPING }
 
enum  SweepingParallelism { SWEEP_ON_MAIN_THREAD , SWEEP_IN_PARALLEL }
 

Public Member Functions

void SetFlags (int flags)
 
void SetUp ()
 
void TearDown ()
 
void CollectEvacuationCandidates (PagedSpace *space)
 
void AddEvacuationCandidate (Page *p)
 
void Prepare ()
 
void CollectGarbage ()
 
bool StartCompaction (CompactionMode mode)
 
void AbortCompaction ()
 
Heap * heap () const
 
Isolate * isolate () const
 
CodeFlusher * code_flusher ()
 
bool is_code_flushing_enabled () const
 
void EnableCodeFlushing (bool enable)
 
 INLINE (static bool ShouldSkipEvacuationSlotRecording(Object **anchor))
 
 INLINE (static bool ShouldSkipEvacuationSlotRecording(Object *host))
 
 INLINE (static bool IsOnEvacuationCandidate(Object *obj))
 
 INLINE (void EvictEvacuationCandidate(Page *page))
 
void RecordRelocSlot (RelocInfo *rinfo, Object *target)
 
void RecordCodeEntrySlot (Address slot, Code *target)
 
void RecordCodeTargetPatch (Address pc, Code *target)
 
 INLINE (void RecordSlot(Object **anchor_slot, Object **slot, Object *object, SlotsBuffer::AdditionMode mode=SlotsBuffer::FAIL_ON_OVERFLOW))
 
void MigrateObject (HeapObject *dst, HeapObject *src, int size, AllocationSpace to_old_space)
 
bool TryPromoteObject (HeapObject *object, int object_size)
 
void InvalidateCode (Code *code)
 
void ClearMarkbits ()
 
bool abort_incremental_marking () const
 
bool is_compacting () const
 
MarkingParity marking_parity ()
 
int SweepInParallel (PagedSpace *space, int required_freed_bytes)
 
int SweepInParallel (Page *page, PagedSpace *space)
 
void EnsureSweepingCompleted ()
 
bool IsSweepingCompleted ()
 
void RefillFreeList (PagedSpace *space)
 
bool AreSweeperThreadsActivated ()
 
bool sweeping_in_progress ()
 
void set_sequential_sweeping (bool sequential_sweeping)
 
bool sequential_sweeping () const
 
void MarkWeakObjectToCodeTable ()
 
void MarkAllocationSite (AllocationSite *site)
 

Static Public Member Functions

static void Initialize ()
 
static void ReportDeleteIfNeeded (HeapObject *obj, Isolate *isolate)
 
static bool IsMarked (Object *obj)
 

Static Public Attributes

static const uint32_t kSingleFreeEncoding = 0
 
static const uint32_t kMultiFreeEncoding = 1
 

Private Member Functions

 MarkCompactCollector (Heap *heap)
 
 ~MarkCompactCollector ()
 
bool MarkInvalidatedCode ()
 
bool WillBeDeoptimized (Code *code)
 
void RemoveDeadInvalidatedCode ()
 
void ProcessInvalidatedCode (ObjectVisitor *visitor)
 
void StartSweeperThreads ()
 
void Finish ()
 
void PrepareThreadForCodeFlushing (Isolate *isolate, ThreadLocalTop *top)
 
void PrepareForCodeFlushing ()
 
void MarkLiveObjects ()
 
void AfterMarking ()
 
 INLINE (void MarkObject(HeapObject *obj, MarkBit mark_bit))
 
 INLINE (void SetMark(HeapObject *obj, MarkBit mark_bit))
 
void MarkRoots (RootMarkingVisitor *visitor)
 
void MarkStringTable (RootMarkingVisitor *visitor)
 
void MarkImplicitRefGroups ()
 
void ProcessMarkingDeque ()
 
void ProcessEphemeralMarking (ObjectVisitor *visitor)
 
void ProcessTopOptimizedFrame (ObjectVisitor *visitor)
 
void EmptyMarkingDeque ()
 
void RefillMarkingDeque ()
 
void ProcessMapCaches ()
 
void ClearNonLiveReferences ()
 
void ClearNonLivePrototypeTransitions (Map *map)
 
void ClearNonLiveMapTransitions (Map *map, MarkBit map_mark)
 
void ClearMapTransitions (Map *map)
 
bool ClearMapBackPointer (Map *map)
 
void TrimDescriptorArray (Map *map, DescriptorArray *descriptors, int number_of_own_descriptors)
 
void TrimEnumCache (Map *map, DescriptorArray *descriptors)
 
void ClearDependentCode (DependentCode *dependent_code)
 
void ClearDependentICList (Object *head)
 
void ClearNonLiveDependentCode (DependentCode *dependent_code)
 
int ClearNonLiveDependentCodeInGroup (DependentCode *dependent_code, int group, int start, int end, int new_start)
 
void ProcessWeakCollections ()
 
void ClearWeakCollections ()
 
void AbortWeakCollections ()
 
void SweepSpaces ()
 
int DiscoverAndEvacuateBlackObjectsOnPage (NewSpace *new_space, NewSpacePage *p)
 
void EvacuateNewSpace ()
 
void EvacuateLiveObjectsFromPage (Page *p)
 
void EvacuatePages ()
 
void EvacuateNewSpaceAndCandidates ()
 
void ReleaseEvacuationCandidates ()
 
void MoveEvacuationCandidatesToEndOfPagesList ()
 
void SweepSpace (PagedSpace *space, SweeperType sweeper)
 
void ParallelSweepSpacesComplete ()
 
void ParallelSweepSpaceComplete (PagedSpace *space)
 
void RecordMigratedSlot (Object *value, Address slot)
 

Static Private Member Functions

static bool IsUnmarkedHeapObject (Object **p)
 
static bool IsUnmarkedHeapObjectWithHeap (Heap *heap, Object **p)
 

Private Attributes

bool reduce_memory_footprint_
 
bool abort_incremental_marking_
 
MarkingParity marking_parity_
 
bool compacting_
 
bool was_marked_incrementally_
 
bool sweeping_in_progress_
 
base::Semaphore pending_sweeper_jobs_semaphore_
 
bool sequential_sweeping_
 
SlotsBufferAllocator slots_buffer_allocator_
 
SlotsBuffer * migration_slots_buffer_
 
Heap * heap_
 
MarkingDeque marking_deque_
 
CodeFlusher * code_flusher_
 
bool have_code_to_deoptimize_
 
List< Page * > evacuation_candidates_
 
List< Code * > invalidated_code_
 
SmartPointer< FreeList > free_list_old_data_space_
 
SmartPointer< FreeList > free_list_old_pointer_space_
 

Friends

class RootMarkingVisitor
 
class MarkingVisitor
 
class MarkCompactMarkingVisitor
 
class CodeMarkingVisitor
 
class SharedFunctionInfoMarkingVisitor
 
class Heap
 

Detailed Description

Definition at line 498 of file mark-compact.h.

Member Enumeration Documentation

◆ CompactionMode

Enumerator
INCREMENTAL_COMPACTION 
NON_INCREMENTAL_COMPACTION 

Definition at line 520 of file mark-compact.h.

◆ SweeperType

Enumerator
PARALLEL_SWEEPING 
CONCURRENT_SWEEPING 
SEQUENTIAL_SWEEPING 

Definition at line 549 of file mark-compact.h.

◆ SweepingParallelism

Enumerator
SWEEP_ON_MAIN_THREAD 
SWEEP_IN_PARALLEL 

Definition at line 555 of file mark-compact.h.

Constructor & Destructor Documentation

◆ MarkCompactCollector()

v8::internal::MarkCompactCollector::MarkCompactCollector ( Heap *  heap )
explicit private

Definition at line 39 of file mark-compact.cc.

40  : // NOLINT
41 #ifdef DEBUG
42  state_(IDLE),
43 #endif
44  reduce_memory_footprint_(false),
45  abort_incremental_marking_(false),
46  marking_parity_(ODD_MARKING_PARITY),
47  compacting_(false),
48  was_marked_incrementally_(false),
49  sweeping_in_progress_(false),
50  pending_sweeper_jobs_semaphore_(0),
51  sequential_sweeping_(false),
52  migration_slots_buffer_(NULL),
53  heap_(heap),
54  code_flusher_(NULL),
55  have_code_to_deoptimize_(false) {
56 }
base::Semaphore pending_sweeper_jobs_semaphore_
Definition: mark-compact.h:706
@ ODD_MARKING_PARITY
Definition: objects.h:299

◆ ~MarkCompactCollector()

v8::internal::MarkCompactCollector::~MarkCompactCollector ( )
private

Definition at line 1309 of file mark-compact.cc.

1309  {
1310  if (code_flusher_ != NULL) {
1311  delete code_flusher_;
1312  code_flusher_ = NULL;
1313  }
1314 }

References code_flusher_, and NULL.

Member Function Documentation

◆ abort_incremental_marking()

bool v8::internal::MarkCompactCollector::abort_incremental_marking ( ) const
inline

Definition at line 618 of file mark-compact.h.

618 { return abort_incremental_marking_; }

References abort_incremental_marking_.

◆ AbortCompaction()

void v8::internal::MarkCompactCollector::AbortCompaction ( )

Definition at line 918 of file mark-compact.cc.

918  {
919  if (compacting_) {
920  int npages = evacuation_candidates_.length();
921  for (int i = 0; i < npages; i++) {
922  Page* p = evacuation_candidates_[i];
923  slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address());
924  p->ClearEvacuationCandidate();
925  p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
926  }
927  compacting_ = false;
928  evacuation_candidates_.Rewind(0);
929  invalidated_code_.Rewind(0);
930  }
931  DCHECK_EQ(0, evacuation_candidates_.length());
932 }
SlotsBufferAllocator slots_buffer_allocator_
Definition: mark-compact.h:710
void DeallocateChain(SlotsBuffer **buffer_address)
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206

References v8::internal::MemoryChunk::ClearEvacuationCandidate(), v8::internal::MemoryChunk::ClearFlag(), compacting_, DCHECK_EQ, v8::internal::SlotsBufferAllocator::DeallocateChain(), evacuation_candidates_, invalidated_code_, v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, v8::internal::MemoryChunk::slots_buffer_address(), and slots_buffer_allocator_.

Referenced by Prepare(), and TearDown().


◆ AbortWeakCollections()

void v8::internal::MarkCompactCollector::AbortWeakCollections ( )
private

Definition at line 2846 of file mark-compact.cc.

2846  {
2847  GCTracer::Scope gc_scope(heap()->tracer(),
2849  Object* weak_collection_obj = heap()->encountered_weak_collections();
2850  while (weak_collection_obj != Smi::FromInt(0)) {
2851  JSWeakCollection* weak_collection =
2852  reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2853  weak_collection_obj = weak_collection->next();
2854  weak_collection->set_next(heap()->undefined_value());
2855  }
2857 }
Object * encountered_weak_collections() const
Definition: heap.h:811
void set_encountered_weak_collections(Object *weak_collection)
Definition: heap.h:808
static Smi * FromInt(int value)
Definition: objects-inl.h:1321

References v8::internal::Heap::encountered_weak_collections(), v8::internal::Smi::FromInt(), heap(), v8::internal::GCTracer::Scope::MC_WEAKCOLLECTION_ABORT, and v8::internal::Heap::set_encountered_weak_collections().

Referenced by Prepare().

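The weak collections encountered during marking form an intrusive singly linked list threaded through each collection's next field and terminated by Smi::FromInt(0); aborting simply unthreads every node. A minimal sketch of that unthreading pattern, with plain pointers standing in for V8's tagged values (WeakCollection, end_sentinel and detached are illustrative stand-ins, not V8 API):

    struct WeakCollection {
      WeakCollection* next;  // stand-in for JSWeakCollection::next()
    };

    // Walk from head to the end sentinel and detach every node, mirroring
    // the set_next(heap()->undefined_value()) calls in the listing above.
    void UnthreadAll(WeakCollection*& head, WeakCollection* end_sentinel,
                     WeakCollection* detached) {
      WeakCollection* current = head;
      while (current != end_sentinel) {
        WeakCollection* next = current->next;
        current->next = detached;  // detach from the encountered list
        current = next;
      }
      head = end_sentinel;
    }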

◆ AddEvacuationCandidate()

void v8::internal::MarkCompactCollector::AddEvacuationCandidate ( Page *  p )

Definition at line 337 of file mark-compact.cc.

337  {
338  p->MarkEvacuationCandidate();
339  evacuation_candidates_.Add(p);
340 }

References evacuation_candidates_, and v8::internal::MemoryChunk::MarkEvacuationCandidate().

Referenced by CollectEvacuationCandidates().


◆ AfterMarking()

void v8::internal::MarkCompactCollector::AfterMarking ( )
private

Definition at line 2351 of file mark-compact.cc.

2351  {
2352  // Object literal map caches reference strings (cache keys) and maps
2353  // (cache values). At this point still useful maps have already been
2354  // marked. Mark the keys for the alive values before we process the
2355  // string table.
2356  ProcessMapCaches();
2357 
2358  // Prune the string table removing all strings only pointed to by the
2359  // string table. Cannot use string_table() here because the string
2360  // table is marked.
2361  StringTable* string_table = heap()->string_table();
2362  InternalizedStringTableCleaner internalized_visitor(heap());
2363  string_table->IterateElements(&internalized_visitor);
2364  string_table->ElementsRemoved(internalized_visitor.PointersRemoved());
2365 
2366  ExternalStringTableCleaner external_visitor(heap());
2367  heap()->external_string_table_.Iterate(&external_visitor);
2369 
2370  // Process the weak references.
2371  MarkCompactWeakObjectRetainer mark_compact_object_retainer;
2372  heap()->ProcessWeakReferences(&mark_compact_object_retainer);
2373 
2374  // Remove object groups after marking phase.
2377 
2378  // Flush code from collected candidates.
2379  if (is_code_flushing_enabled()) {
2381  // If incremental marker does not support code flushing, we need to
2382  // disable it before incremental marking steps for next cycle.
2383  if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
2384  EnableCodeFlushing(false);
2385  }
2386  }
2387 
2388  if (FLAG_track_gc_object_stats) {
2390  }
2391 }
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:654
ExternalStringTable external_string_table_
Definition: heap.h:2013
void CheckpointObjectStats()
Definition: heap.cc:6100
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1626
Isolate * isolate()
Definition: heap-inl.h:589
GlobalHandles * global_handles()
Definition: isolate.h:917
StringTableCleaner< false > InternalizedStringTableCleaner
StringTableCleaner< true > ExternalStringTableCleaner

References v8::internal::Heap::CheckpointObjectStats(), v8::internal::ExternalStringTable::CleanUp(), code_flusher_, v8::internal::HashTable< Derived, Shape, Key >::ElementsRemoved(), EnableCodeFlushing(), v8::internal::Heap::external_string_table_, v8::internal::Isolate::global_handles(), heap(), is_code_flushing_enabled(), v8::internal::Heap::isolate(), v8::internal::ExternalStringTable::Iterate(), v8::internal::HashTable< Derived, Shape, Key >::IterateElements(), v8::internal::StringTableCleaner< finalize_external_strings >::PointersRemoved(), v8::internal::CodeFlusher::ProcessCandidates(), ProcessMapCaches(), v8::internal::Heap::ProcessWeakReferences(), v8::internal::GlobalHandles::RemoveImplicitRefGroups(), and v8::internal::GlobalHandles::RemoveObjectGroups().

Referenced by MarkLiveObjects().


◆ AreSweeperThreadsActivated()

bool v8::internal::MarkCompactCollector::AreSweeperThreadsActivated ( )

Definition at line 646 of file mark-compact.cc.

646  {
647  return isolate()->sweeper_threads() != NULL || FLAG_job_based_sweeping;
648 }
SweeperThread ** sweeper_threads()
Definition: isolate.h:1067

References isolate(), NULL, and v8::internal::Isolate::sweeper_threads().

Referenced by EnsureSweepingCompleted().


◆ ClearDependentCode()

void v8::internal::MarkCompactCollector::ClearDependentCode ( DependentCode *  dependent_code )
private

Definition at line 2706 of file mark-compact.cc.

2706  {
2707  DisallowHeapAllocation no_allocation;
2708  DependentCode::GroupStartIndexes starts(entries);
2709  int number_of_entries = starts.number_of_entries();
2710  if (number_of_entries == 0) return;
2712  if (starts.at(g) != starts.at(g + 1)) {
2713  int i = starts.at(g);
2714  DCHECK(i + 1 == starts.at(g + 1));
2715  Object* head = entries->object_at(i);
2716  ClearDependentICList(head);
2717  }
2719  for (int i = starts.at(g); i < starts.at(g + 1); i++) {
2720  // If the entry is compilation info then the map must be alive,
2721  // and ClearDependentCode shouldn't be called.
2722  DCHECK(entries->is_code_at(i));
2723  Code* code = entries->code_at(i);
2724  if (IsMarked(code) && !code->marked_for_deoptimization()) {
2726  code, static_cast<DependentCode::DependencyGroup>(g));
2727  code->InvalidateEmbeddedObjects();
2728  have_code_to_deoptimize_ = true;
2729  }
2730  }
2731  for (int i = 0; i < number_of_entries; i++) {
2732  entries->clear_at(i);
2733  }
2734 }
static void SetMarkedForDeoptimization(Code *code, DependencyGroup group)
Definition: objects.cc:11602
void ClearDependentICList(Object *head)
static bool IsMarked(Object *obj)
#define DCHECK(condition)
Definition: logging.h:205
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:110

References v8::internal::DependentCode::GroupStartIndexes::at(), v8::internal::DependentCode::clear_at(), ClearDependentICList(), v8::internal::DependentCode::code_at(), DCHECK, have_code_to_deoptimize_, v8::internal::Code::InvalidateEmbeddedObjects(), v8::internal::DependentCode::is_code_at(), IsMarked(), v8::internal::DependentCode::kWeakCodeGroup, v8::internal::DependentCode::kWeakICGroup, v8::internal::Code::marked_for_deoptimization(), v8::internal::DependentCode::GroupStartIndexes::number_of_entries(), v8::internal::DependentCode::object_at(), and v8::internal::DependentCode::SetMarkedForDeoptimization().

Referenced by ClearNonLiveReferences().


◆ ClearDependentICList()

void v8::internal::MarkCompactCollector::ClearDependentICList ( Object *  head )
private

Definition at line 2691 of file mark-compact.cc.

2691  {
2692  Object* current = head;
2693  Object* undefined = heap()->undefined_value();
2694  while (current != undefined) {
2695  Code* code = Code::cast(current);
2696  if (IsMarked(code)) {
2697  DCHECK(code->is_weak_stub());
2698  IC::InvalidateMaps(code);
2699  }
2700  current = code->next_code_link();
2701  code->set_next_code_link(undefined);
2702  }
2703 }
static void InvalidateMaps(Code *stub)
Definition: ic.cc:460

References DCHECK, heap(), v8::internal::IC::InvalidateMaps(), v8::internal::Code::is_weak_stub(), and IsMarked().

Referenced by ClearDependentCode().


◆ ClearMapBackPointer()

bool v8::internal::MarkCompactCollector::ClearMapBackPointer ( Map *  map )
private

Definition at line 2583 of file mark-compact.cc.

2583  {
2584  if (Marking::MarkBitFrom(target).Get()) return false;
2585  target->SetBackPointer(heap_->undefined_value(), SKIP_WRITE_BARRIER);
2586  return true;
2587 }
@ SKIP_WRITE_BARRIER
Definition: objects.h:235

References heap_, v8::internal::Map::SetBackPointer(), and v8::internal::SKIP_WRITE_BARRIER.

Referenced by ClearMapTransitions().


◆ ClearMapTransitions()

void v8::internal::MarkCompactCollector::ClearMapTransitions ( Map *  map )
private

Definition at line 2590 of file mark-compact.cc.

2590  {
2591  // If there are no transitions to be cleared, return.
2592  // TODO(verwaest) Should be an assert, otherwise back pointers are not
2593  // properly cleared.
2594  if (!map->HasTransitionArray()) return;
2595 
2596  TransitionArray* t = map->transitions();
2597 
2598  int transition_index = 0;
2599 
2600  DescriptorArray* descriptors = map->instance_descriptors();
2601  bool descriptors_owner_died = false;
2602 
2603  // Compact all live descriptors to the left.
2604  for (int i = 0; i < t->number_of_transitions(); ++i) {
2605  Map* target = t->GetTarget(i);
2606  if (ClearMapBackPointer(target)) {
2607  if (target->instance_descriptors() == descriptors) {
2608  descriptors_owner_died = true;
2609  }
2610  } else {
2611  if (i != transition_index) {
2612  Name* key = t->GetKey(i);
2613  t->SetKey(transition_index, key);
2614  Object** key_slot = t->GetKeySlot(transition_index);
2615  RecordSlot(key_slot, key_slot, key);
2616  // Target slots do not need to be recorded since maps are not compacted.
2617  t->SetTarget(transition_index, t->GetTarget(i));
2618  }
2619  transition_index++;
2620  }
2621  }
2622 
2623  // If there are no transitions to be cleared, return.
2624  // TODO(verwaest) Should be an assert, otherwise back pointers are not
2625  // properly cleared.
2626  if (transition_index == t->number_of_transitions()) return;
2627 
2628  int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2629 
2630  if (descriptors_owner_died) {
2631  if (number_of_own_descriptors > 0) {
2632  TrimDescriptorArray(map, descriptors, number_of_own_descriptors);
2633  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
2634  map->set_owns_descriptors(true);
2635  } else {
2636  DCHECK(descriptors == heap_->empty_descriptor_array());
2637  }
2638  }
2639 
2640  // Note that we never eliminate a transition array, though we might right-trim
2641  // such that number_of_transitions() == 0. If this assumption changes,
2642  // TransitionArray::CopyInsert() will need to deal with the case that a
2643  // transition array disappeared during GC.
2644  int trim = t->number_of_transitions() - transition_index;
2645  if (trim > 0) {
2647  t, t->IsSimpleTransition() ? trim
2649  }
2650  DCHECK(map->HasTransitionArray());
2651 }
void RightTrimFixedArray(FixedArrayBase *obj, int elements_to_trim)
Definition: heap.cc:3322
void TrimDescriptorArray(Map *map, DescriptorArray *descriptors, int number_of_own_descriptors)
static const int kTransitionSize
Definition: transitions.h:139

References ClearMapBackPointer(), DCHECK, v8::internal::Heap::FROM_GC, v8::internal::TransitionArray::GetKey(), v8::internal::TransitionArray::GetKeySlot(), v8::internal::TransitionArray::GetTarget(), heap_, v8::internal::TransitionArray::IsSimpleTransition(), v8::internal::TransitionArray::kTransitionSize, map, v8::internal::DescriptorArray::number_of_descriptors(), v8::internal::TransitionArray::number_of_transitions(), v8::internal::Heap::RightTrimFixedArray(), v8::internal::TransitionArray::SetKey(), v8::internal::TransitionArray::SetTarget(), and TrimDescriptorArray().

Referenced by ClearNonLiveMapTransitions().

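ClearMapTransitions uses the classic in-place left-compaction idiom: live entries are shifted down over dead ones in a single pass, then the array tail is trimmed. A minimal sketch of the same bookkeeping (Transition and CompactLive are illustrative stand-ins, not V8 types):

    #include <vector>

    struct Transition { bool target_alive; int key; };

    // Returns the new length; surviving entries keep their relative order,
    // exactly like the transition_index counter in the listing above.
    size_t CompactLive(std::vector<Transition>& transitions) {
      size_t write = 0;
      for (size_t read = 0; read < transitions.size(); ++read) {
        if (!transitions[read].target_alive) continue;  // back pointer was cleared
        if (read != write) transitions[write] = transitions[read];
        ++write;
      }
      transitions.resize(write);  // stand-in for Heap::RightTrimFixedArray()
      return write;
    }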

◆ ClearMarkbits()

void v8::internal::MarkCompactCollector::ClearMarkbits ( )

Definition at line 516 of file mark-compact.cc.

516  {
524 
525  LargeObjectIterator it(heap_->lo_space());
526  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
527  MarkBit mark_bit = Marking::MarkBitFrom(obj);
528  mark_bit.Clear();
529  mark_bit.Next().Clear();
530  Page::FromAddress(obj->address())->ResetProgressBar();
531  Page::FromAddress(obj->address())->ResetLiveBytes();
532  }
533 }
OldSpace * old_pointer_space()
Definition: heap.h:594
PropertyCellSpace * property_cell_space()
Definition: heap.h:599
OldSpace * code_space()
Definition: heap.h:596
LargeObjectSpace * lo_space()
Definition: heap.h:600
CellSpace * cell_space()
Definition: heap.h:598
OldSpace * old_data_space()
Definition: heap.h:595
NewSpace * new_space()
Definition: heap.h:593
MapSpace * map_space()
Definition: heap.h:597
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:276
static void ClearMarkbitsInNewSpace(NewSpace *space)
static void ClearMarkbitsInPagedSpace(PagedSpace *space)

References v8::internal::Heap::cell_space(), v8::internal::MarkBit::Clear(), v8::internal::ClearMarkbitsInNewSpace(), v8::internal::ClearMarkbitsInPagedSpace(), v8::internal::Heap::code_space(), v8::internal::MemoryChunk::FromAddress(), heap_, v8::internal::Heap::lo_space(), v8::internal::Heap::map_space(), v8::internal::Heap::new_space(), v8::internal::MarkBit::Next(), v8::internal::LargeObjectIterator::Next(), NULL, v8::internal::Heap::old_data_space(), v8::internal::Heap::old_pointer_space(), v8::internal::Heap::property_cell_space(), v8::internal::MemoryChunk::ResetLiveBytes(), and v8::internal::MemoryChunk::ResetProgressBar().

Referenced by Prepare().


◆ ClearNonLiveDependentCode()

void v8::internal::MarkCompactCollector::ClearNonLiveDependentCode ( DependentCode *  dependent_code )
private

Definition at line 2776 of file mark-compact.cc.

2776  {
2777  DisallowHeapAllocation no_allocation;
2778  DependentCode::GroupStartIndexes starts(entries);
2779  int number_of_entries = starts.number_of_entries();
2780  if (number_of_entries == 0) return;
2781  int new_number_of_entries = 0;
2782  // Go through all groups, remove dead codes and compact.
2783  for (int g = 0; g < DependentCode::kGroupCount; g++) {
2784  int survived = ClearNonLiveDependentCodeInGroup(
2785  entries, g, starts.at(g), starts.at(g + 1), new_number_of_entries);
2786  new_number_of_entries += survived;
2787  }
2788  for (int i = new_number_of_entries; i < number_of_entries; i++) {
2789  entries->clear_at(i);
2790  }
2791 }
static const int kGroupCount
Definition: objects.h:5534
int ClearNonLiveDependentCodeInGroup(DependentCode *dependent_code, int group, int start, int end, int new_start)

References v8::internal::DependentCode::GroupStartIndexes::at(), v8::internal::DependentCode::clear_at(), ClearNonLiveDependentCodeInGroup(), v8::internal::DependentCode::kGroupCount, and v8::internal::DependentCode::GroupStartIndexes::number_of_entries().

Referenced by ClearNonLiveReferences().


◆ ClearNonLiveDependentCodeInGroup()

int v8::internal::MarkCompactCollector::ClearNonLiveDependentCodeInGroup ( DependentCode *  dependent_code,
int  group,
int  start,
int  end,
int  new_start 
)
private

Definition at line 2737 of file mark-compact.cc.

2738  {
2739  int survived = 0;
2740  if (group == DependentCode::kWeakICGroup) {
2741  // Dependent weak IC stubs form a linked list and only the head is stored
2742  // in the dependent code array.
2743  if (start != end) {
2744  DCHECK(start + 1 == end);
2745  Object* old_head = entries->object_at(start);
2746  MarkCompactWeakObjectRetainer retainer;
2747  Object* head = VisitWeakList<Code>(heap(), old_head, &retainer);
2748  entries->set_object_at(new_start, head);
2749  Object** slot = entries->slot_at(new_start);
2750  RecordSlot(slot, slot, head);
2751  // We do not compact this group even if the head is undefined,
2752  // more dependent ICs are likely to be added later.
2753  survived = 1;
2754  }
2755  } else {
2756  for (int i = start; i < end; i++) {
2757  Object* obj = entries->object_at(i);
2758  DCHECK(obj->IsCode() || IsMarked(obj));
2759  if (IsMarked(obj) &&
2760  (!obj->IsCode() || !WillBeDeoptimized(Code::cast(obj)))) {
2761  if (new_start + survived != i) {
2762  entries->set_object_at(new_start + survived, obj);
2763  }
2764  Object** slot = entries->slot_at(new_start + survived);
2765  RecordSlot(slot, slot, obj);
2766  survived++;
2767  }
2768  }
2769  }
2770  entries->set_number_of_entries(
2771  static_cast<DependentCode::DependencyGroup>(group), survived);
2772  return survived;
2773 }
template Object * VisitWeakList< Code >(Heap *heap, Object *list, WeakObjectRetainer *retainer)

References DCHECK, heap(), IsMarked(), v8::internal::DependentCode::kWeakICGroup, v8::internal::DependentCode::object_at(), v8::internal::DependentCode::set_number_of_entries(), v8::internal::DependentCode::set_object_at(), v8::internal::DependentCode::slot_at(), v8::internal::VisitWeakList< Code >(), and WillBeDeoptimized().

Referenced by ClearNonLiveDependentCode().


◆ ClearNonLiveMapTransitions()

void v8::internal::MarkCompactCollector::ClearNonLiveMapTransitions ( Map *  map,
MarkBit  map_mark 
)
private

Definition at line 2565 of file mark-compact.cc.

2566  {
2567  Object* potential_parent = map->GetBackPointer();
2568  if (!potential_parent->IsMap()) return;
2569  Map* parent = Map::cast(potential_parent);
2570 
2571  // Follow back pointer, check whether we are dealing with a map transition
2572  // from a live map to a dead path and in case clear transitions of parent.
2573  bool current_is_alive = map_mark.Get();
2574  bool parent_is_alive = Marking::MarkBitFrom(parent).Get();
2575  if (!current_is_alive && parent_is_alive) {
2576  ClearMapTransitions(parent);
2577  }
2578 }

References ClearMapTransitions(), v8::internal::MarkBit::Get(), and map.

Referenced by ClearNonLiveReferences().


◆ ClearNonLivePrototypeTransitions()

void v8::internal::MarkCompactCollector::ClearNonLivePrototypeTransitions ( Map *  map )
private

Definition at line 2526 of file mark-compact.cc.

2526  {
2527  int number_of_transitions = map->NumberOfProtoTransitions();
2528  FixedArray* prototype_transitions = map->GetPrototypeTransitions();
2529 
2530  int new_number_of_transitions = 0;
2531  const int header = Map::kProtoTransitionHeaderSize;
2532  const int proto_offset = header + Map::kProtoTransitionPrototypeOffset;
2533  const int map_offset = header + Map::kProtoTransitionMapOffset;
2534  const int step = Map::kProtoTransitionElementsPerEntry;
2535  for (int i = 0; i < number_of_transitions; i++) {
2536  Object* prototype = prototype_transitions->get(proto_offset + i * step);
2537  Object* cached_map = prototype_transitions->get(map_offset + i * step);
2538  if (IsMarked(prototype) && IsMarked(cached_map)) {
2539  DCHECK(!prototype->IsUndefined());
2540  int proto_index = proto_offset + new_number_of_transitions * step;
2541  int map_index = map_offset + new_number_of_transitions * step;
2542  if (new_number_of_transitions != i) {
2543  prototype_transitions->set(proto_index, prototype,
2545  prototype_transitions->set(map_index, cached_map, SKIP_WRITE_BARRIER);
2546  }
2547  Object** slot = prototype_transitions->RawFieldOfElementAt(proto_index);
2548  RecordSlot(slot, slot, prototype);
2549  new_number_of_transitions++;
2550  }
2551  }
2552 
2553  if (new_number_of_transitions != number_of_transitions) {
2554  map->SetNumberOfProtoTransitions(new_number_of_transitions);
2555  }
2556 
2557  // Fill slots that became free with undefined value.
2558  for (int i = new_number_of_transitions * step;
2559  i < number_of_transitions * step; i++) {
2560  prototype_transitions->set_undefined(header + i);
2561  }
2562 }
static const int kProtoTransitionMapOffset
Definition: objects.h:5905
static const int kProtoTransitionElementsPerEntry
Definition: objects.h:5903
static const int kProtoTransitionPrototypeOffset
Definition: objects.h:5904
static const int kProtoTransitionHeaderSize
Definition: objects.h:5901
@ UPDATE_WRITE_BARRIER
Definition: objects.h:235

References DCHECK, v8::internal::FixedArray::get(), IsMarked(), v8::internal::Map::kProtoTransitionElementsPerEntry, v8::internal::Map::kProtoTransitionHeaderSize, v8::internal::Map::kProtoTransitionMapOffset, v8::internal::Map::kProtoTransitionPrototypeOffset, map, v8::internal::FixedArray::RawFieldOfElementAt(), v8::internal::FixedArray::set(), v8::internal::FixedArray::set_undefined(), v8::internal::SKIP_WRITE_BARRIER, and v8::internal::UPDATE_WRITE_BARRIER.

Referenced by ClearNonLiveReferences().

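The prototype transition table is a flat FixedArray: a fixed-size header followed by fixed-width entries, each holding a (prototype, cached map) pair. The slot arithmetic in the listing reduces to the helpers below; the concrete constant values here are illustrative assumptions, not necessarily V8's:

    const int kHeaderSize = 1;        // stand-in for Map::kProtoTransitionHeaderSize
    const int kElementsPerEntry = 2;  // stand-in for kProtoTransitionElementsPerEntry
    const int kPrototypeOffset = 0;   // stand-in for kProtoTransitionPrototypeOffset
    const int kMapOffset = 1;         // stand-in for kProtoTransitionMapOffset

    // Slot of entry i's prototype: header + offset + i * step, matching the
    // proto_offset + i * step expression in the loop above.
    int PrototypeSlot(int i) { return kHeaderSize + kPrototypeOffset + i * kElementsPerEntry; }
    int CachedMapSlot(int i)  { return kHeaderSize + kMapOffset + i * kElementsPerEntry; }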

◆ ClearNonLiveReferences()

void v8::internal::MarkCompactCollector::ClearNonLiveReferences ( )
private

Definition at line 2445 of file mark-compact.cc.

2445  {
2446  // Iterate over the map space, setting map transitions that go from
2447  // a marked map to an unmarked map to null transitions. This action
2448  // is carried out only on maps of JSObjects and related subtypes.
2449  HeapObjectIterator map_iterator(heap()->map_space());
2450  for (HeapObject* obj = map_iterator.Next(); obj != NULL;
2451  obj = map_iterator.Next()) {
2452  Map* map = Map::cast(obj);
2453 
2454  if (!map->CanTransition()) continue;
2455 
2456  MarkBit map_mark = Marking::MarkBitFrom(map);
2458  ClearNonLiveMapTransitions(map, map_mark);
2459 
2460  if (map_mark.Get()) {
2461  ClearNonLiveDependentCode(map->dependent_code());
2462  } else {
2463  ClearDependentCode(map->dependent_code());
2464  map->set_dependent_code(DependentCode::cast(heap()->empty_fixed_array()));
2465  }
2466  }
2467 
2468  // Iterate over property cell space, removing dependent code that is not
2469  // otherwise kept alive by strong references.
2470  HeapObjectIterator cell_iterator(heap_->property_cell_space());
2471  for (HeapObject* cell = cell_iterator.Next(); cell != NULL;
2472  cell = cell_iterator.Next()) {
2473  if (IsMarked(cell)) {
2474  ClearNonLiveDependentCode(PropertyCell::cast(cell)->dependent_code());
2475  }
2476  }
2477 
2478  // Iterate over allocation sites, removing dependent code that is not
2479  // otherwise kept alive by strong references.
2480  Object* undefined = heap()->undefined_value();
2481  for (Object* site = heap()->allocation_sites_list(); site != undefined;
2482  site = AllocationSite::cast(site)->weak_next()) {
2483  if (IsMarked(site)) {
2484  ClearNonLiveDependentCode(AllocationSite::cast(site)->dependent_code());
2485  }
2486  }
2487 
2488  if (heap_->weak_object_to_code_table()->IsHashTable()) {
2489  WeakHashTable* table =
2490  WeakHashTable::cast(heap_->weak_object_to_code_table());
2491  uint32_t capacity = table->Capacity();
2492  for (uint32_t i = 0; i < capacity; i++) {
2493  uint32_t key_index = table->EntryToIndex(i);
2494  Object* key = table->get(key_index);
2495  if (!table->IsKey(key)) continue;
2496  uint32_t value_index = table->EntryToValueIndex(i);
2497  Object* value = table->get(value_index);
2498  if (key->IsCell() && !IsMarked(key)) {
2499  Cell* cell = Cell::cast(key);
2500  Object* object = cell->value();
2501  if (IsMarked(object)) {
2502  MarkBit mark = Marking::MarkBitFrom(cell);
2503  SetMark(cell, mark);
2504  Object** value_slot = HeapObject::RawField(cell, Cell::kValueOffset);
2505  RecordSlot(value_slot, value_slot, *value_slot);
2506  }
2507  }
2508  if (IsMarked(key)) {
2509  if (!IsMarked(value)) {
2510  HeapObject* obj = HeapObject::cast(value);
2511  MarkBit mark = Marking::MarkBitFrom(obj);
2512  SetMark(obj, mark);
2513  }
2514  ClearNonLiveDependentCode(DependentCode::cast(value));
2515  } else {
2516  ClearDependentCode(DependentCode::cast(value));
2517  table->set(key_index, heap_->the_hole_value());
2518  table->set(value_index, heap_->the_hole_value());
2519  table->ElementRemoved();
2520  }
2521  }
2522  }
2523 }
static const int kValueOffset
Definition: objects.h:9446
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:1311
Object * weak_object_to_code_table()
Definition: heap.h:806
void ClearNonLiveMapTransitions(Map *map, MarkBit map_mark)
void ClearNonLiveDependentCode(DependentCode *dependent_code)
void ClearNonLivePrototypeTransitions(Map *map)
void ClearDependentCode(DependentCode *dependent_code)

References v8::internal::HashTable< Derived, Shape, Key >::Capacity(), ClearDependentCode(), ClearNonLiveDependentCode(), ClearNonLiveMapTransitions(), ClearNonLivePrototypeTransitions(), v8::internal::dependent_code, v8::internal::HashTable< Derived, Shape, Key >::ElementRemoved(), v8::internal::HashTable< Derived, Shape, Key >::EntryToIndex(), v8::internal::WeakHashTable::EntryToValueIndex(), v8::internal::MarkBit::Get(), v8::internal::FixedArray::get(), heap(), heap_, v8::internal::HashTable< Derived, Shape, Key >::IsKey(), IsMarked(), v8::internal::Cell::kValueOffset, map, v8::internal::HeapObjectIterator::Next(), NULL, v8::internal::Heap::property_cell_space(), v8::internal::HeapObject::RawField(), v8::internal::FixedArray::set(), and v8::internal::Heap::weak_object_to_code_table().

Referenced by CollectGarbage().

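The weak hash table walk above relies on each logical entry occupying consecutive slots of the backing array, with EntryToIndex() and EntryToValueIndex() mapping an entry number to its key and value slots. A simplified sketch of that access pattern; the two-slot layout and hole encoding are assumptions for illustration, not V8's exact table layout:

    #include <vector>

    struct Slot { bool is_hole; int payload; };

    const int kEntrySize = 2;  // key slot followed by value slot (assumed)

    int EntryToIndex(int entry) { return entry * kEntrySize; }
    int EntryToValueIndex(int entry) { return entry * kEntrySize + 1; }

    void VisitLiveEntries(const std::vector<Slot>& table,
                          void (*visit)(const Slot& key, const Slot& value)) {
      int capacity = static_cast<int>(table.size()) / kEntrySize;
      for (int i = 0; i < capacity; i++) {
        const Slot& key = table[EntryToIndex(i)];
        if (key.is_hole) continue;  // analogous to !table->IsKey(key)
        visit(key, table[EntryToValueIndex(i)]);
      }
    }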

◆ ClearWeakCollections()

void v8::internal::MarkCompactCollector::ClearWeakCollections ( )
private

Definition at line 2822 of file mark-compact.cc.

2822  {
2823  GCTracer::Scope gc_scope(heap()->tracer(),
2825  Object* weak_collection_obj = heap()->encountered_weak_collections();
2826  while (weak_collection_obj != Smi::FromInt(0)) {
2827  JSWeakCollection* weak_collection =
2828  reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2829  DCHECK(MarkCompactCollector::IsMarked(weak_collection));
2830  if (weak_collection->table()->IsHashTable()) {
2831  ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2832  for (int i = 0; i < table->Capacity(); i++) {
2833  HeapObject* key = HeapObject::cast(table->KeyAt(i));
2834  if (!MarkCompactCollector::IsMarked(key)) {
2835  table->RemoveEntry(i);
2836  }
2837  }
2838  }
2839  weak_collection_obj = weak_collection->next();
2840  weak_collection->set_next(heap()->undefined_value());
2841  }
2843 }

References v8::internal::HashTable< Derived, Shape, Key >::Capacity(), DCHECK, v8::internal::Heap::encountered_weak_collections(), v8::internal::Smi::FromInt(), heap(), IsMarked(), v8::internal::HashTable< Derived, Shape, Key >::KeyAt(), v8::internal::GCTracer::Scope::MC_WEAKCOLLECTION_CLEAR, v8::internal::ObjectHashTable::RemoveEntry(), and v8::internal::Heap::set_encountered_weak_collections().

Referenced by CollectGarbage().


◆ code_flusher()

CodeFlusher* v8::internal::MarkCompactCollector::code_flusher ( )
inline

Definition at line 545 of file mark-compact.h.

545 { return code_flusher_; }

References code_flusher_.

Referenced by v8::internal::Context::AddOptimizedFunction(), v8::internal::SharedFunctionInfo::ClearOptimizedCodeMap(), v8::internal::SharedFunctionInfo::ReplaceCode(), and v8::internal::Heap::Scavenge().


◆ CollectEvacuationCandidates()

void v8::internal::MarkCompactCollector::CollectEvacuationCandidates ( PagedSpace *  space )

Definition at line 768 of file mark-compact.cc.

768  {
769  DCHECK(space->identity() == OLD_POINTER_SPACE ||
770  space->identity() == OLD_DATA_SPACE ||
771  space->identity() == CODE_SPACE);
772 
773  static const int kMaxMaxEvacuationCandidates = 1000;
774  int number_of_pages = space->CountTotalPages();
775  int max_evacuation_candidates =
776  static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1);
777 
778  if (FLAG_stress_compaction || FLAG_always_compact) {
779  max_evacuation_candidates = kMaxMaxEvacuationCandidates;
780  }
781 
782  class Candidate {
783  public:
784  Candidate() : fragmentation_(0), page_(NULL) {}
785  Candidate(int f, Page* p) : fragmentation_(f), page_(p) {}
786 
787  int fragmentation() { return fragmentation_; }
788  Page* page() { return page_; }
789 
790  private:
791  int fragmentation_;
792  Page* page_;
793  };
794 
795  enum CompactionMode { COMPACT_FREE_LISTS, REDUCE_MEMORY_FOOTPRINT };
796 
797  CompactionMode mode = COMPACT_FREE_LISTS;
798 
799  intptr_t reserved = number_of_pages * space->AreaSize();
800  intptr_t over_reserved = reserved - space->SizeOfObjects();
801  static const intptr_t kFreenessThreshold = 50;
802 
803  if (reduce_memory_footprint_ && over_reserved >= space->AreaSize()) {
804  // If reduction of memory footprint was requested, we are aggressive
805  // about choosing pages to free. We expect that half-empty pages
806  // are easier to compact so slightly bump the limit.
807  mode = REDUCE_MEMORY_FOOTPRINT;
808  max_evacuation_candidates += 2;
809  }
810 
811 
812  if (over_reserved > reserved / 3 && over_reserved >= 2 * space->AreaSize()) {
813  // If over-usage is very high (more than a third of the space), we
814  // try to free all mostly empty pages. We expect that almost empty
815  // pages are even easier to compact so bump the limit even more.
816  mode = REDUCE_MEMORY_FOOTPRINT;
817  max_evacuation_candidates *= 2;
818  }
819 
820  if (FLAG_trace_fragmentation && mode == REDUCE_MEMORY_FOOTPRINT) {
821  PrintF(
822  "Estimated over reserved memory: %.1f / %.1f MB (threshold %d), "
823  "evacuation candidate limit: %d\n",
824  static_cast<double>(over_reserved) / MB,
825  static_cast<double>(reserved) / MB,
826  static_cast<int>(kFreenessThreshold), max_evacuation_candidates);
827  }
828 
829  intptr_t estimated_release = 0;
830 
831  Candidate candidates[kMaxMaxEvacuationCandidates];
832 
833  max_evacuation_candidates =
834  Min(kMaxMaxEvacuationCandidates, max_evacuation_candidates);
835 
836  int count = 0;
837  int fragmentation = 0;
838  Candidate* least = NULL;
839 
840  PageIterator it(space);
841  if (it.has_next()) it.next(); // Never compact the first page.
842 
843  while (it.has_next()) {
844  Page* p = it.next();
845  p->ClearEvacuationCandidate();
846 
847  if (FLAG_stress_compaction) {
848  unsigned int counter = space->heap()->ms_count();
849  uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
850  if ((counter & 1) == (page_number & 1)) fragmentation = 1;
851  } else if (mode == REDUCE_MEMORY_FOOTPRINT) {
852  // Don't try to release too many pages.
853  if (estimated_release >= over_reserved) {
854  continue;
855  }
856 
857  intptr_t free_bytes = 0;
858 
859  if (!p->WasSwept()) {
860  free_bytes = (p->area_size() - p->LiveBytes());
861  } else {
862  PagedSpace::SizeStats sizes;
863  space->ObtainFreeListStatistics(p, &sizes);
864  free_bytes = sizes.Total();
865  }
866 
867  int free_pct = static_cast<int>(free_bytes * 100) / p->area_size();
868 
869  if (free_pct >= kFreenessThreshold) {
870  estimated_release += free_bytes;
871  fragmentation = free_pct;
872  } else {
873  fragmentation = 0;
874  }
875 
876  if (FLAG_trace_fragmentation) {
877  PrintF("%p [%s]: %d (%.2f%%) free %s\n", reinterpret_cast<void*>(p),
878  AllocationSpaceName(space->identity()),
879  static_cast<int>(free_bytes),
880  static_cast<double>(free_bytes * 100) / p->area_size(),
881  (fragmentation > 0) ? "[fragmented]" : "");
882  }
883  } else {
884  fragmentation = FreeListFragmentation(space, p);
885  }
886 
887  if (fragmentation != 0) {
888  if (count < max_evacuation_candidates) {
889  candidates[count++] = Candidate(fragmentation, p);
890  } else {
891  if (least == NULL) {
892  for (int i = 0; i < max_evacuation_candidates; i++) {
893  if (least == NULL ||
894  candidates[i].fragmentation() < least->fragmentation()) {
895  least = candidates + i;
896  }
897  }
898  }
899  if (least->fragmentation() < fragmentation) {
900  *least = Candidate(fragmentation, p);
901  least = NULL;
902  }
903  }
904  }
905  }
906 
907  for (int i = 0; i < count; i++) {
908  AddEvacuationCandidate(candidates[i].page());
909  }
910 
911  if (count > 0 && FLAG_trace_fragmentation) {
912  PrintF("Collected %d evacuation candidates for space %s\n", count,
913  AllocationSpaceName(space->identity()));
914  }
915 }
const int kPageSizeBits
Definition: build_config.h:159
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
void PrintF(const char *format,...)
Definition: utils.cc:80
@ OLD_DATA_SPACE
Definition: globals.h:361
@ OLD_POINTER_SPACE
Definition: globals.h:360
static int FreeListFragmentation(PagedSpace *space, Page *p)
const int MB
Definition: globals.h:107
const char * AllocationSpaceName(AllocationSpace space)

References AddEvacuationCandidate(), v8::internal::AllocationSpaceName(), v8::internal::MemoryChunk::area_size(), v8::internal::MemoryChunk::ClearEvacuationCandidate(), v8::internal::CODE_SPACE, DCHECK, v8::internal::FreeListFragmentation(), kPageSizeBits, v8::internal::MemoryChunk::LiveBytes(), v8::internal::MB, v8::internal::Min(), mode(), NULL, v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::PrintF(), reduce_memory_footprint_, space(), v8::internal::PagedSpace::SizeStats::Total(), and v8::internal::Page::WasSwept().

Referenced by StartCompaction().

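Two heuristics from the listing are worth pulling out: the candidate cap grows with the square root of the page count (capped at 1000), and under REDUCE_MEMORY_FOOTPRINT a page qualifies only when its free fraction reaches the 50% kFreenessThreshold. A compact restatement of just those two computations, as a reading aid rather than the full selection logic:

    #include <cmath>
    #include <cstdint>

    const int kMaxMaxEvacuationCandidates = 1000;
    const int kFreenessThreshold = 50;  // percent

    int MaxEvacuationCandidates(int number_of_pages, bool stress_or_always_compact) {
      if (stress_or_always_compact) return kMaxMaxEvacuationCandidates;
      int limit = static_cast<int>(std::sqrt(number_of_pages / 2.0) + 1);
      return limit < kMaxMaxEvacuationCandidates ? limit : kMaxMaxEvacuationCandidates;
    }

    // Fragmentation score of a page: its free percentage if that clears the
    // threshold, otherwise 0 (the page is not worth evacuating).
    int FragmentationPercent(intptr_t free_bytes, intptr_t area_size) {
      int free_pct = static_cast<int>(free_bytes * 100 / area_size);
      return free_pct >= kFreenessThreshold ? free_pct : 0;
    }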

◆ CollectGarbage()

void v8::internal::MarkCompactCollector::CollectGarbage ( )

Definition at line 389 of file mark-compact.cc.

389  {
390  // Make sure that Prepare() has been called. The individual steps below will
391  // update the state as they proceed.
392  DCHECK(state_ == PREPARE_GC);
393 
394  MarkLiveObjects();
396 
397  if (FLAG_collect_maps) ClearNonLiveReferences();
398 
400 
401 #ifdef VERIFY_HEAP
402  if (FLAG_verify_heap) {
403  VerifyMarking(heap_);
404  }
405 #endif
406 
407  SweepSpaces();
408 
409 #ifdef DEBUG
410  if (FLAG_verify_native_context_separation) {
411  VerifyNativeContextSeparation(heap_);
412  }
413 #endif
414 
415 #ifdef VERIFY_HEAP
416  if (heap()->weak_embedded_objects_verification_enabled()) {
417  VerifyWeakEmbeddedObjectsInCode();
418  }
419  if (FLAG_collect_maps && FLAG_omit_map_checks_for_leaf_maps) {
420  VerifyOmittedMapChecks();
421  }
422 #endif
423 
424  Finish();
425 
428  } else {
431  }
432 }
IncrementalMarking * incremental_marking()
Definition: heap.h:1205
@ EVEN_MARKING_PARITY
Definition: objects.h:300

References ClearNonLiveReferences(), ClearWeakCollections(), DCHECK, v8::internal::EVEN_MARKING_PARITY, Finish(), heap(), heap_, v8::internal::Heap::incremental_marking(), v8::internal::IncrementalMarking::IsStopped(), marking_parity_, MarkLiveObjects(), v8::internal::ODD_MARKING_PARITY, and SweepSpaces().

Referenced by v8::internal::Heap::MarkCompact().

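The listing fixes the phase order of a full mark-compact cycle: marking, then (under FLAG_collect_maps) clearing of non-live references, clearing of weak collections, sweeping, and finish plus the marking-parity flip. A stub outline of that ordering as a reading aid; the phase functions are empty placeholders, not V8 code:

    void MarkLiveObjectsPhase() {}         // roots + marking deque drain
    void ClearNonLiveReferencesPhase() {}  // map transitions, dependent code
    void ClearWeakCollectionsPhase() {}    // drop dead keys from weak collections
    void SweepSpacesPhase() {}             // sweeping and evacuation
    void FinishPhase() {}                  // wrap-up; marking parity flips after

    void CollectGarbageOutline(bool collect_maps) {
      MarkLiveObjectsPhase();
      if (collect_maps) ClearNonLiveReferencesPhase();  // FLAG_collect_maps
      ClearWeakCollectionsPhase();
      SweepSpacesPhase();
      FinishPhase();
    }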

◆ DiscoverAndEvacuateBlackObjectsOnPage()

int v8::internal::MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage ( NewSpace *  new_space,
NewSpacePage *  p 
)
private

Definition at line 1947 of file mark-compact.cc.

1948  {
1949  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
1950  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
1951  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
1952  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
1953 
1954  MarkBit::CellType* cells = p->markbits()->cells();
1955  int survivors_size = 0;
1956 
1957  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
1958  Address cell_base = it.CurrentCellBase();
1959  MarkBit::CellType* cell = it.CurrentCell();
1960 
1961  MarkBit::CellType current_cell = *cell;
1962  if (current_cell == 0) continue;
1963 
1964  int offset = 0;
1965  while (current_cell != 0) {
1966  int trailing_zeros = base::bits::CountTrailingZeros32(current_cell);
1967  current_cell >>= trailing_zeros;
1968  offset += trailing_zeros;
1969  Address address = cell_base + offset * kPointerSize;
1970  HeapObject* object = HeapObject::FromAddress(address);
1971 
1972  int size = object->Size();
1973  survivors_size += size;
1974 
1976 
1977  offset++;
1978  current_cell >>= 1;
1979 
1980  // TODO(hpayer): Refactor EvacuateObject and call this function instead.
1981  if (heap()->ShouldBePromoted(object->address(), size) &&
1982  TryPromoteObject(object, size)) {
1983  continue;
1984  }
1985 
1986  AllocationResult allocation = new_space->AllocateRaw(size);
1987  if (allocation.IsRetry()) {
1988  if (!new_space->AddFreshPage()) {
1989  // Shouldn't happen. We are sweeping linearly, and to-space
1990  // has the same number of pages as from-space, so there is
1991  // always room.
1992  UNREACHABLE();
1993  }
1994  allocation = new_space->AllocateRaw(size);
1995  DCHECK(!allocation.IsRetry());
1996  }
1997  Object* target = allocation.ToObjectChecked();
1998 
1999  MigrateObject(HeapObject::cast(target), object, size, NEW_SPACE);
2001  }
2002  *cells = 0;
2003  }
2004  return survivors_size;
2005 }
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1464
void IncrementSemiSpaceCopiedObjectSize(int object_size)
Definition: heap.h:1119
@ RECORD_SCRATCHPAD_SLOT
Definition: heap.h:974
static void UpdateAllocationSiteFeedback(HeapObject *object, ScratchpadSlotMode mode)
Definition: heap-inl.h:536
uint32_t CellType
Definition: spaces.h:103
void MigrateObject(HeapObject *dst, HeapObject *src, int size, AllocationSpace to_old_space)
bool TryPromoteObject(HeapObject *object, int object_size)
static const char * kImpossibleBitPattern
Definition: mark-compact.h:37
static const char * kGreyBitPattern
Definition: mark-compact.h:53
static const char * kWhiteBitPattern
Definition: mark-compact.h:49
static const char * kBlackBitPattern
Definition: mark-compact.h:43
#define UNREACHABLE()
Definition: logging.h:30
uint32_t CountTrailingZeros32(uint32_t value)
Definition: bits.h:59
const int kPointerSize
Definition: globals.h:129
byte * Address
Definition: globals.h:101

References v8::internal::NewSpace::AddFreshPage(), v8::internal::HeapObject::address(), v8::base::bits::CountTrailingZeros32(), DCHECK, v8::internal::HeapObject::FromAddress(), heap(), v8::internal::Heap::IncrementSemiSpaceCopiedObjectSize(), v8::internal::AllocationResult::IsRetry(), v8::internal::Marking::kBlackBitPattern, v8::internal::Marking::kGreyBitPattern, v8::internal::Marking::kImpossibleBitPattern, v8::internal::kPointerSize, v8::internal::Marking::kWhiteBitPattern, v8::internal::MemoryChunk::markbits(), MigrateObject(), v8::internal::NEW_SPACE, v8::internal::Heap::RECORD_SCRATCHPAD_SLOT, size, v8::internal::AllocationResult::ToObjectChecked(), TryPromoteObject(), UNREACHABLE, and v8::internal::Heap::UpdateAllocationSiteFeedback().

Referenced by EvacuateNewSpace().

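The scanning loop above decodes a 32-bit mark cell into object start offsets: CountTrailingZeros32 jumps to the next set bit, and each bit corresponds to one word on the page. The same decoding, isolated into a standalone helper (visit and the word-index interface are illustrative; __builtin_ctz is a GCC/Clang intrinsic):

    #include <cstdint>

    uint32_t CountTrailingZeros32(uint32_t value) {
      return value == 0 ? 32 : static_cast<uint32_t>(__builtin_ctz(value));
    }

    // Calls visit once per set bit, with the word index of the object start,
    // i.e. the offset that the listing scales by kPointerSize.
    void ForEachMarkedWord(uint32_t cell, uintptr_t cell_base_word,
                           void (*visit)(uintptr_t word_index)) {
      uint32_t current_cell = cell;
      uint32_t offset = 0;
      while (current_cell != 0) {
        uint32_t trailing_zeros = CountTrailingZeros32(current_cell);
        current_cell >>= trailing_zeros;  // skip unmarked words
        offset += trailing_zeros;
        visit(cell_base_word + offset);
        offset++;
        current_cell >>= 1;               // consume the bit just handled
      }
    }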

◆ EmptyMarkingDeque()

void v8::internal::MarkCompactCollector::EmptyMarkingDeque ( )
private

Definition at line 2134 of file mark-compact.cc.

2134  {
2135  while (!marking_deque_.IsEmpty()) {
2136  HeapObject* object = marking_deque_.Pop();
2137  DCHECK(object->IsHeapObject());
2138  DCHECK(heap()->Contains(object));
2139  DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
2140 
2141  Map* map = object->map();
2142  MarkBit map_mark = Marking::MarkBitFrom(map);
2143  MarkObject(map, map_mark);
2144 
2145  MarkCompactMarkingVisitor::IterateBody(map, object);
2146  }
2147 }

References DCHECK, heap(), v8::internal::MarkingDeque::IsEmpty(), map, and marking_deque_.

Referenced by MarkLiveObjects(), v8::internal::RootMarkingVisitor::MarkObjectByPointer(), MarkRoots(), and ProcessMarkingDeque().

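This is the standard tri-color worklist drain: pop an already-black object, make sure its map is marked too, then visit the object body so unmarked children get greyed and pushed. A self-contained sketch of the pattern with a trivial object model (Obj and Drain are illustrative, not V8's visitor machinery):

    #include <vector>

    struct Obj {
      Obj* map = nullptr;
      std::vector<Obj*> fields;
      bool marked = false;
    };

    void MarkAndPush(Obj* o, std::vector<Obj*>& deque) {
      if (o && !o->marked) {
        o->marked = true;
        deque.push_back(o);
      }
    }

    void Drain(std::vector<Obj*>& deque) {
      while (!deque.empty()) {
        Obj* object = deque.back();
        deque.pop_back();
        MarkAndPush(object->map, deque);     // cf. MarkObject(map, map_mark)
        for (Obj* field : object->fields) {  // cf. IterateBody(map, object)
          MarkAndPush(field, deque);
        }
      }
    }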

◆ EnableCodeFlushing()

void v8::internal::MarkCompactCollector::EnableCodeFlushing ( bool  enable)

Definition at line 4356 of file mark-compact.cc.

4356  {
4357  if (isolate()->debug()->is_loaded() ||
4358  isolate()->debug()->has_break_points()) {
4359  enable = false;
4360  }
4361 
4362  if (enable) {
4363  if (code_flusher_ != NULL) return;
4364  code_flusher_ = new CodeFlusher(isolate());
4365  } else {
4366  if (code_flusher_ == NULL) return;
4368  delete code_flusher_;
4369  code_flusher_ = NULL;
4370  }
4371 
4372  if (FLAG_trace_code_flushing) {
4373  PrintF("[code-flushing is now %s]\n", enable ? "on" : "off");
4374  }
4375 }

References code_flusher_, v8::internal::CodeFlusher::EvictAllCandidates(), isolate(), NULL, and v8::internal::PrintF().

Referenced by AfterMarking(), v8::internal::Heap::GarbageCollectionPrologue(), and PrepareForCodeFlushing().


◆ EnsureSweepingCompleted()

void v8::internal::MarkCompactCollector::EnsureSweepingCompleted ( )

Definition at line 574 of file mark-compact.cc.

574  {
575  DCHECK(sweeping_in_progress_ == true);
576 
577  // If sweeping is not completed, we try to complete it here. If we do not
578  // have sweeper threads we have to complete since we do not have a good
579  // indicator for a swept space in that case.
581  SweepInParallel(heap()->paged_space(OLD_DATA_SPACE), 0);
582  SweepInParallel(heap()->paged_space(OLD_POINTER_SPACE), 0);
583  }
584 
585  for (int i = 0; i < isolate()->num_sweeper_threads(); i++) {
587  }
588  if (FLAG_job_based_sweeping) {
589  // Wait twice for both jobs.
592  }
594  sweeping_in_progress_ = false;
595  RefillFreeList(heap()->paged_space(OLD_DATA_SPACE));
596  RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE));
599 
600 #ifdef VERIFY_HEAP
601  if (FLAG_verify_heap) {
602  VerifyEvacuation(heap_);
603  }
604 #endif
605 }

References AreSweeperThreadsActivated(), DCHECK, heap(), heap_, isolate(), IsSweepingCompleted(), v8::internal::Isolate::num_sweeper_threads(), v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::Heap::paged_space(), ParallelSweepSpacesComplete(), pending_sweeper_jobs_semaphore_, RefillFreeList(), v8::internal::PagedSpace::ResetUnsweptFreeBytes(), v8::internal::Isolate::sweeper_threads(), sweeping_in_progress_, SweepInParallel(), and v8::internal::SweeperThread::WaitForSweeperThread().

Referenced by v8::internal::Isolate::Deinit(), v8::internal::Heap::IdleNotification(), v8::internal::StoreBuffer::IteratePointersToNewSpace(), v8::internal::Heap::MakeHeapIterable(), Prepare(), v8::internal::IncrementalMarking::Step(), SweepSpaces(), and v8::internal::PagedSpace::WaitForSweeperThreadsAndRetryAllocation().
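
The structure here, letting the main thread help with outstanding sweeping, waiting once per helper, then flipping sweeping_in_progress_ and refilling the free lists, maps onto standard primitives. A rough sketch with C++20 semaphores (illustrative names only; V8 uses its own SweeperThread and Semaphore classes):

    #include <semaphore>

    std::counting_semaphore<64> sweeping_jobs_done{0};

    void SweeperJobModel() {
      // ... sweep the pages of one space ...
      sweeping_jobs_done.release();        // one release per finished job
    }

    void EnsureSweepingCompletedModel(int job_count) {
      // The main thread may first sweep a few pages itself (the
      // SweepInParallel calls above) so it makes progress instead of
      // merely blocking, then waits once per outstanding job.
      for (int i = 0; i < job_count; i++) sweeping_jobs_done.acquire();
      // ... then finalize bookkeeping and refill the allocator's free lists ...
    }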


◆ EvacuateLiveObjectsFromPage()

void v8::internal::MarkCompactCollector::EvacuateLiveObjectsFromPage ( Page p)
private

Definition at line 3122 of file mark-compact.cc.

3122  {
3123  AlwaysAllocateScope always_allocate(isolate());
3124  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3125  DCHECK(p->IsEvacuationCandidate() && !p->WasSwept());
3126  p->SetWasSwept();
3127 
3128  int offsets[16];
3129 
3130  for (MarkBitCellIterator it(p); !it.Done(); it.Advance()) {
3131  Address cell_base = it.CurrentCellBase();
3132  MarkBit::CellType* cell = it.CurrentCell();
3133 
3134  if (*cell == 0) continue;
3135 
3136  int live_objects = MarkWordToObjectStarts(*cell, offsets);
3137  for (int i = 0; i < live_objects; i++) {
3138  Address object_addr = cell_base + offsets[i] * kPointerSize;
3139  HeapObject* object = HeapObject::FromAddress(object_addr);
3140  DCHECK(Marking::IsBlack(Marking::MarkBitFrom(object)));
3141 
3142  int size = object->Size();
3143 
3144  HeapObject* target_object;
3145  AllocationResult allocation = space->AllocateRaw(size);
3146  if (!allocation.To(&target_object)) {
3147  // If allocation failed, use emergency memory and re-try allocation.
3148  CHECK(space->HasEmergencyMemory());
3149  space->UseEmergencyMemory();
3150  allocation = space->AllocateRaw(size);
3151  }
3152  if (!allocation.To(&target_object)) {
3153  // OS refused to give us memory.
3154  V8::FatalProcessOutOfMemory("Evacuation");
3155  return;
3156  }
3157 
3158  MigrateObject(target_object, object, size, space->identity());
3159  DCHECK(object->map_word().IsForwardingAddress());
3160  }
3161 
3162  // Clear marking bits for current cell.
3163  *cell = 0;
3164  }
3165  p->ResetLiveBytes();
3166 }

References CHECK, DCHECK, v8::internal::V8::FatalProcessOutOfMemory(), v8::internal::HeapObject::FromAddress(), v8::internal::MemoryChunk::IsEvacuationCandidate(), isolate(), v8::internal::kPointerSize, v8::internal::HeapObject::map_word(), v8::internal::MarkWordToObjectStarts(), MigrateObject(), v8::internal::MemoryChunk::owner(), v8::internal::MemoryChunk::ResetLiveBytes(), v8::internal::Page::SetWasSwept(), size, space(), v8::internal::AllocationResult::To(), and v8::internal::Page::WasSwept().

Referenced by EvacuatePages().
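
The inner loop leans on MarkWordToObjectStarts() to turn one 32-bit mark cell into object start offsets. A plausible stand-alone version of that decoding step (GCC/Clang __builtin_ctz standing in for base::bits::CountTrailingZeros32; not V8's exact code):

    #include <cstdint>

    // Each set bit in `cell` marks an object start; bit i corresponds to the
    // word at cell_base + i * kPointerSize. Returns the number of live objects.
    int MarkWordToObjectStartsModel(uint32_t cell, int* starts) {
      int count = 0;
      while (cell != 0) {
        int bit = __builtin_ctz(cell);   // index of the lowest set mark bit
        starts[count++] = bit;
        cell &= cell - 1;                // clear that bit and continue
      }
      return count;
    }

The fixed `int offsets[16]` buffer above assumes at most 16 live objects can start within one cell, which holds as long as a live object spans at least two words.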


◆ EvacuateNewSpace()

void v8::internal::MarkCompactCollector::EvacuateNewSpace ( )
private

Definition at line 3088 of file mark-compact.cc.

3088  {
 3089  // There are soft limits in the allocation code, designed to trigger a
 3090  // mark-sweep collection by failing allocations. But since we are already
 3091  // in a mark-sweep allocation, there is no sense in trying to trigger one.
3092  AlwaysAllocateScope scope(isolate());
3093 
3094  NewSpace* new_space = heap()->new_space();
3095 
3096  // Store allocation range before flipping semispaces.
3097  Address from_bottom = new_space->bottom();
3098  Address from_top = new_space->top();
3099 
3100  // Flip the semispaces. After flipping, to space is empty, from space has
3101  // live objects.
3102  new_space->Flip();
3103  new_space->ResetAllocationInfo();
3104 
3105  int survivors_size = 0;
3106 
3107  // First pass: traverse all objects in inactive semispace, remove marks,
3108  // migrate live objects and write forwarding addresses. This stage puts
3109  // new entries in the store buffer and may cause some pages to be marked
3110  // scan-on-scavenge.
3111  NewSpacePageIterator it(from_bottom, from_top);
3112  while (it.has_next()) {
3113  NewSpacePage* p = it.next();
3114  survivors_size += DiscoverAndEvacuateBlackObjectsOnPage(new_space, p);
3115  }
3116 
3117  heap_->IncrementYoungSurvivorsCounter(survivors_size);
3118  new_space->set_age_mark(new_space->top());
3119 }

References v8::internal::NewSpace::bottom(), DiscoverAndEvacuateBlackObjectsOnPage(), v8::internal::NewSpace::Flip(), heap(), heap_, v8::internal::Heap::IncrementYoungSurvivorsCounter(), isolate(), v8::internal::Heap::new_space(), v8::internal::NewSpace::ResetAllocationInfo(), v8::internal::NewSpace::set_age_mark(), and v8::internal::NewSpace::top().

Referenced by EvacuateNewSpaceAndCandidates().
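
Flip-then-copy in miniature: after Flip() the semispaces swap roles, allocation restarts at the bottom of the now-empty to-space, and survivors are copied out of from-space. A toy model under those assumptions (fixed-size spaces, no real object format):

    #include <cstddef>
    #include <utility>
    #include <vector>

    struct SemiSpacesModel {
      std::vector<char> from_space, to_space;
      size_t top = 0;                      // bump pointer into to_space

      explicit SemiSpacesModel(size_t capacity)
          : from_space(capacity), to_space(capacity) {}

      void Flip() {                        // swap roles; ResetAllocationInfo()
        std::swap(from_space, to_space);   // analogue is the top = 0 below
        top = 0;
      }
      char* Allocate(size_t size) {        // bump allocation; no limit checks here
        char* result = to_space.data() + top;
        top += size;
        return result;
      }
    };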


◆ EvacuateNewSpaceAndCandidates()

void v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates ( )
private

Definition at line 3508 of file mark-compact.cc.

3508  {
3509  Heap::RelocationLock relocation_lock(heap());
3510 
3511  bool code_slots_filtering_required;
3512  {
 3513  GCTracer::Scope gc_scope(heap()->tracer(),
 3514  GCTracer::Scope::MC_SWEEP_NEWSPACE);
 3515  code_slots_filtering_required = MarkInvalidatedCode();
3516  EvacuateNewSpace();
3517  }
3518 
3519  {
 3520  GCTracer::Scope gc_scope(heap()->tracer(),
 3521  GCTracer::Scope::MC_EVACUATE_PAGES);
 3522  EvacuatePages();
3523  }
3524 
3525  // Second pass: find pointers to new space and update them.
3526  PointersUpdatingVisitor updating_visitor(heap());
3527 
3528  {
 3529  GCTracer::Scope gc_scope(heap()->tracer(),
 3530  GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS);
 3531  // Update pointers in to space.
3532  SemiSpaceIterator to_it(heap()->new_space()->bottom(),
3533  heap()->new_space()->top());
3534  for (HeapObject* object = to_it.Next(); object != NULL;
3535  object = to_it.Next()) {
3536  Map* map = object->map();
3537  object->IterateBody(map->instance_type(), object->SizeFromMap(map),
3538  &updating_visitor);
3539  }
3540  }
3541 
3542  {
 3543  GCTracer::Scope gc_scope(heap()->tracer(),
 3544  GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS);
 3545  // Update roots.
3546  heap_->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
3547  }
3548 
3549  {
 3550  GCTracer::Scope gc_scope(heap()->tracer(),
 3551  GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS);
 3552  StoreBufferRebuildScope scope(heap_, heap_->store_buffer(),
 3553  &Heap::ScavengeStoreBufferCallback);
 3554  heap_->store_buffer()->IteratePointersToNewSpaceAndClearMaps(
 3555  &UpdatePointer);
3556  }
3557 
3558  {
 3559  GCTracer::Scope gc_scope(heap()->tracer(),
 3560  GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED);
 3561  SlotsBuffer::UpdateSlotsRecordedIn(heap_, migration_slots_buffer_,
 3562  code_slots_filtering_required);
 3563  if (FLAG_trace_fragmentation) {
 3564  PrintF("  migration slots buffer: %d\n",
 3565  SlotsBuffer::SizeOfChain(migration_slots_buffer_));
 3566  }
 3567 
 3568  if (compacting_ && was_marked_incrementally_) {
 3569  // It's difficult to filter out slots recorded for large objects.
3570  LargeObjectIterator it(heap_->lo_space());
3571  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
3572  // LargeObjectSpace is not swept yet thus we have to skip
3573  // dead objects explicitly.
3574  if (!IsMarked(obj)) continue;
3575 
3576  Page* p = Page::FromAddress(obj->address());
3577  if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
3578  obj->Iterate(&updating_visitor);
3579  p->ClearFlag(Page::RESCAN_ON_EVACUATION);
3580  }
3581  }
3582  }
3583  }
3584 
3585  int npages = evacuation_candidates_.length();
3586  {
 3587  GCTracer::Scope gc_scope(
 3588  heap()->tracer(),
 3589  GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED);
 3590  for (int i = 0; i < npages; i++) {
3591  Page* p = evacuation_candidates_[i];
3592  DCHECK(p->IsEvacuationCandidate() ||
3593  p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
3594 
3595  if (p->IsEvacuationCandidate()) {
3596  SlotsBuffer::UpdateSlotsRecordedIn(heap_, p->slots_buffer(),
3597  code_slots_filtering_required);
3598  if (FLAG_trace_fragmentation) {
3599  PrintF(" page %p slots buffer: %d\n", reinterpret_cast<void*>(p),
3600  SlotsBuffer::SizeOfChain(p->slots_buffer()));
3601  }
3602 
3603  // Important: skip list should be cleared only after roots were updated
3604  // because root iteration traverses the stack and might have to find
3605  // code objects from non-updated pc pointing into evacuation candidate.
3606  SkipList* list = p->skip_list();
3607  if (list != NULL) list->Clear();
3608  } else {
3609  if (FLAG_gc_verbose) {
3610  PrintF("Sweeping 0x%" V8PRIxPTR " during evacuation.\n",
3611  reinterpret_cast<intptr_t>(p));
3612  }
3613  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3614  p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
3615 
3616  switch (space->identity()) {
 3617  case OLD_DATA_SPACE:
 3618  Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
 3619  IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
 3620  &updating_visitor);
 3621  break;
 3622  case OLD_POINTER_SPACE:
 3623  Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
 3624  IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
 3625  &updating_visitor);
 3626  break;
 3627  case CODE_SPACE:
 3628  if (FLAG_zap_code_space) {
 3629  Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
 3630  REBUILD_SKIP_LIST, ZAP_FREE_SPACE>(space, NULL, p,
 3631  &updating_visitor);
 3632  } else {
 3633  Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
 3634  REBUILD_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
 3635  &updating_visitor);
3636  }
3637  break;
3638  default:
3639  UNREACHABLE();
3640  break;
3641  }
3642  }
3643  }
3644  }
3645 
 3646  GCTracer::Scope gc_scope(heap()->tracer(),
 3647  GCTracer::Scope::MC_UPDATE_MISC_POINTERS);
 3648 
3649  // Update pointers from cells.
3650  HeapObjectIterator cell_iterator(heap_->cell_space());
3651  for (HeapObject* cell = cell_iterator.Next(); cell != NULL;
3652  cell = cell_iterator.Next()) {
3653  if (cell->IsCell()) {
3654  Cell::BodyDescriptor::IterateBody(cell, &updating_visitor);
3655  }
3656  }
3657 
 3658  HeapObjectIterator js_global_property_cell_iterator(
 3659  heap_->property_cell_space());
 3660  for (HeapObject* cell = js_global_property_cell_iterator.Next(); cell != NULL;
3661  cell = js_global_property_cell_iterator.Next()) {
3662  if (cell->IsPropertyCell()) {
3663  PropertyCell::BodyDescriptor::IterateBody(cell, &updating_visitor);
3664  }
3665  }
3666 
3667  heap_->string_table()->Iterate(&updating_visitor);
3668  updating_visitor.VisitPointer(heap_->weak_object_to_code_table_address());
3669  if (heap_->weak_object_to_code_table()->IsHashTable()) {
3670  WeakHashTable* table =
3671  WeakHashTable::cast(heap_->weak_object_to_code_table());
3672  table->Iterate(&updating_visitor);
3673  table->Rehash(heap_->isolate()->factory()->undefined_value());
3674  }
3675 
 3676  // Update pointers from external string table.
 3677  heap_->UpdateReferencesInExternalStringTable(
 3678  &UpdateReferenceInExternalStringTableEntry);
3680  EvacuationWeakObjectRetainer evacuation_object_retainer;
3681  heap()->ProcessWeakReferences(&evacuation_object_retainer);
3682 
3683  // Visit invalidated code (we ignored all slots on it) and clear mark-bits
3684  // under it.
3685  ProcessInvalidatedCode(&updating_visitor);
 3686 
 3687  heap_->isolate()->inner_pointer_to_code_cache()->Flush();
 3688 
 3689  slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
 3690  DCHECK(migration_slots_buffer_ == NULL);
 3691 }

References v8::internal::HeapObject::address(), v8::internal::Heap::cell_space(), v8::internal::SkipList::Clear(), v8::internal::MemoryChunk::ClearFlag(), v8::internal::CODE_SPACE, compacting_, DCHECK, v8::internal::SlotsBufferAllocator::DeallocateChain(), EvacuateNewSpace(), EvacuatePages(), evacuation_candidates_, v8::internal::Isolate::factory(), v8::internal::InnerPointerToCodeCache::Flush(), v8::internal::MemoryChunk::FromAddress(), heap(), heap_, v8::internal::IGNORE_FREE_SPACE, v8::internal::IGNORE_SKIP_LIST, v8::internal::Isolate::inner_pointer_to_code_cache(), v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::MemoryChunk::IsFlagSet(), IsMarked(), v8::internal::Heap::isolate(), v8::internal::HeapObject::Iterate(), v8::internal::FixedBodyDescriptor< start_offset, end_offset, size >::IterateBody(), v8::internal::StoreBuffer::IteratePointersToNewSpaceAndClearMaps(), v8::internal::Heap::IterateRoots(), v8::internal::Heap::lo_space(), map, MarkInvalidatedCode(), v8::internal::GCTracer::Scope::MC_EVACUATE_PAGES, v8::internal::GCTracer::Scope::MC_SWEEP_NEWSPACE, v8::internal::GCTracer::Scope::MC_UPDATE_MISC_POINTERS, v8::internal::GCTracer::Scope::MC_UPDATE_NEW_TO_NEW_POINTERS, v8::internal::GCTracer::Scope::MC_UPDATE_OLD_TO_NEW_POINTERS, v8::internal::GCTracer::Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED, v8::internal::GCTracer::Scope::MC_UPDATE_POINTERS_TO_EVACUATED, v8::internal::GCTracer::Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS, migration_slots_buffer_, v8::internal::HeapObjectIterator::Next(), v8::internal::SemiSpaceIterator::Next(), v8::internal::LargeObjectIterator::Next(), NULL, v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::MemoryChunk::owner(), v8::internal::PrintF(), ProcessInvalidatedCode(), v8::internal::Heap::ProcessWeakReferences(), v8::internal::Heap::property_cell_space(), v8::internal::REBUILD_SKIP_LIST, v8::internal::HashTable< Derived, Shape, Key >::Rehash(), v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, v8::internal::Heap::ScavengeStoreBufferCallback(), v8::internal::SlotsBuffer::SizeOfChain(), v8::internal::MemoryChunk::skip_list(), v8::internal::MemoryChunk::slots_buffer(), slots_buffer_allocator_, space(), v8::internal::Heap::store_buffer(), v8::internal::Sweep(), v8::internal::SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD, UNREACHABLE, v8::internal::UpdatePointer(), v8::internal::UpdateReferenceInExternalStringTableEntry(), v8::internal::Heap::UpdateReferencesInExternalStringTable(), v8::internal::SlotsBuffer::UpdateSlotsRecordedIn(), V8PRIxPTR, v8::internal::VISIT_ALL_IN_SWEEP_NEWSPACE, v8::internal::PointersUpdatingVisitor::VisitPointer(), was_marked_incrementally_, v8::internal::Heap::weak_object_to_code_table(), v8::internal::Heap::weak_object_to_code_table_address(), and v8::internal::ZAP_FREE_SPACE.

Referenced by SweepSpaces().
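
Most of this function is the second half of a two-phase protocol: evacuate first, then rewrite every recorded slot through the forwarding addresses the moves left behind. The per-slot work reduces to a lookup, roughly as follows (hypothetical names; a map stands in for V8's in-object forwarding words):

    #include <unordered_map>
    #include <vector>

    using Address = const void*;

    void UpdateRecordedSlots(
        std::vector<const void**>& recorded_slots,
        const std::unordered_map<Address, Address>& forwarding) {
      for (const void** slot : recorded_slots) {
        auto it = forwarding.find(*slot);     // was the target evacuated?
        if (it != forwarding.end()) *slot = it->second;
      }
    }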


◆ EvacuatePages()

void v8::internal::MarkCompactCollector::EvacuatePages ( )
private

Definition at line 3169 of file mark-compact.cc.

3169  {
3170  int npages = evacuation_candidates_.length();
3171  for (int i = 0; i < npages; i++) {
3172  Page* p = evacuation_candidates_[i];
3173  DCHECK(p->IsEvacuationCandidate() ||
3174  p->IsFlagSet(Page::RESCAN_ON_EVACUATION));
 3175  DCHECK(static_cast<int>(p->parallel_sweeping()) ==
 3176  MemoryChunk::SWEEPING_DONE);
 3177  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3178  // Allocate emergency memory for the case when compaction fails due to out
3179  // of memory.
3180  if (!space->HasEmergencyMemory()) {
3181  space->CreateEmergencyMemory();
3182  }
3183  if (p->IsEvacuationCandidate()) {
3184  // During compaction we might have to request a new page. Check that we
3185  // have an emergency page and the space still has room for that.
 3186  if (space->HasEmergencyMemory() && space->CanExpand()) {
 3187  EvacuateLiveObjectsFromPage(p);
 3188  } else {
3189  // Without room for expansion evacuation is not guaranteed to succeed.
3190  // Pessimistically abandon unevacuated pages.
3191  for (int j = i; j < npages; j++) {
3192  Page* page = evacuation_candidates_[j];
3193  slots_buffer_allocator_.DeallocateChain(page->slots_buffer_address());
3194  page->ClearEvacuationCandidate();
3195  page->SetFlag(Page::RESCAN_ON_EVACUATION);
3196  }
3197  break;
3198  }
3199  }
3200  }
3201  if (npages > 0) {
3202  // Release emergency memory.
3203  PagedSpaces spaces(heap());
3204  for (PagedSpace* space = spaces.next(); space != NULL;
3205  space = spaces.next()) {
3206  if (space->HasEmergencyMemory()) {
3207  space->FreeEmergencyMemory();
3208  }
3209  }
3210  }
3211 }

References v8::internal::MemoryChunk::ClearEvacuationCandidate(), DCHECK, v8::internal::SlotsBufferAllocator::DeallocateChain(), EvacuateLiveObjectsFromPage(), evacuation_candidates_, heap(), v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::MemoryChunk::IsFlagSet(), NULL, v8::internal::MemoryChunk::owner(), v8::internal::MemoryChunk::parallel_sweeping(), v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, v8::internal::MemoryChunk::SetFlag(), v8::internal::MemoryChunk::slots_buffer_address(), slots_buffer_allocator_, space(), and v8::internal::MemoryChunk::SWEEPING_DONE.

Referenced by EvacuateNewSpaceAndCandidates().
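
The emergency-memory protocol keeps compaction from dying half-way: reserve roughly one page's worth of memory per space up front, dip into it when an allocation during evacuation fails, and if even that is gone, abandon the remaining candidates (RESCAN_ON_EVACUATION) rather than risk running out of memory. The reserve half of the pattern, sketched with a hypothetical API:

    #include <cstdlib>

    struct SpaceModel {
      void* emergency = nullptr;

      bool HasEmergencyMemory() const { return emergency != nullptr; }
      void CreateEmergencyMemory(size_t page_size) {
        if (emergency == nullptr) emergency = std::malloc(page_size);
      }
      void* UseEmergencyMemory() {      // hand the reserve to the allocator
        void* page = emergency;
        emergency = nullptr;
        return page;
      }
      void FreeEmergencyMemory() {      // release an unused reserve afterwards
        std::free(emergency);
        emergency = nullptr;
      }
    };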


◆ Finish()

void v8::internal::MarkCompactCollector::Finish ( )
private

Definition at line 979 of file mark-compact.cc.

979  {
980 #ifdef DEBUG
981  DCHECK(state_ == SWEEP_SPACES || state_ == RELOCATE_OBJECTS);
982  state_ = IDLE;
983 #endif
984  // The stub cache is not traversed during GC; clear the cache to
985  // force lazy re-initialization of it. This must be done after the
986  // GC, because it relies on the new address of certain old space
987  // objects (empty string, illegal builtin).
988  isolate()->stub_cache()->Clear();
989 
 990  if (have_code_to_deoptimize_) {
 991  // Some code objects were marked for deoptimization during the GC.
 992  Deoptimizer::DeoptimizeMarkedCode(isolate());
 993  have_code_to_deoptimize_ = false;
994  }
995 }

References v8::internal::StubCache::Clear(), DCHECK, v8::internal::Deoptimizer::DeoptimizeMarkedCode(), have_code_to_deoptimize_, v8::internal::IDLE, isolate(), and v8::internal::Isolate::stub_cache().

Referenced by CollectGarbage().


◆ heap()

Heap * v8::internal::MarkCompactCollector::heap ( ) const
inline

◆ Initialize()

void v8::internal::MarkCompactCollector::Initialize ( )
static

Definition at line 4392 of file mark-compact.cc.

References v8::internal::IncrementalMarking::Initialize(), and v8::internal::MarkCompactMarkingVisitor::Initialize().

Referenced by v8::internal::InitializeGCOnce().


◆ INLINE() [1/7]

v8::internal::MarkCompactCollector::INLINE ( static bool   IsOnEvacuationCandidateObject *obj)
inline

Definition at line 575 of file mark-compact.h.

575  {
 576  return Page::FromAddress(reinterpret_cast<Address>(obj))
 577  ->IsEvacuationCandidate();
 578  }

References v8::internal::MemoryChunk::FromAddress(), and v8::internal::MemoryChunk::IsEvacuationCandidate().


◆ INLINE() [2/7]

v8::internal::MarkCompactCollector::INLINE ( static bool   ShouldSkipEvacuationSlotRecordingObject **anchor)
inline

Definition at line 565 of file mark-compact.h.

565  {
 566  return Page::FromAddress(reinterpret_cast<Address>(anchor))
 567  ->ShouldSkipEvacuationSlotRecording();
 568  }

References v8::internal::MemoryChunk::FromAddress(), and v8::internal::MemoryChunk::ShouldSkipEvacuationSlotRecording().


◆ INLINE() [3/7]

v8::internal::MarkCompactCollector::INLINE ( static bool   ShouldSkipEvacuationSlotRecordingObject *host)
inline

Definition at line 570 of file mark-compact.h.

570  {
 571  return Page::FromAddress(reinterpret_cast<Address>(host))
 572  ->ShouldSkipEvacuationSlotRecording();
 573  }

References v8::internal::MemoryChunk::FromAddress(), and v8::internal::MemoryChunk::ShouldSkipEvacuationSlotRecording().


◆ INLINE() [4/7]

v8::internal::MarkCompactCollector::INLINE ( void   EvictEvacuationCandidatePage *page)
inline

Definition at line 580 of file mark-compact.h.

580  {
581  if (FLAG_trace_fragmentation) {
582  PrintF("Page %p is too popular. Disabling evacuation.\n",
583  reinterpret_cast<void*>(page));
584  }
585 
586  // TODO(gc) If all evacuation candidates are too popular we
587  // should stop slots recording entirely.
588  page->ClearEvacuationCandidate();
589 
590  // We were not collecting slots on this page that point
591  // to other evacuation candidates thus we have to
592  // rescan the page after evacuation to discover and update all
593  // pointers to evacuated objects.
594  if (page->owner()->identity() == OLD_DATA_SPACE) {
595  evacuation_candidates_.RemoveElement(page);
596  } else {
597  page->SetFlag(Page::RESCAN_ON_EVACUATION);
598  }
599  }

References v8::internal::MemoryChunk::ClearEvacuationCandidate(), evacuation_candidates_, v8::internal::Space::identity(), v8::internal::OLD_DATA_SPACE, v8::internal::MemoryChunk::owner(), v8::internal::PrintF(), v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, and v8::internal::MemoryChunk::SetFlag().


◆ INLINE() [5/7]

v8::internal::MarkCompactCollector::INLINE ( void   MarkObjectHeapObject *obj, MarkBit mark_bit)
private

◆ INLINE() [6/7]

v8::internal::MarkCompactCollector::INLINE ( void   RecordSlotObject **anchor_slot, Object **slot, Object *object, SlotsBuffer::AdditionMode mode=SlotsBuffer::FAIL_ON_OVERFLOW)

◆ INLINE() [7/7]

v8::internal::MarkCompactCollector::INLINE ( void   SetMarkHeapObject *obj, MarkBit mark_bit)
private

◆ InvalidateCode()

void v8::internal::MarkCompactCollector::InvalidateCode ( Code code)

Definition at line 3451 of file mark-compact.cc.

 3451  {
 3452  if (heap_->incremental_marking()->IsCompacting() &&
 3453  !ShouldSkipEvacuationSlotRecording(code)) {
 3454  DCHECK(compacting_);
 3455 
 3456  // If the object is white then no slots were recorded on it yet.
3457  MarkBit mark_bit = Marking::MarkBitFrom(code);
3458  if (Marking::IsWhite(mark_bit)) return;
3459 
3460  invalidated_code_.Add(code);
3461  }
3462 }

References compacting_, DCHECK, heap_, v8::internal::Heap::incremental_marking(), invalidated_code_, and v8::internal::IncrementalMarking::IsCompacting().

Referenced by v8::internal::Deoptimizer::DeoptimizeMarkedCodeForContext().


◆ is_code_flushing_enabled()

bool v8::internal::MarkCompactCollector::is_code_flushing_enabled ( ) const
inline

Definition at line 546 of file mark-compact.h.

546 { return code_flusher_ != NULL; }

References code_flusher_, and NULL.

Referenced by AfterMarking(), PrepareForCodeFlushing(), v8::internal::Heap::Scavenge(), and v8::internal::MarkCompactMarkingVisitor::VisitRegExpAndFlushCode().


◆ is_compacting()

bool v8::internal::MarkCompactCollector::is_compacting ( ) const
inline

Definition at line 620 of file mark-compact.h.

620 { return compacting_; }

References compacting_.

Referenced by v8::internal::MustRecordSlots(), and RecordCodeTargetPatch().


◆ IsMarked()

bool v8::internal::MarkCompactCollector::IsMarked ( Object obj)
inlinestatic

Definition at line 50 of file mark-compact-inl.h.

50  {
51  DCHECK(obj->IsHeapObject());
52  HeapObject* heap_object = HeapObject::cast(obj);
53  return Marking::MarkBitFrom(heap_object).Get();
54 }

References DCHECK.

Referenced by ClearDependentCode(), ClearDependentICList(), ClearNonLiveDependentCodeInGroup(), ClearNonLivePrototypeTransitions(), ClearNonLiveReferences(), ClearWeakCollections(), EvacuateNewSpaceAndCandidates(), v8::internal::MarkCompactMarkingVisitor::INLINE(), MarkImplicitRefGroups(), MarkLiveObjects(), MarkWeakObjectToCodeTable(), ProcessMapCaches(), ProcessWeakCollections(), and RemoveDeadInvalidatedCode().
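
Marking::MarkBitFrom(heap_object).Get() is a bitmap probe keyed by object address. A minimal mark bitmap with the same cell layout (32-bit cells, one bit per pointer-sized word; illustrative only, since V8's bitmap actually encodes multi-bit colors):

    #include <cstdint>
    #include <vector>

    constexpr size_t kWordSize = sizeof(void*);

    struct MarkBitmapModel {
      uintptr_t base;                   // start address of the covered region
      std::vector<uint32_t> cells;      // one bit per word

      bool Get(uintptr_t addr) const {
        size_t index = (addr - base) / kWordSize;
        return (cells[index >> 5] >> (index & 31)) & 1;
      }
      void Set(uintptr_t addr) {
        size_t index = (addr - base) / kWordSize;
        cells[index >> 5] |= uint32_t{1} << (index & 31);
      }
    };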


◆ isolate()

Isolate * v8::internal::MarkCompactCollector::isolate ( ) const
inline

Definition at line 4389 of file mark-compact.cc.

4389 { return heap_->isolate(); }

References heap_, and v8::internal::Heap::isolate().

Referenced by AreSweeperThreadsActivated(), EnableCodeFlushing(), EnsureSweepingCompleted(), EvacuateLiveObjectsFromPage(), EvacuateNewSpace(), Finish(), IsSweepingCompleted(), MarkImplicitRefGroups(), MarkLiveObjects(), MigrateObject(), PrepareForCodeFlushing(), PrepareThreadForCodeFlushing(), ProcessEphemeralMarking(), ProcessTopOptimizedFrame(), RecordCodeTargetPatch(), ReportDeleteIfNeeded(), and StartSweeperThreads().


◆ IsSweepingCompleted()

bool v8::internal::MarkCompactCollector::IsSweepingCompleted ( )

Definition at line 608 of file mark-compact.cc.

608  {
609  for (int i = 0; i < isolate()->num_sweeper_threads(); i++) {
610  if (!isolate()->sweeper_threads()[i]->SweepingCompleted()) {
611  return false;
612  }
613  }
614 
615  if (FLAG_job_based_sweeping) {
616  if (!pending_sweeper_jobs_semaphore_.WaitFor(
617  base::TimeDelta::FromSeconds(0))) {
618  return false;
619  }
 620  pending_sweeper_jobs_semaphore_.Signal();
 621  }
622 
623  return true;
624 }

References isolate(), v8::internal::Isolate::num_sweeper_threads(), pending_sweeper_jobs_semaphore_, v8::internal::Isolate::sweeper_threads(), and v8::internal::SweeperThread::SweepingCompleted().

Referenced by EnsureSweepingCompleted(), and v8::internal::IncrementalMarking::Step().
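
The FLAG_job_based_sweeping branch is a non-blocking probe: WaitFor with a zero timeout either fails (jobs still pending) or succeeds and is immediately undone by the Signal() on line 620, leaving the semaphore count unchanged. The same idiom with C++20 primitives (illustrative):

    #include <semaphore>

    std::counting_semaphore<64> pending_jobs{0};

    bool SweepingJobsCompletedModel() {
      if (!pending_jobs.try_acquire()) return false;  // zero-timeout WaitFor
      pending_jobs.release();                         // put the token back
      return true;
    }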


◆ IsUnmarkedHeapObject()

bool v8::internal::MarkCompactCollector::IsUnmarkedHeapObject ( Object **  p)
staticprivate

Definition at line 2031 of file mark-compact.cc.

2031  {
2032  Object* o = *p;
2033  if (!o->IsHeapObject()) return false;
2034  HeapObject* heap_object = HeapObject::cast(o);
2035  MarkBit mark = Marking::MarkBitFrom(heap_object);
2036  return !mark.Get();
2037 }

References v8::internal::MarkBit::Get().

Referenced by MarkLiveObjects().


◆ IsUnmarkedHeapObjectWithHeap()

bool v8::internal::MarkCompactCollector::IsUnmarkedHeapObjectWithHeap ( Heap heap,
Object **  p 
)
staticprivate

Definition at line 2040 of file mark-compact.cc.

2041  {
2042  Object* o = *p;
2043  DCHECK(o->IsHeapObject());
2044  HeapObject* heap_object = HeapObject::cast(o);
2045  MarkBit mark = Marking::MarkBitFrom(heap_object);
2046  return !mark.Get();
2047 }

References DCHECK, and v8::internal::MarkBit::Get().

Referenced by ProcessEphemeralMarking().


◆ MarkAllocationSite()

void v8::internal::MarkCompactCollector::MarkAllocationSite ( AllocationSite site)

Definition at line 2064 of file mark-compact.cc.

2064  {
2065  MarkBit mark_bit = Marking::MarkBitFrom(site);
2066  SetMark(site, mark_bit);
2067 }

Referenced by v8::internal::MarkCompactWeakObjectRetainer::RetainAs().


◆ MarkImplicitRefGroups()

void v8::internal::MarkCompactCollector::MarkImplicitRefGroups ( )
private

Definition at line 2088 of file mark-compact.cc.

2088  {
2089  List<ImplicitRefGroup*>* ref_groups =
 2090  isolate()->global_handles()->implicit_ref_groups();
 2091 
2092  int last = 0;
2093  for (int i = 0; i < ref_groups->length(); i++) {
2094  ImplicitRefGroup* entry = ref_groups->at(i);
2095  DCHECK(entry != NULL);
2096 
2097  if (!IsMarked(*entry->parent)) {
2098  (*ref_groups)[last++] = entry;
2099  continue;
2100  }
2101 
2102  Object*** children = entry->children;
2103  // A parent object is marked, so mark all child heap objects.
2104  for (size_t j = 0; j < entry->length; ++j) {
2105  if ((*children[j])->IsHeapObject()) {
2106  HeapObject* child = HeapObject::cast(*children[j]);
2107  MarkBit mark = Marking::MarkBitFrom(child);
2108  MarkObject(child, mark);
2109  }
2110  }
2111 
2112  // Once the entire group has been marked, dispose it because it's
2113  // not needed anymore.
2114  delete entry;
2115  }
2116  ref_groups->Rewind(last);
2117 }

References v8::internal::List< T, AllocationPolicy >::at(), v8::internal::ImplicitRefGroup::children, DCHECK, v8::internal::Isolate::global_handles(), v8::internal::GlobalHandles::implicit_ref_groups(), IsMarked(), isolate(), v8::internal::ImplicitRefGroup::length, NULL, and v8::internal::ImplicitRefGroup::parent.

Referenced by ProcessEphemeralMarking().
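
The `last` cursor plus Rewind() is V8's in-place filter idiom: entries to keep are compacted toward the front, processed entries are disposed, and the list is truncated once at the end. The same pattern over a std::vector (illustrative types):

    #include <vector>

    struct GroupModel {
      bool parent_marked = false;
      // ... children ...
    };

    void FilterGroups(std::vector<GroupModel*>& groups) {
      size_t last = 0;
      for (GroupModel* entry : groups) {
        if (!entry->parent_marked) {
          groups[last++] = entry;  // parent not marked: keep for a later pass
        } else {
          // ... mark the group's children, as the loop above does ...
          delete entry;            // fully processed: dispose the group
        }
      }
      groups.resize(last);         // the Rewind(last) step
    }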


◆ marking_parity()

MarkingParity v8::internal::MarkCompactCollector::marking_parity ( )
inline

Definition at line 622 of file mark-compact.h.

622 { return marking_parity_; }

References marking_parity_.

◆ MarkInvalidatedCode()

bool v8::internal::MarkCompactCollector::MarkInvalidatedCode ( )
private

Definition at line 3471 of file mark-compact.cc.

3471  {
3472  bool code_marked = false;
3473 
3474  int length = invalidated_code_.length();
3475  for (int i = 0; i < length; i++) {
3476  Code* code = invalidated_code_[i];
3477 
3478  if (SetMarkBitsUnderInvalidatedCode(code, true)) {
3479  code_marked = true;
3480  }
3481  }
3482 
3483  return code_marked;
3484 }

References invalidated_code_, and v8::internal::SetMarkBitsUnderInvalidatedCode().

Referenced by EvacuateNewSpaceAndCandidates().


◆ MarkLiveObjects()

void v8::internal::MarkCompactCollector::MarkLiveObjects ( )
private

Definition at line 2236 of file mark-compact.cc.

2236  {
2237  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK);
2238  double start_time = 0.0;
2239  if (FLAG_print_cumulative_gc_stat) {
2240  start_time = base::OS::TimeCurrentMillis();
2241  }
2242  // The recursive GC marker detects when it is nearing stack overflow,
2243  // and switches to a different marking system. JS interrupts interfere
2244  // with the C stack limit check.
2245  PostponeInterruptsScope postpone(isolate());
2246 
2247  bool incremental_marking_overflowed = false;
 2248  IncrementalMarking* incremental_marking = heap_->incremental_marking();
 2249  if (was_marked_incrementally_) {
 2250  // Finalize the incremental marking and check whether we had an overflow.
 2251  // Both markers use grey color to mark overflowed objects so
 2252  // the non-incremental marker can deal with them as if overflow
 2253  // occurred during normal marking.
2254  // But incremental marker uses a separate marking deque
2255  // so we have to explicitly copy its overflow state.
2256  incremental_marking->Finalize();
2257  incremental_marking_overflowed =
2258  incremental_marking->marking_deque()->overflowed();
2259  incremental_marking->marking_deque()->ClearOverflowed();
2260  } else {
2261  // Abort any pending incremental activities e.g. incremental sweeping.
2262  incremental_marking->Abort();
2263  }
2264 
2265 #ifdef DEBUG
2266  DCHECK(state_ == PREPARE_GC);
2267  state_ = MARK_LIVE_OBJECTS;
2268 #endif
2269  // The to space contains live objects, a page in from space is used as a
2270  // marking stack.
2271  Address marking_deque_start = heap()->new_space()->FromSpacePageLow();
2272  Address marking_deque_end = heap()->new_space()->FromSpacePageHigh();
2273  if (FLAG_force_marking_deque_overflows) {
2274  marking_deque_end = marking_deque_start + 64 * kPointerSize;
2275  }
2276  marking_deque_.Initialize(marking_deque_start, marking_deque_end);
2278 
2279  if (incremental_marking_overflowed) {
 2280  // There are overflowed objects left in the heap after incremental marking.
 2281  marking_deque_.SetOverflowed();
 2282  }
 2283 
 2284  PrepareForCodeFlushing();
 2285 
 2286  if (was_marked_incrementally_) {
 2287  // There is no write barrier on cells so we have to scan them now at the end
2288  // of the incremental marking.
2289  {
2290  HeapObjectIterator cell_iterator(heap()->cell_space());
2291  HeapObject* cell;
2292  while ((cell = cell_iterator.Next()) != NULL) {
2293  DCHECK(cell->IsCell());
2294  if (IsMarked(cell)) {
2295  int offset = Cell::kValueOffset;
2296  MarkCompactMarkingVisitor::VisitPointer(
2297  heap(), reinterpret_cast<Object**>(cell->address() + offset));
2298  }
2299  }
2300  }
2301  {
2302  HeapObjectIterator js_global_property_cell_iterator(
2303  heap()->property_cell_space());
2304  HeapObject* cell;
2305  while ((cell = js_global_property_cell_iterator.Next()) != NULL) {
2306  DCHECK(cell->IsPropertyCell());
2307  if (IsMarked(cell)) {
2308  MarkCompactMarkingVisitor::VisitPropertyCell(cell->map(), cell);
2309  }
2310  }
2311  }
2312  }
2313 
2314  RootMarkingVisitor root_visitor(heap());
2315  MarkRoots(&root_visitor);
2316 
2317  ProcessTopOptimizedFrame(&root_visitor);
2318 
2319  // The objects reachable from the roots are marked, yet unreachable
2320  // objects are unmarked. Mark objects reachable due to host
2321  // application specific logic or through Harmony weak maps.
2322  ProcessEphemeralMarking(&root_visitor);
2323 
2324  // The objects reachable from the roots, weak maps or object groups
2325  // are marked, yet unreachable objects are unmarked. Mark objects
2326  // reachable only from weak global handles.
2327  //
2328  // First we identify nonlive weak handles and mark them as pending
 2329  // destruction.
 2330  heap()->isolate()->global_handles()->IdentifyWeakHandles(
 2331  &IsUnmarkedHeapObject);
 2332  // Then we mark the objects and process the transitive closure.
2333  heap()->isolate()->global_handles()->IterateWeakRoots(&root_visitor);
 2334  while (marking_deque_.overflowed()) {
 2335  RefillMarkingDeque();
 2336  EmptyMarkingDeque();
 2337  }
2338 
2339  // Repeat host application specific and Harmony weak maps marking to
2340  // mark unmarked objects reachable from the weak roots.
2341  ProcessEphemeralMarking(&root_visitor);
2342 
2343  AfterMarking();
2344 
 2345  if (FLAG_print_cumulative_gc_stat) {
 2346  heap_->tracer()->AddMarkingTime(base::OS::TimeCurrentMillis() - start_time);
 2347  }
2348 }

References v8::internal::IncrementalMarking::Abort(), v8::internal::GCTracer::AddMarkingTime(), v8::internal::HeapObject::address(), AfterMarking(), v8::internal::MarkingDeque::ClearOverflowed(), DCHECK, EmptyMarkingDeque(), v8::internal::IncrementalMarking::Finalize(), v8::internal::NewSpace::FromSpacePageHigh(), v8::internal::NewSpace::FromSpacePageLow(), v8::internal::Isolate::global_handles(), heap(), heap_, v8::internal::GlobalHandles::IdentifyWeakHandles(), v8::internal::Heap::incremental_marking(), v8::internal::MarkingDeque::Initialize(), IsMarked(), v8::internal::Heap::isolate(), isolate(), IsUnmarkedHeapObject(), v8::internal::GlobalHandles::IterateWeakRoots(), v8::internal::kPointerSize, v8::internal::Cell::kValueOffset, v8::internal::HeapObject::map(), v8::internal::IncrementalMarking::marking_deque(), marking_deque_, MarkRoots(), v8::internal::GCTracer::Scope::MC_MARK, v8::internal::Heap::new_space(), v8::internal::HeapObjectIterator::Next(), NULL, v8::internal::MarkingDeque::overflowed(), PrepareForCodeFlushing(), ProcessEphemeralMarking(), ProcessTopOptimizedFrame(), RefillMarkingDeque(), v8::internal::MarkingDeque::SetOverflowed(), v8::base::OS::TimeCurrentMillis(), v8::internal::Heap::tracer(), and was_marked_incrementally_.

Referenced by CollectGarbage().


◆ MarkRoots()

void v8::internal::MarkCompactCollector::MarkRoots ( RootMarkingVisitor visitor)
private

Definition at line 2070 of file mark-compact.cc.

2070  {
2071  // Mark the heap roots including global variables, stack variables,
2072  // etc., and all objects reachable from them.
 2073  heap()->IterateStrongRoots(visitor, VISIT_ONLY_STRONG);
 2074 
2075  // Handle the string table specially.
2076  MarkStringTable(visitor);
2077 
 2078  MarkWeakObjectToCodeTable();
 2079 
2080  // There may be overflowed objects in the heap. Visit them now.
 2081  while (marking_deque_.overflowed()) {
 2082  RefillMarkingDeque();
 2083  EmptyMarkingDeque();
 2084  }
2085 }

References EmptyMarkingDeque(), heap(), v8::internal::Heap::IterateStrongRoots(), marking_deque_, MarkStringTable(), MarkWeakObjectToCodeTable(), v8::internal::MarkingDeque::overflowed(), RefillMarkingDeque(), and v8::internal::VISIT_ONLY_STRONG.

Referenced by MarkLiveObjects().


◆ MarkStringTable()

void v8::internal::MarkCompactCollector::MarkStringTable ( RootMarkingVisitor visitor)
private

Definition at line 2050 of file mark-compact.cc.

2050  {
2051  StringTable* string_table = heap()->string_table();
2052  // Mark the string table itself.
2053  MarkBit string_table_mark = Marking::MarkBitFrom(string_table);
2054  if (!string_table_mark.Get()) {
2055  // String table could have already been marked by visiting the handles list.
2056  SetMark(string_table, string_table_mark);
2057  }
2058  // Explicitly mark the prefix.
 2059  string_table->IteratePrefix(visitor);
 2060  ProcessMarkingDeque();
 2061 }

References v8::internal::MarkBit::Get(), heap(), v8::internal::HashTable< Derived, Shape, Key >::IteratePrefix(), and ProcessMarkingDeque().

Referenced by MarkRoots().


◆ MarkWeakObjectToCodeTable()

void v8::internal::MarkCompactCollector::MarkWeakObjectToCodeTable ( )

Definition at line 2120 of file mark-compact.cc.

2120  {
2121  HeapObject* weak_object_to_code_table =
2122  HeapObject::cast(heap()->weak_object_to_code_table());
2123  if (!IsMarked(weak_object_to_code_table)) {
2124  MarkBit mark = Marking::MarkBitFrom(weak_object_to_code_table);
2125  SetMark(weak_object_to_code_table, mark);
2126  }
2127 }

References heap(), and IsMarked().

Referenced by MarkRoots(), and v8::internal::IncrementalMarking::StartMarking().


◆ MigrateObject()

void v8::internal::MarkCompactCollector::MigrateObject ( HeapObject dst,
HeapObject src,
int  size,
AllocationSpace  to_old_space 
)

Definition at line 2885 of file mark-compact.cc.

2886  {
2887  Address dst_addr = dst->address();
2888  Address src_addr = src->address();
 2889  DCHECK(heap()->AllowedToBeMigrated(src, dest));
 2890  DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize);
 2891  if (dest == OLD_POINTER_SPACE) {
2892  Address src_slot = src_addr;
2893  Address dst_slot = dst_addr;
 2894  DCHECK(IsAligned(size, kPointerSize));
 2895 
2896  for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
2897  Object* value = Memory::Object_at(src_slot);
2898 
2899  Memory::Object_at(dst_slot) = value;
2900 
2901  if (!src->MayContainRawValues()) {
2902  RecordMigratedSlot(value, dst_slot);
2903  }
2904 
2905  src_slot += kPointerSize;
2906  dst_slot += kPointerSize;
2907  }
2908 
2909  if (compacting_ && dst->IsJSFunction()) {
2910  Address code_entry_slot = dst_addr + JSFunction::kCodeEntryOffset;
2911  Address code_entry = Memory::Address_at(code_entry_slot);
2912 
2913  if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
 2914  SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
 2915  SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
 2916  SlotsBuffer::IGNORE_OVERFLOW);
2917  }
2918  } else if (dst->IsConstantPoolArray()) {
2919  // We special case ConstantPoolArrays since they could contain integers
2920  // value entries which look like tagged pointers.
2921  // TODO(mstarzinger): restructure this code to avoid this special-casing.
2922  ConstantPoolArray* array = ConstantPoolArray::cast(dst);
2923  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
2924  while (!code_iter.is_finished()) {
2925  Address code_entry_slot =
2926  dst_addr + array->OffsetOfElementAt(code_iter.next_index());
2927  Address code_entry = Memory::Address_at(code_entry_slot);
2928 
2929  if (Page::FromAddress(code_entry)->IsEvacuationCandidate()) {
 2930  SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
 2931  SlotsBuffer::CODE_ENTRY_SLOT, code_entry_slot,
 2932  SlotsBuffer::IGNORE_OVERFLOW);
2933  }
2934  }
2935  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
2936  while (!heap_iter.is_finished()) {
2937  Address heap_slot =
2938  dst_addr + array->OffsetOfElementAt(heap_iter.next_index());
2939  Object* value = Memory::Object_at(heap_slot);
2940  RecordMigratedSlot(value, heap_slot);
2941  }
2942  }
2943  } else if (dest == CODE_SPACE) {
2944  PROFILE(isolate(), CodeMoveEvent(src_addr, dst_addr));
 2945  heap()->MoveBlock(dst_addr, src_addr, size);
 2946  SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
 2947  SlotsBuffer::RELOCATED_CODE_OBJECT, dst_addr,
 2948  SlotsBuffer::IGNORE_OVERFLOW);
 2949  Code::cast(dst)->Relocate(dst_addr - src_addr);
2950  } else {
2951  DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE);
2952  heap()->MoveBlock(dst_addr, src_addr, size);
2953  }
2954  heap()->OnMoveEvent(dst, src, size);
2955  Memory::Address_at(src_addr) = dst_addr;
2956 }

References v8::internal::HeapObject::address(), v8::internal::Memory::Address_at(), v8::internal::SlotsBuffer::AddTo(), v8::internal::SlotsBuffer::CODE_ENTRY_SLOT, v8::internal::ConstantPoolArray::CODE_PTR, v8::internal::CODE_SPACE, compacting_, DCHECK, v8::internal::MemoryChunk::FromAddress(), heap(), v8::internal::ConstantPoolArray::HEAP_PTR, v8::internal::SlotsBuffer::IGNORE_OVERFLOW, v8::internal::IsAligned(), isolate(), v8::internal::JSFunction::kCodeEntryOffset, v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::kPointerSize, v8::internal::LO_SPACE, v8::internal::HeapObject::MayContainRawValues(), migration_slots_buffer_, v8::internal::Heap::MoveBlock(), v8::internal::NEW_SPACE, v8::internal::Memory::Object_at(), v8::internal::ConstantPoolArray::OffsetOfElementAt(), v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::Heap::OnMoveEvent(), PROFILE, RecordMigratedSlot(), v8::internal::SlotsBuffer::RELOCATED_CODE_OBJECT, size, and slots_buffer_allocator_.

Referenced by DiscoverAndEvacuateBlackObjectsOnPage(), EvacuateLiveObjectsFromPage(), and TryPromoteObject().
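
Stripped of slot recording and the space-specific branches, migration is: copy the payload word by word, then store the forwarding address in the first word of the old copy (the final Memory::Address_at(src_addr) = dst_addr). A minimal model, with no real object format:

    #include <cstring>

    using Address = char*;
    constexpr size_t kWordSize = sizeof(void*);

    void MigrateModel(Address dst, Address src, size_t size) {
      std::memcpy(dst, src, size);      // the MoveBlock() step
      // Leave a forwarding pointer where the object used to start so later
      // passes can translate stale references into the new location.
      std::memcpy(src, &dst, kWordSize);
    }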


◆ MoveEvacuationCandidatesToEndOfPagesList()

void v8::internal::MarkCompactCollector::MoveEvacuationCandidatesToEndOfPagesList ( )
private

Definition at line 3694 of file mark-compact.cc.

3694  {
3695  int npages = evacuation_candidates_.length();
3696  for (int i = 0; i < npages; i++) {
3697  Page* p = evacuation_candidates_[i];
3698  if (!p->IsEvacuationCandidate()) continue;
3699  p->Unlink();
3700  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3701  p->InsertAfter(space->LastPage());
3702  }
3703 }

References evacuation_candidates_, v8::internal::MemoryChunk::InsertAfter(), v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::MemoryChunk::owner(), space(), and v8::internal::MemoryChunk::Unlink().

Referenced by SweepSpaces().


◆ ParallelSweepSpaceComplete()

void v8::internal::MarkCompactCollector::ParallelSweepSpaceComplete ( PagedSpace space)
private

Definition at line 4337 of file mark-compact.cc.

4337  {
4338  PageIterator it(space);
4339  while (it.has_next()) {
4340  Page* p = it.next();
4341  if (p->parallel_sweeping() == MemoryChunk::SWEEPING_FINALIZE) {
4342  p->set_parallel_sweeping(MemoryChunk::SWEEPING_DONE);
4343  p->SetWasSwept();
4344  }
4345  DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE);
4346  }
4347 }

References DCHECK, v8::internal::MemoryChunk::parallel_sweeping(), v8::internal::MemoryChunk::set_parallel_sweeping(), v8::internal::Page::SetWasSwept(), space(), v8::internal::MemoryChunk::SWEEPING_DONE, and v8::internal::MemoryChunk::SWEEPING_FINALIZE.

Referenced by ParallelSweepSpacesComplete().


◆ ParallelSweepSpacesComplete()

void v8::internal::MarkCompactCollector::ParallelSweepSpacesComplete ( )
private

Definition at line 4350 of file mark-compact.cc.

4350  {
4351  ParallelSweepSpaceComplete(heap()->old_pointer_space());
4352  ParallelSweepSpaceComplete(heap()->old_data_space());
4353 }

References heap(), and ParallelSweepSpaceComplete().

Referenced by EnsureSweepingCompleted().


◆ Prepare()

void v8::internal::MarkCompactCollector::Prepare ( )

Definition at line 935 of file mark-compact.cc.

935  {
 936  was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
 937 
938 #ifdef DEBUG
939  DCHECK(state_ == IDLE);
940  state_ = PREPARE_GC;
941 #endif
942 
943  DCHECK(!FLAG_never_compact || !FLAG_always_compact);
944 
945  if (sweeping_in_progress()) {
 946  // Instead of waiting we could also abort the sweeper threads here.
 947  EnsureSweepingCompleted();
 948  }
949 
 950  // Clear marking bits if incremental marking is aborted.
 951  if (was_marked_incrementally_ && abort_incremental_marking_) {
 952  heap()->incremental_marking()->Abort();
 953  ClearMarkbits();
 954  AbortWeakCollections();
 955  AbortCompaction();
 956  was_marked_incrementally_ = false;
 957  }
958 
959  // Don't start compaction if we are in the middle of incremental
960  // marking cycle. We did not collect any slots.
 961  if (!FLAG_never_compact && !was_marked_incrementally_) {
 962  StartCompaction(NON_INCREMENTAL_COMPACTION);
 963  }
964 
965  PagedSpaces spaces(heap());
966  for (PagedSpace* space = spaces.next(); space != NULL;
967  space = spaces.next()) {
968  space->PrepareForMarkCompact();
969  }
970 
971 #ifdef VERIFY_HEAP
972  if (!was_marked_incrementally_ && FLAG_verify_heap) {
973  VerifyMarkbitsAreClean();
974  }
975 #endif
976 }

References v8::internal::IncrementalMarking::Abort(), abort_incremental_marking_, AbortCompaction(), AbortWeakCollections(), ClearMarkbits(), DCHECK, EnsureSweepingCompleted(), heap(), v8::internal::IDLE, v8::internal::Heap::incremental_marking(), NON_INCREMENTAL_COMPACTION, NULL, space(), StartCompaction(), sweeping_in_progress(), and was_marked_incrementally_.

Referenced by v8::internal::Heap::MarkCompact().


◆ PrepareForCodeFlushing()

void v8::internal::MarkCompactCollector::PrepareForCodeFlushing ( )
private

Definition at line 1736 of file mark-compact.cc.

1736  {
1737  // Enable code flushing for non-incremental cycles.
 1738  if (FLAG_flush_code && !FLAG_flush_code_incrementally) {
 1739  EnableCodeFlushing(true);
 1740  }
1741 
1742  // If code flushing is disabled, there is no need to prepare for it.
1743  if (!is_code_flushing_enabled()) return;
1744 
1745  // Ensure that empty descriptor array is marked. Method MarkDescriptorArray
1746  // relies on it being marked before any other descriptor array.
1747  HeapObject* descriptor_array = heap()->empty_descriptor_array();
1748  MarkBit descriptor_array_mark = Marking::MarkBitFrom(descriptor_array);
1749  MarkObject(descriptor_array, descriptor_array_mark);
1750 
1751  // Make sure we are not referencing the code from the stack.
 1752  DCHECK(this == heap()->mark_compact_collector());
 1753  PrepareThreadForCodeFlushing(heap()->isolate(),
 1754  heap()->isolate()->thread_local_top());
1755 
1756  // Iterate the archived stacks in all threads to check if
1757  // the code is referenced.
 1758  CodeMarkingVisitor code_marking_visitor(this);
 1759  heap()->isolate()->thread_manager()->IterateArchivedThreads(
 1760  &code_marking_visitor);
1761 
1762  SharedFunctionInfoMarkingVisitor visitor(this);
1763  heap()->isolate()->compilation_cache()->IterateFunctions(&visitor);
1764  heap()->isolate()->handle_scope_implementer()->Iterate(&visitor);
 1765 
 1766  ProcessMarkingDeque();
1767 }

References v8::internal::Isolate::compilation_cache(), DCHECK, EnableCodeFlushing(), v8::internal::Isolate::handle_scope_implementer(), heap(), is_code_flushing_enabled(), v8::internal::Heap::isolate(), isolate(), v8::internal::HandleScopeImplementer::Iterate(), v8::internal::ThreadManager::IterateArchivedThreads(), v8::internal::CompilationCache::IterateFunctions(), PrepareThreadForCodeFlushing(), ProcessMarkingDeque(), v8::internal::Isolate::thread_manager(), and was_marked_incrementally_.

Referenced by MarkLiveObjects().


◆ PrepareThreadForCodeFlushing()

void v8::internal::MarkCompactCollector::PrepareThreadForCodeFlushing ( Isolate isolate,
ThreadLocalTop *  top 
)
private

Definition at line 1717 of file mark-compact.cc.

1718  {
1719  for (StackFrameIterator it(isolate, top); !it.done(); it.Advance()) {
1720  // Note: for the frame that has a pending lazy deoptimization
1721  // StackFrame::unchecked_code will return a non-optimized code object for
1722  // the outermost function and StackFrame::LookupCode will return
1723  // actual optimized code object.
1724  StackFrame* frame = it.frame();
1725  Code* code = frame->unchecked_code();
1726  MarkBit code_mark = Marking::MarkBitFrom(code);
1727  MarkObject(code, code_mark);
 1728  if (frame->is_optimized()) {
 1729  MarkCompactMarkingVisitor::MarkInlinedFunctionsCode(heap(),
 1730  frame->LookupCode());
1731  }
1732  }
1733 }

References v8::internal::StackFrameIterator::Advance(), heap(), isolate(), and v8::internal::StaticMarkingVisitor< MarkCompactMarkingVisitor >::MarkInlinedFunctionsCode().

Referenced by PrepareForCodeFlushing(), and v8::internal::CodeMarkingVisitor::VisitThread().


◆ ProcessEphemeralMarking()

void v8::internal::MarkCompactCollector::ProcessEphemeralMarking ( ObjectVisitor visitor)
private

Definition at line 2204 of file mark-compact.cc.

2204  {
 2205  bool work_to_do = true;
 2206  DCHECK(marking_deque_.IsEmpty());
 2207  while (work_to_do) {
 2208  isolate()->global_handles()->IterateObjectGroups(
 2209  visitor, &IsUnmarkedHeapObjectWithHeap);
 2210  MarkImplicitRefGroups();
 2211  ProcessWeakCollections();
 2212  work_to_do = !marking_deque_.IsEmpty();
 2213  ProcessMarkingDeque();
 2214  }
2215 }

References DCHECK, v8::internal::Isolate::global_handles(), v8::internal::MarkingDeque::IsEmpty(), isolate(), IsUnmarkedHeapObjectWithHeap(), v8::internal::GlobalHandles::IterateObjectGroups(), MarkImplicitRefGroups(), marking_deque_, ProcessMarkingDeque(), and ProcessWeakCollections().

Referenced by MarkLiveObjects().
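
Marking one weak value (or object-group member) can make further keys reachable, so the loop above runs to a fixpoint: it stops only when a full pass leaves the marking deque empty. The core of that fixpoint, isolated with illustrative types:

    #include <vector>

    struct EphemeronModel { bool key_marked = false; bool value_marked = false; };

    // Repeat until a whole pass adds nothing: the ephemeron fixpoint.
    void ProcessEphemeronsToFixpoint(std::vector<EphemeronModel>& pairs) {
      bool work_to_do = true;
      while (work_to_do) {
        work_to_do = false;
        for (EphemeronModel& p : pairs) {
          if (p.key_marked && !p.value_marked) {
            p.value_marked = true;  // tracing from here may mark further keys
            work_to_do = true;
          }
        }
      }
    }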


◆ ProcessInvalidatedCode()

void v8::internal::MarkCompactCollector::ProcessInvalidatedCode ( ObjectVisitor visitor)
private

Definition at line 3495 of file mark-compact.cc.

3495  {
3496  int length = invalidated_code_.length();
3497  for (int i = 0; i < length; i++) {
3498  Code* code = invalidated_code_[i];
3499  if (code != NULL) {
3500  code->Iterate(visitor);
3501  SetMarkBitsUnderInvalidatedCode(code, false);
3502  }
3503  }
3504  invalidated_code_.Rewind(0);
3505 }

References invalidated_code_, v8::internal::HeapObject::Iterate(), NULL, and v8::internal::SetMarkBitsUnderInvalidatedCode().

Referenced by EvacuateNewSpaceAndCandidates().


◆ ProcessMapCaches()

void v8::internal::MarkCompactCollector::ProcessMapCaches ( )
private

Definition at line 2394 of file mark-compact.cc.

2394  {
2395  Object* raw_context = heap()->native_contexts_list();
2396  while (raw_context != heap()->undefined_value()) {
2397  Context* context = reinterpret_cast<Context*>(raw_context);
2398  if (IsMarked(context)) {
2399  HeapObject* raw_map_cache =
2400  HeapObject::cast(context->get(Context::MAP_CACHE_INDEX));
2401  // A map cache may be reachable from the stack. In this case
2402  // it's already transitively marked and it's too late to clean
2403  // up its parts.
2404  if (!IsMarked(raw_map_cache) &&
2405  raw_map_cache != heap()->undefined_value()) {
2406  MapCache* map_cache = reinterpret_cast<MapCache*>(raw_map_cache);
2407  int existing_elements = map_cache->NumberOfElements();
2408  int used_elements = 0;
2409  for (int i = MapCache::kElementsStartIndex; i < map_cache->length();
2410  i += MapCache::kEntrySize) {
2411  Object* raw_key = map_cache->get(i);
2412  if (raw_key == heap()->undefined_value() ||
2413  raw_key == heap()->the_hole_value())
 2414  continue;
 2415  STATIC_ASSERT(MapCache::kEntrySize == 2);
 2416  Object* raw_map = map_cache->get(i + 1);
2417  if (raw_map->IsHeapObject() && IsMarked(raw_map)) {
2418  ++used_elements;
2419  } else {
2420  // Delete useless entries with unmarked maps.
2421  DCHECK(raw_map->IsMap());
2422  map_cache->set_the_hole(i);
2423  map_cache->set_the_hole(i + 1);
2424  }
2425  }
2426  if (used_elements == 0) {
2427  context->set(Context::MAP_CACHE_INDEX, heap()->undefined_value());
2428  } else {
2429  // Note: we don't actually shrink the cache here to avoid
2430  // extra complexity during GC. We rely on subsequent cache
2431  // usages (EnsureCapacity) to do this.
2432  map_cache->ElementsRemoved(existing_elements - used_elements);
2433  MarkBit map_cache_markbit = Marking::MarkBitFrom(map_cache);
2434  MarkObject(map_cache, map_cache_markbit);
2435  }
2436  }
2437  }
2438  // Move to next element in the list.
2439  raw_context = context->get(Context::NEXT_CONTEXT_LINK);
 2440  }
 2441  ProcessMarkingDeque();
 2442 }

References DCHECK, v8::internal::HashTable< Derived, Shape, Key >::ElementsRemoved(), v8::internal::FixedArray::get(), heap(), IsMarked(), v8::internal::HashTable< MapCache, MapCacheShape, HashTableKey * >::kElementsStartIndex, v8::internal::HashTable< MapCache, MapCacheShape, HashTableKey * >::kEntrySize, v8::internal::FixedArrayBase::length(), v8::internal::Context::MAP_CACHE_INDEX, v8::internal::Heap::native_contexts_list(), v8::internal::Context::NEXT_CONTEXT_LINK, v8::internal::HashTable< Derived, Shape, Key >::NumberOfElements(), ProcessMarkingDeque(), v8::internal::FixedArray::set(), v8::internal::FixedArray::set_the_hole(), and v8::internal::STATIC_ASSERT().

Referenced by AfterMarking().


◆ ProcessMarkingDeque()

void v8::internal::MarkCompactCollector::ProcessMarkingDeque ( )
private

Definition at line 2193 of file mark-compact.cc.

 2193  {
 2194  EmptyMarkingDeque();
 2195  while (marking_deque_.overflowed()) {
 2196  RefillMarkingDeque();
 2197  EmptyMarkingDeque();
 2198  }
2199 }

References EmptyMarkingDeque(), marking_deque_, v8::internal::MarkingDeque::overflowed(), and RefillMarkingDeque().

Referenced by MarkStringTable(), PrepareForCodeFlushing(), ProcessEphemeralMarking(), ProcessMapCaches(), and ProcessTopOptimizedFrame().


◆ ProcessTopOptimizedFrame()

void v8::internal::MarkCompactCollector::ProcessTopOptimizedFrame ( ObjectVisitor visitor)
private

Definition at line 2218 of file mark-compact.cc.

2218  {
2219  for (StackFrameIterator it(isolate(), isolate()->thread_local_top());
2220  !it.done(); it.Advance()) {
2221  if (it.frame()->type() == StackFrame::JAVA_SCRIPT) {
2222  return;
2223  }
2224  if (it.frame()->type() == StackFrame::OPTIMIZED) {
2225  Code* code = it.frame()->LookupCode();
2226  if (!code->CanDeoptAt(it.frame()->pc())) {
2227  code->CodeIterateBody(visitor);
2228  }
2229  ProcessMarkingDeque();
2230  return;
2231  }
2232  }
2233 }

References v8::internal::StackFrameIterator::Advance(), v8::internal::Code::CanDeoptAt(), v8::internal::Code::CodeIterateBody(), isolate(), and ProcessMarkingDeque().

Referenced by MarkLiveObjects().


◆ ProcessWeakCollections()

void v8::internal::MarkCompactCollector::ProcessWeakCollections ( )
private

Definition at line 2794 of file mark-compact.cc.

2794  {
2795  GCTracer::Scope gc_scope(heap()->tracer(),
2796  GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS);
2797  Object* weak_collection_obj = heap()->encountered_weak_collections();
2798  while (weak_collection_obj != Smi::FromInt(0)) {
2799  JSWeakCollection* weak_collection =
2800  reinterpret_cast<JSWeakCollection*>(weak_collection_obj);
2801  DCHECK(MarkCompactCollector::IsMarked(weak_collection));
2802  if (weak_collection->table()->IsHashTable()) {
2803  ObjectHashTable* table = ObjectHashTable::cast(weak_collection->table());
2804  Object** anchor = reinterpret_cast<Object**>(table->address());
2805  for (int i = 0; i < table->Capacity(); i++) {
2806  if (MarkCompactCollector::IsMarked(HeapObject::cast(table->KeyAt(i)))) {
2807  Object** key_slot =
2808  table->RawFieldOfElementAt(ObjectHashTable::EntryToIndex(i));
2809  RecordSlot(anchor, key_slot, *key_slot);
2810  Object** value_slot =
2811  table->RawFieldOfElementAt(ObjectHashTable::EntryToValueIndex(i));
2812  MarkCompactMarkingVisitor::MarkObjectByPointer(this, anchor,
2813  value_slot);
2814  }
2815  }
2816  }
2817  weak_collection_obj = weak_collection->next();
2818  }
2819 }

References v8::internal::HeapObject::address(), v8::internal::HashTable< Derived, Shape, Key >::Capacity(), DCHECK, v8::internal::Heap::encountered_weak_collections(), v8::internal::HashTable< ObjectHashTable, ObjectHashTableShape, Handle< Object > >::EntryToIndex(), v8::internal::ObjectHashTable::EntryToValueIndex(), v8::internal::Smi::FromInt(), heap(), IsMarked(), v8::internal::HashTable< Derived, Shape, Key >::KeyAt(), v8::internal::GCTracer::Scope::MC_WEAKCOLLECTION_PROCESS, and v8::internal::FixedArray::RawFieldOfElementAt().

Referenced by ProcessEphemeralMarking().


◆ RecordCodeEntrySlot()

void v8::internal::MarkCompactCollector::RecordCodeEntrySlot ( Address  slot,
Code *  target 
)

Definition at line 4468 of file mark-compact.cc.

4468  {
4469  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
4470  if (target_page->IsEvacuationCandidate() &&
4471  !ShouldSkipEvacuationSlotRecording(reinterpret_cast<Object**>(slot))) {
4472  if (!SlotsBuffer::AddTo(&slots_buffer_allocator_,
4473  target_page->slots_buffer_address(),
4474  SlotsBuffer::CODE_ENTRY_SLOT, slot,
4475  SlotsBuffer::FAIL_ON_OVERFLOW)) {
4476  EvictEvacuationCandidate(target_page);
4477  }
4478  }
4479 }

References v8::internal::SlotsBuffer::AddTo(), v8::internal::SlotsBuffer::CODE_ENTRY_SLOT, v8::internal::SlotsBuffer::FAIL_ON_OVERFLOW, v8::internal::MemoryChunk::FromAddress(), v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::MemoryChunk::slots_buffer_address(), and slots_buffer_allocator_.

Referenced by v8::internal::CodeFlusher::ProcessJSFunctionCandidates(), and v8::internal::IncrementalMarking::RecordWriteOfCodeEntrySlow().


◆ RecordCodeTargetPatch()

void v8::internal::MarkCompactCollector::RecordCodeTargetPatch ( Address  pc,
Code *  target 
)

Definition at line 4482 of file mark-compact.cc.

4482  {
4483  DCHECK(heap()->gc_state() == Heap::MARK_COMPACT);
4484  if (is_compacting()) {
4485  Code* host =
4486  isolate()->inner_pointer_to_code_cache()->GcSafeFindCodeForInnerPointer(
4487  pc);
4488  MarkBit mark_bit = Marking::MarkBitFrom(host);
4489  if (Marking::IsBlack(mark_bit)) {
4490  RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
4491  RecordRelocSlot(&rinfo, target);
4492  }
4493  }
4494 }

References v8::internal::RelocInfo::CODE_TARGET, DCHECK, v8::internal::InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(), heap(), v8::internal::Isolate::inner_pointer_to_code_cache(), is_compacting(), isolate(), v8::internal::Heap::MARK_COMPACT, v8::internal::pc, and RecordRelocSlot().

Referenced by v8::internal::IC::SetTargetAtAddress().


◆ RecordMigratedSlot()

void v8::internal::MarkCompactCollector::RecordMigratedSlot ( Object *  value,
Address  slot 
)
private

Definition at line 2860 of file mark-compact.cc.

2860  {
2861  if (heap_->InNewSpace(value)) {
2862  heap_->store_buffer()->Mark(slot);
2863  } else if (value->IsHeapObject() && IsOnEvacuationCandidate(value)) {
2864  SlotsBuffer::AddTo(&slots_buffer_allocator_, &migration_slots_buffer_,
2865  reinterpret_cast<Object**>(slot),
2866  SlotsBuffer::IGNORE_OVERFLOW);
2867  }
2868 }

References v8::internal::SlotsBuffer::AddTo(), heap_, v8::internal::SlotsBuffer::IGNORE_OVERFLOW, v8::internal::Heap::InNewSpace(), v8::internal::StoreBuffer::Mark(), migration_slots_buffer_, slots_buffer_allocator_, and v8::internal::Heap::store_buffer().

Referenced by MigrateObject().


◆ RecordRelocSlot()

void v8::internal::MarkCompactCollector::RecordRelocSlot ( RelocInfo *  rinfo,
Object *  target 
)

Definition at line 4437 of file mark-compact.cc.

4437  {
4438  Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
4439  RelocInfo::Mode rmode = rinfo->rmode();
4440  if (target_page->IsEvacuationCandidate() &&
4441  (rinfo->host() == NULL ||
4442  !ShouldSkipEvacuationSlotRecording(rinfo->host()))) {
4443  bool success;
4444  if (RelocInfo::IsEmbeddedObject(rmode) && rinfo->IsInConstantPool()) {
4445  // This doesn't need to be typed since it is just a normal heap pointer.
4446  Object** target_pointer =
4447  reinterpret_cast<Object**>(rinfo->constant_pool_entry_address());
4448  success = SlotsBuffer::AddTo(
4449  &slots_buffer_allocator_, target_page->slots_buffer_address(),
4450  target_pointer, SlotsBuffer::FAIL_ON_OVERFLOW);
4451  } else if (RelocInfo::IsCodeTarget(rmode) && rinfo->IsInConstantPool()) {
4452  success = SlotsBuffer::AddTo(
4453  &slots_buffer_allocator_, target_page->slots_buffer_address(),
4454  SlotsBuffer::CODE_ENTRY_SLOT, rinfo->constant_pool_entry_address(),
4455  SlotsBuffer::FAIL_ON_OVERFLOW);
4456  } else {
4457  success = SlotsBuffer::AddTo(
4458  &slots_buffer_allocator_, target_page->slots_buffer_address(),
4459  SlotTypeForRMode(rmode), rinfo->pc(), SlotsBuffer::FAIL_ON_OVERFLOW);
4460  }
4461  if (!success) {
4462  EvictEvacuationCandidate(target_page);
4463  }
4464  }
4465 }

References v8::internal::SlotsBuffer::AddTo(), v8::internal::SlotsBuffer::CODE_ENTRY_SLOT, v8::internal::SlotsBuffer::FAIL_ON_OVERFLOW, v8::internal::MemoryChunk::FromAddress(), v8::internal::RelocInfo::host(), v8::internal::RelocInfo::IsCodeTarget(), v8::internal::RelocInfo::IsEmbeddedObject(), v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::RelocInfo::IsInConstantPool(), NULL, v8::internal::RelocInfo::pc(), v8::internal::RelocInfo::rmode(), v8::internal::MemoryChunk::slots_buffer_address(), slots_buffer_allocator_, and v8::internal::SlotTypeForRMode().

Referenced by RecordCodeTargetPatch(), and v8::internal::IncrementalMarking::RecordWriteIntoCodeSlow().

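RecordRelocSlot and RecordCodeEntrySlot share one fallback strategy: try to append the slot to the target page's slots buffer, and if the buffer refuses to grow (the FAIL_ON_OVERFLOW mode), stop compacting that page via EvictEvacuationCandidate instead of losing the slot. A hedged sketch of that append-or-evict shape follows; FixedBuffer, Page and RecordSlot here are illustrative, not V8's types.

    #include <cstdio>

    // Illustrative fixed-capacity slot buffer; AddTo fails instead of growing.
    struct FixedBuffer {
      static const int kCapacity = 4;
      void* slots[kCapacity];
      int size = 0;
      bool AddTo(void* slot) {
        if (size == kCapacity) return false;  // FAIL_ON_OVERFLOW analogue
        slots[size++] = slot;
        return true;
      }
    };

    struct Page {
      bool evacuation_candidate = true;
      FixedBuffer buffer;
      void EvictEvacuationCandidate() { evacuation_candidate = false; }
    };

    // Record a slot pointing into `page`; on overflow, stop compacting it.
    void RecordSlot(Page* page, void* slot) {
      if (!page->evacuation_candidate) return;  // nothing to record
      if (!page->buffer.AddTo(slot)) {
        page->EvictEvacuationCandidate();  // too many slots: abandon compaction
      }
    }

    int main() {
      Page page;
      int dummy[6];
      for (int i = 0; i < 6; ++i) RecordSlot(&page, &dummy[i]);
      std::printf("still a candidate: %d\n", page.evacuation_candidate);  // 0
    }

Evicting is safe because a page that is no longer an evacuation candidate simply keeps its objects in place, so the unrecorded slots never need updating.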

◆ RefillFreeList()

void v8::internal::MarkCompactCollector::RefillFreeList ( PagedSpace *  space)

Definition at line 627 of file mark-compact.cc.

627  {
628  FreeList* free_list;
629 
630  if (space == heap()->old_pointer_space()) {
631  free_list = free_list_old_pointer_space_.get();
632  } else if (space == heap()->old_data_space()) {
633  free_list = free_list_old_data_space_.get();
634  } else {
635  // Any PagedSpace might invoke RefillFreeLists, so we need to make sure
636  // to only refill them for old data and pointer spaces.
637  return;
638  }
639 
640  intptr_t freed_bytes = space->free_list()->Concatenate(free_list);
641  space->AddToAccountingStats(freed_bytes);
642  space->DecrementUnsweptFreeBytes(freed_bytes);
643 }

References free_list_old_data_space_, free_list_old_pointer_space_, heap(), and space().

Referenced by EnsureSweepingCompleted(), and v8::internal::PagedSpace::SlowAllocateRaw().

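RefillFreeList moves everything the sweeper threads accumulated in the collector's per-space free list into the space's own free list via Concatenate(), then fixes up the byte accounting. A simplified sketch of that hand-off follows; SimpleFreeList and its fields are illustrative, not V8's FreeList.

    #include <cstdio>
    #include <forward_list>

    // Illustrative free list: a list of free block sizes plus a byte total.
    struct SimpleFreeList {
      std::forward_list<int> blocks;
      long total_bytes = 0;
      // Move all blocks from `other` into this list; returns bytes moved,
      // mirroring FreeList::Concatenate().
      long Concatenate(SimpleFreeList* other) {
        long moved = other->total_bytes;
        blocks.splice_after(blocks.before_begin(), other->blocks);
        total_bytes += moved;
        other->total_bytes = 0;
        return moved;
      }
    };

    int main() {
      SimpleFreeList space_list, sweeper_list;
      sweeper_list.blocks = {64, 128, 32};
      sweeper_list.total_bytes = 224;

      long freed = space_list.Concatenate(&sweeper_list);
      // The caller would now credit `freed` to the space's accounting stats
      // and decrement its unswept-free-bytes counter, as RefillFreeList does.
      std::printf("moved %ld bytes\n", freed);  // 224
    }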

◆ RefillMarkingDeque()

void v8::internal::MarkCompactCollector::RefillMarkingDeque ( )
private

Definition at line 2155 of file mark-compact.cc.

2155  {
2156  DCHECK(marking_deque_.overflowed());
2157 
2158  DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_);
2159  if (marking_deque_.IsFull()) return;
2160 
2161  DiscoverGreyObjectsInSpace(heap(), &marking_deque_,
2162  heap()->old_pointer_space());
2163  if (marking_deque_.IsFull()) return;
2164 
2165  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space());
2166  if (marking_deque_.IsFull()) return;
2167 
2168  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space());
2169  if (marking_deque_.IsFull()) return;
2170 
2171  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->map_space());
2172  if (marking_deque_.IsFull()) return;
2173 
2174  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->cell_space());
2175  if (marking_deque_.IsFull()) return;
2176 
2177  DiscoverGreyObjectsInSpace(heap(), &marking_deque_,
2178  heap()->property_cell_space());
2179  if (marking_deque_.IsFull()) return;
2180 
2181  LargeObjectIterator lo_it(heap()->lo_space());
2182  DiscoverGreyObjectsWithIterator(heap(), &marking_deque_, &lo_it);
2183  if (marking_deque_.IsFull()) return;
2184 
2185  marking_deque_.ClearOverflowed();
2186 }

References v8::internal::MarkingDeque::ClearOverflowed(), DCHECK, v8::internal::DiscoverGreyObjectsInNewSpace(), v8::internal::DiscoverGreyObjectsInSpace(), v8::internal::DiscoverGreyObjectsWithIterator(), heap(), v8::internal::MarkingDeque::IsFull(), marking_deque_, and v8::internal::MarkingDeque::overflowed().

Referenced by MarkLiveObjects(), MarkRoots(), and ProcessMarkingDeque().

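The refill visits one object source at a time (new space, each paged space, then large objects) and returns the moment the deque is full again; checking IsFull() after every source keeps a refill cheap when capacity is tight. A compact sketch of that early-exit pattern, with plain vectors standing in for the heap spaces:

    #include <cstdio>
    #include <vector>

    // Refill `deque` (capacity-bounded) from several sources, stopping as
    // soon as it is full again, mirroring RefillMarkingDeque's early returns.
    bool Refill(std::vector<int>* deque, size_t capacity,
                const std::vector<std::vector<int>>& sources) {
      for (const auto& source : sources) {  // new space, old spaces, code, ...
        for (int obj : source) {
          if (deque->size() == capacity) return false;  // still overflowed
          deque->push_back(obj);
        }
      }
      return true;  // everything fit: safe to clear the overflowed flag
    }

    int main() {
      std::vector<int> deque;
      bool cleared = Refill(&deque, 3, {{1, 2}, {3, 4, 5}});
      std::printf("cleared=%d size=%zu\n", cleared, deque.size());  // 0, 3
    }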

◆ ReleaseEvacuationCandidates()

void v8::internal::MarkCompactCollector::ReleaseEvacuationCandidates ( )
private

Definition at line 3706 of file mark-compact.cc.

3706  {
3707  int npages = evacuation_candidates_.length();
3708  for (int i = 0; i < npages; i++) {
3709  Page* p = evacuation_candidates_[i];
3710  if (!p->IsEvacuationCandidate()) continue;
3711  PagedSpace* space = static_cast<PagedSpace*>(p->owner());
3712  space->Free(p->area_start(), p->area_size());
3713  p->set_scan_on_scavenge(false);
3714  slots_buffer_allocator_.DeallocateChain(p->slots_buffer_address());
3715  p->ResetLiveBytes();
3716  space->ReleasePage(p);
3717  }
3718  evacuation_candidates_.Rewind(0);
3719  compacting_ = false;
3720  heap()->FreeQueuedChunks();
3721 }

References v8::internal::MemoryChunk::area_size(), v8::internal::MemoryChunk::area_start(), compacting_, v8::internal::SlotsBufferAllocator::DeallocateChain(), evacuation_candidates_, v8::internal::Heap::FreeQueuedChunks(), heap(), v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::MemoryChunk::owner(), v8::internal::MemoryChunk::ResetLiveBytes(), v8::internal::MemoryChunk::set_scan_on_scavenge(), v8::internal::MemoryChunk::slots_buffer_address(), slots_buffer_allocator_, and space().

Referenced by SweepSpaces().


◆ RemoveDeadInvalidatedCode()

void v8::internal::MarkCompactCollector::RemoveDeadInvalidatedCode ( )
private

Definition at line 3487 of file mark-compact.cc.

3487  {
3488  int length = invalidated_code_.length();
3489  for (int i = 0; i < length; i++) {
3490  if (!IsMarked(invalidated_code_[i])) invalidated_code_[i] = NULL;
3491  }
3492 }

References invalidated_code_, IsMarked(), and NULL.

Referenced by SweepSpaces().


◆ ReportDeleteIfNeeded()

void v8::internal::MarkCompactCollector::ReportDeleteIfNeeded ( HeapObject *  obj,
Isolate *  isolate 
)
static

Definition at line 4381 of file mark-compact.cc.

4382  {
4383  if (obj->IsCode()) {
4384  PROFILE(isolate, CodeDeleteEvent(obj->address()));
4385  }
4386 }

References v8::internal::HeapObject::address(), isolate(), and PROFILE.

Referenced by v8::internal::LargeObjectSpace::FreeUnmarkedObjects().


◆ sequential_sweeping()

bool v8::internal::MarkCompactCollector::sequential_sweeping ( ) const
inline

Definition at line 653 of file mark-compact.h.

653 { return sequential_sweeping_; }

References sequential_sweeping_.

Referenced by set_sequential_sweeping().


◆ set_sequential_sweeping()

void v8::internal::MarkCompactCollector::set_sequential_sweeping ( bool  sequential_sweeping)
inline

Definition at line 649 of file mark-compact.h.

649 { sequential_sweeping_ = sequential_sweeping; }

References sequential_sweeping(), and sequential_sweeping_.


◆ SetFlags()

void v8::internal::MarkCompactCollector::SetFlags ( int  flags)
inline
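The body of this inline setter did not survive extraction. Judging from the members that name SetFlags() in their Referenced-by notes on this page (abort_incremental_marking_ does, and reduce_memory_footprint_ is declared alongside it), it unpacks GC flag bits into booleans. The following reconstruction is an assumption, not verified source; the Heap::k*Mask names in particular are presumed.

    // Hedged reconstruction, not the verified definition. Assumes the masks
    // Heap::kReduceMemoryFootprintMask and Heap::kAbortIncrementalMarkingMask.
    void SetFlags(int flags) {
      reduce_memory_footprint_ = ((flags & Heap::kReduceMemoryFootprintMask) != 0);
      abort_incremental_marking_ =
          ((flags & Heap::kAbortIncrementalMarkingMask) != 0);
    }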

◆ SetUp()

void v8::internal::MarkCompactCollector::SetUp ( )

Definition at line 328 of file mark-compact.cc.

328  {
329  free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space()));
330  free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space()));
331 }

References free_list_old_data_space_, free_list_old_pointer_space_, heap_, v8::internal::Heap::old_data_space(), and v8::internal::Heap::old_pointer_space().

Referenced by v8::internal::Heap::SetUp().


◆ StartCompaction()

bool v8::internal::MarkCompactCollector::StartCompaction ( CompactionMode  mode)

Definition at line 353 of file mark-compact.cc.

353  {
354  if (!compacting_) {
355  DCHECK(evacuation_candidates_.length() == 0);
356 
357 #ifdef ENABLE_GDB_JIT_INTERFACE
358  // If GDBJIT interface is active disable compaction.
359  if (FLAG_gdbjit) return false;
360 #endif
361 
362  CollectEvacuationCandidates(heap()->old_pointer_space());
363  CollectEvacuationCandidates(heap()->old_data_space());
364 
365  if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION ||
366  FLAG_incremental_code_compaction)) {
367  CollectEvacuationCandidates(heap()->code_space());
368  } else if (FLAG_trace_fragmentation) {
369  TraceFragmentation(heap()->code_space());
370  }
371 
372  if (FLAG_trace_fragmentation) {
373  TraceFragmentation(heap()->map_space());
374  TraceFragmentation(heap()->cell_space());
375  TraceFragmentation(heap()->property_cell_space());
376  }
377 
377 
378  heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists();
379  heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists();
380  heap()->code_space()->EvictEvacuationCandidatesFromFreeLists();
381 
382  compacting_ = evacuation_candidates_.length() > 0;
383  }
384 
385  return compacting_;
386 }

References v8::internal::Heap::code_space(), CollectEvacuationCandidates(), compacting_, DCHECK, evacuation_candidates_, v8::internal::PagedSpace::EvictEvacuationCandidatesFromFreeLists(), heap(), mode(), NON_INCREMENTAL_COMPACTION, v8::internal::Heap::old_data_space(), v8::internal::Heap::old_pointer_space(), and v8::internal::TraceFragmentation().

Referenced by Prepare(), and v8::internal::IncrementalMarking::StartMarking().


◆ StartSweeperThreads()

void v8::internal::MarkCompactCollector::StartSweeperThreads ( )
private

Definition at line 556 of file mark-compact.cc.

556  {
557  DCHECK(free_list_old_pointer_space_.get()->IsEmpty());
558  DCHECK(free_list_old_data_space_.get()->IsEmpty());
559  sweeping_in_progress_ = true;
560  for (int i = 0; i < isolate()->num_sweeper_threads(); i++) {
561  isolate()->sweeper_threads()[i]->StartSweeping();
562  }
563  if (FLAG_job_based_sweeping) {
564  V8::GetCurrentPlatform()->CallOnBackgroundThread(
565  new SweeperTask(heap(), heap()->old_data_space()),
566  v8::Platform::kShortRunningTask);
567  V8::GetCurrentPlatform()->CallOnBackgroundThread(
568  new SweeperTask(heap(), heap()->old_pointer_space()),
569  v8::Platform::kShortRunningTask);
570  }
571 }

References v8::Platform::CallOnBackgroundThread(), DCHECK, free_list_old_data_space_, free_list_old_pointer_space_, v8::internal::V8::GetCurrentPlatform(), heap(), isolate(), v8::Platform::kShortRunningTask, v8::internal::Isolate::num_sweeper_threads(), v8::internal::SweeperThread::StartSweeping(), v8::internal::Isolate::sweeper_threads(), and sweeping_in_progress_.

Referenced by SweepSpaces().


◆ sweeping_in_progress()

bool v8::internal::MarkCompactCollector::sweeping_in_progress ( )
inline

Definition at line 647 of file mark-compact.h.

647 { return sweeping_in_progress_; }

References sweeping_in_progress_.

Referenced by v8::internal::Isolate::Deinit(), v8::internal::Heap::IdleNotification(), Prepare(), v8::internal::PagedSpace::SlowAllocateRaw(), v8::internal::IncrementalMarking::Start(), v8::internal::IncrementalMarking::Step(), and v8::internal::PagedSpace::WaitForSweeperThreadsAndRetryAllocation().


◆ SweepInParallel() [1/2]

int v8::internal::MarkCompactCollector::SweepInParallel ( Page *  page,
PagedSpace *  space 
)

Definition at line 4137 of file mark-compact.cc.

4137  {
4138  int max_freed = 0;
4139  if (page->TryParallelSweeping()) {
4140  FreeList* free_list = space == heap()->old_pointer_space()
4141  ? free_list_old_pointer_space_.get()
4142  : free_list_old_data_space_.get();
4143  FreeList private_free_list(space);
4144  max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
4145  IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
4146  free_list->Concatenate(&private_free_list);
4147  }
4148  return max_freed;
4149 }

References v8::internal::FreeList::Concatenate(), free_list_old_data_space_, free_list_old_pointer_space_, heap(), v8::internal::IGNORE_FREE_SPACE, v8::internal::IGNORE_SKIP_LIST, NULL, v8::internal::Heap::old_pointer_space(), space(), v8::internal::Sweep(), SWEEP_IN_PARALLEL, v8::internal::SWEEP_ONLY, and v8::internal::MemoryChunk::TryParallelSweeping().


◆ SweepInParallel() [2/2]

int v8::internal::MarkCompactCollector::SweepInParallel ( PagedSpace *  space,
int  required_freed_bytes 
)

Definition at line 4118 of file mark-compact.cc.

4119  {
4120  int max_freed = 0;
4121  int max_freed_overall = 0;
4122  PageIterator it(space);
4123  while (it.has_next()) {
4124  Page* p = it.next();
4125  max_freed = SweepInParallel(p, space);
4126  DCHECK(max_freed >= 0);
4127  if (required_freed_bytes > 0 && max_freed >= required_freed_bytes) {
4128  return max_freed;
4129  }
4130  max_freed_overall = Max(max_freed, max_freed_overall);
4131  if (p == space->end_of_unswept_pages()) break;
4132  }
4133  return max_freed_overall;
4134 }

References DCHECK, v8::internal::Max(), and space().

Referenced by EnsureSweepingCompleted(), v8::internal::StoreBuffer::IteratePointersToNewSpace(), v8::internal::SweeperThread::Run(), v8::internal::MarkCompactCollector::SweeperTask::Run(), and v8::internal::PagedSpace::SlowAllocateRaw().

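In the page overload, TryParallelSweeping() claims the page so two sweepers never race on it, the sweep fills a thread-private free list, and only the final Concatenate() touches shared state. The sketch below shows the claim-then-merge pattern with std::atomic; PageStub and SweepPage are illustrative stand-ins, not V8's implementation.

    #include <atomic>
    #include <cstdio>
    #include <vector>

    // Illustrative page state: PENDING can be claimed by exactly one sweeper.
    enum SweepState { SWEEPING_DONE, SWEEPING_PENDING, SWEEPING_IN_PROGRESS };

    struct PageStub {
      std::atomic<int> state{SWEEPING_PENDING};
      // Mirrors MemoryChunk::TryParallelSweeping(): compare-and-swap so only
      // one thread wins the right to sweep this page.
      bool TryParallelSweeping() {
        int expected = SWEEPING_PENDING;
        return state.compare_exchange_strong(expected, SWEEPING_IN_PROGRESS);
      }
    };

    int SweepPage(PageStub* page, std::vector<int>* shared_free_list) {
      int freed = 0;
      if (page->TryParallelSweeping()) {
        std::vector<int> private_free_list;  // thread-private, no locking
        private_free_list.push_back(256);    // pretend we freed one block
        freed = 256;
        // Merge once, at the end, like free_list->Concatenate(&private_...).
        shared_free_list->insert(shared_free_list->end(),
                                 private_free_list.begin(),
                                 private_free_list.end());
        page->state = SWEEPING_DONE;
      }
      return freed;
    }

    int main() {
      PageStub page;
      std::vector<int> shared;
      int first = SweepPage(&page, &shared);   // claims the page: frees 256
      int second = SweepPage(&page, &shared);  // already claimed: frees 0
      std::printf("first: %d, second: %d\n", first, second);
    }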

◆ SweepSpace()

void v8::internal::MarkCompactCollector::SweepSpace ( PagedSpace *  space,
SweeperType  sweeper 
)
private

Definition at line 4152 of file mark-compact.cc.

4152  {
4153  space->ClearStats();
4154 
4155  // We defensively initialize end_of_unswept_pages_ here with the first page
4156  // of the pages list.
4157  space->set_end_of_unswept_pages(space->FirstPage());
4158 
4159  PageIterator it(space);
4160 
4161  int pages_swept = 0;
4162  bool unused_page_present = false;
4163  bool parallel_sweeping_active = false;
4164 
4165  while (it.has_next()) {
4166  Page* p = it.next();
4167  DCHECK(p->parallel_sweeping() == MemoryChunk::SWEEPING_DONE);
4168 
4169  // Clear sweeping flags indicating that marking bits are still intact.
4170  p->ClearWasSwept();
4171 
4172  if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION) ||
4173  p->IsEvacuationCandidate()) {
4174  // Will be processed in EvacuateNewSpaceAndCandidates.
4175  DCHECK(evacuation_candidates_.length() > 0);
4176  continue;
4177  }
4178 
4179  // One unused page is kept, all further are released before sweeping them.
4180  if (p->LiveBytes() == 0) {
4181  if (unused_page_present) {
4182  if (FLAG_gc_verbose) {
4183  PrintF("Sweeping 0x%" V8PRIxPTR " released page.\n",
4184  reinterpret_cast<intptr_t>(p));
4185  }
4186  // Adjust unswept free bytes because releasing a page expects said
4187  // counter to be accurate for unswept pages.
4188  space->IncreaseUnsweptFreeBytes(p);
4189  space->ReleasePage(p);
4190  continue;
4191  }
4192  unused_page_present = true;
4193  }
4194 
4195  switch (sweeper) {
4196  case CONCURRENT_SWEEPING:
4197  case PARALLEL_SWEEPING:
4198  if (!parallel_sweeping_active) {
4199  if (FLAG_gc_verbose) {
4200  PrintF("Sweeping 0x%" V8PRIxPTR ".\n",
4201  reinterpret_cast<intptr_t>(p));
4202  }
4203  Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
4204  IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4205  pages_swept++;
4206  parallel_sweeping_active = true;
4207  } else {
4208  if (FLAG_gc_verbose) {
4209  PrintF("Sweeping 0x%" V8PRIxPTR " in parallel.\n",
4210  reinterpret_cast<intptr_t>(p));
4211  }
4212  p->set_parallel_sweeping(MemoryChunk::SWEEPING_PENDING);
4213  space->IncreaseUnsweptFreeBytes(p);
4214  }
4215  space->set_end_of_unswept_pages(p);
4216  break;
4217  case SEQUENTIAL_SWEEPING: {
4218  if (FLAG_gc_verbose) {
4219  PrintF("Sweeping 0x%" V8PRIxPTR ".\n", reinterpret_cast<intptr_t>(p));
4220  }
4221  if (space->identity() == CODE_SPACE && FLAG_zap_code_space) {
4222  Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
4223  ZAP_FREE_SPACE>(space, NULL, p, NULL);
4224  } else if (space->identity() == CODE_SPACE) {
4225  Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, REBUILD_SKIP_LIST,
4226  IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4227  } else {
4228  Sweep<SWEEP_ONLY, SWEEP_ON_MAIN_THREAD, IGNORE_SKIP_LIST,
4229  IGNORE_FREE_SPACE>(space, NULL, p, NULL);
4230  }
4231  pages_swept++;
4232  break;
4233  }
4234  default: { UNREACHABLE(); }
4235  }
4236  }
4237 
4238  if (FLAG_gc_verbose) {
4239  PrintF("SweepSpace: %s (%d pages swept)\n",
4240  AllocationSpaceName(space->identity()), pages_swept);
4241  }
4242 
4243  // Give pages that are queued to be freed back to the OS.
4244  heap()->FreeQueuedChunks();
4245 }

References v8::internal::AllocationSpaceName(), v8::internal::Page::ClearWasSwept(), v8::internal::CODE_SPACE, CONCURRENT_SWEEPING, DCHECK, evacuation_candidates_, v8::internal::Heap::FreeQueuedChunks(), heap(), v8::internal::IGNORE_FREE_SPACE, v8::internal::IGNORE_SKIP_LIST, v8::internal::MemoryChunk::IsEvacuationCandidate(), v8::internal::MemoryChunk::IsFlagSet(), v8::internal::MemoryChunk::LiveBytes(), NULL, PARALLEL_SWEEPING, v8::internal::MemoryChunk::parallel_sweeping(), v8::internal::PrintF(), v8::internal::REBUILD_SKIP_LIST, v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, SEQUENTIAL_SWEEPING, v8::internal::MemoryChunk::set_parallel_sweeping(), space(), v8::internal::Sweep(), SWEEP_ON_MAIN_THREAD, v8::internal::SWEEP_ONLY, v8::internal::MemoryChunk::SWEEPING_DONE, v8::internal::MemoryChunk::SWEEPING_PENDING, UNREACHABLE, V8PRIxPTR, and v8::internal::ZAP_FREE_SPACE.

Referenced by SweepSpaces().

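One detail in the loop above deserves a spelled-out example: among completely dead pages, exactly the first is kept as allocation slack and every later one is released. A tiny sketch of that keep-one policy, with live-byte counts standing in for pages:

    #include <cstdio>
    #include <vector>

    // Keep the first empty page, release the rest, as SweepSpace does with
    // its unused_page_present flag. Returns the number of released pages.
    int ReleaseExtraEmptyPages(const std::vector<int>& live_bytes_per_page) {
      bool unused_page_present = false;
      int released = 0;
      for (int live_bytes : live_bytes_per_page) {
        if (live_bytes == 0) {
          if (unused_page_present) {
            ++released;  // space->ReleasePage(p) analogue
            continue;
          }
          unused_page_present = true;  // the first empty page is kept
        }
      }
      return released;
    }

    int main() {
      std::printf("%d\n", ReleaseExtraEmptyPages({0, 42, 0, 0}));  // prints 2
    }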

◆ SweepSpaces()

void v8::internal::MarkCompactCollector::SweepSpaces ( )
private

Definition at line 4260 of file mark-compact.cc.

4260  {
4261  GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_SWEEP);
4262  double start_time = 0.0;
4263  if (FLAG_print_cumulative_gc_stat) {
4264  start_time = base::OS::TimeCurrentMillis();
4265  }
4266 
4267 #ifdef DEBUG
4268  state_ = SWEEP_SPACES;
4269 #endif
4270  SweeperType how_to_sweep = CONCURRENT_SWEEPING;
4271  if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_SWEEPING;
4272  if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_SWEEPING;
4273 
4274  MoveEvacuationCandidatesToEndOfPagesList();
4275 
4276  // Noncompacting collections simply sweep the spaces to clear the mark
4277  // bits and free the nonlive blocks (for old and map spaces). We sweep
4278  // the map space last because freeing non-live maps overwrites them and
4279  // the other spaces rely on possibly non-live maps to get the sizes for
4280  // non-live objects.
4281  {
4282  GCTracer::Scope sweep_scope(heap()->tracer(),
4283  GCTracer::Scope::MC_SWEEP_OLDSPACE);
4284  {
4285  SequentialSweepingScope scope(this);
4286  SweepSpace(heap()->old_pointer_space(), how_to_sweep);
4287  SweepSpace(heap()->old_data_space(), how_to_sweep);
4288  }
4289 
4290  if (ShouldStartSweeperThreads(how_to_sweep)) {
4291  StartSweeperThreads();
4292  }
4293 
4294  if (ShouldWaitForSweeperThreads(how_to_sweep)) {
4295  EnsureSweepingCompleted();
4296  }
4297  }
4298  RemoveDeadInvalidatedCode();
4299 
4300  {
4301  GCTracer::Scope sweep_scope(heap()->tracer(),
4302  GCTracer::Scope::MC_SWEEP_CODE);
4303  SweepSpace(heap()->code_space(), SEQUENTIAL_SWEEPING);
4304  }
4305 
4306  {
4307  GCTracer::Scope sweep_scope(heap()->tracer(),
4308  GCTracer::Scope::MC_SWEEP_CELL);
4309  SweepSpace(heap()->cell_space(), SEQUENTIAL_SWEEPING);
4310  SweepSpace(heap()->property_cell_space(), SEQUENTIAL_SWEEPING);
4311  }
4312 
4313  EvacuateNewSpaceAndCandidates();
4314 
4315  // ClearNonLiveTransitions depends on precise sweeping of map space to
4316  // detect whether unmarked map became dead in this collection or in one
4317  // of the previous ones.
4318  {
4319  GCTracer::Scope sweep_scope(heap()->tracer(),
4320  GCTracer::Scope::MC_SWEEP_MAP);
4321  SweepSpace(heap()->map_space(), SEQUENTIAL_SWEEPING);
4322  }
4323 
4324  // Deallocate unmarked objects and clear marked bits for marked objects.
4325  heap_->lo_space()->FreeUnmarkedObjects();
4326 
4327  // Deallocate evacuated candidate pages.
4328  ReleaseEvacuationCandidates();
4329 
4330  if (FLAG_print_cumulative_gc_stat) {
4331  heap_->tracer()->AddSweepingTime(base::OS::TimeCurrentMillis() -
4332  start_time);
4333  }
4334 }

References v8::internal::GCTracer::AddSweepingTime(), CONCURRENT_SWEEPING, EnsureSweepingCompleted(), EvacuateNewSpaceAndCandidates(), v8::internal::LargeObjectSpace::FreeUnmarkedObjects(), heap(), heap_, v8::internal::Heap::lo_space(), v8::internal::GCTracer::Scope::MC_SWEEP, v8::internal::GCTracer::Scope::MC_SWEEP_CELL, v8::internal::GCTracer::Scope::MC_SWEEP_CODE, v8::internal::GCTracer::Scope::MC_SWEEP_MAP, v8::internal::GCTracer::Scope::MC_SWEEP_OLDSPACE, MoveEvacuationCandidatesToEndOfPagesList(), PARALLEL_SWEEPING, ReleaseEvacuationCandidates(), RemoveDeadInvalidatedCode(), SEQUENTIAL_SWEEPING, v8::internal::ShouldStartSweeperThreads(), v8::internal::ShouldWaitForSweeperThreads(), StartSweeperThreads(), SweepSpace(), v8::base::OS::TimeCurrentMillis(), and v8::internal::Heap::tracer().

Referenced by CollectGarbage().


◆ TearDown()

void v8::internal::MarkCompactCollector::TearDown ( )

Definition at line 334 of file mark-compact.cc.

334 { AbortCompaction(); }

References AbortCompaction().

Referenced by v8::internal::Heap::TearDown().


◆ TrimDescriptorArray()

void v8::internal::MarkCompactCollector::TrimDescriptorArray ( Map *  map,
DescriptorArray *  descriptors,
int  number_of_own_descriptors 
)
private

Definition at line 2654 of file mark-compact.cc.

2656  {
2657  int number_of_descriptors = descriptors->number_of_descriptors_storage();
2658  int to_trim = number_of_descriptors - number_of_own_descriptors;
2659  if (to_trim == 0) return;
2660 
2661  heap_->RightTrimFixedArray<Heap::FROM_GC>(
2662  descriptors, to_trim * DescriptorArray::kDescriptorSize);
2663  descriptors->SetNumberOfDescriptors(number_of_own_descriptors);
2664 
2665  if (descriptors->HasEnumCache()) TrimEnumCache(map, descriptors);
2666  descriptors->Sort();
2667 }

References v8::internal::Heap::FROM_GC, v8::internal::DescriptorArray::HasEnumCache(), heap_, v8::internal::DescriptorArray::kDescriptorSize, map, v8::internal::DescriptorArray::number_of_descriptors_storage(), v8::internal::Heap::RightTrimFixedArray(), v8::internal::DescriptorArray::SetNumberOfDescriptors(), v8::internal::DescriptorArray::Sort(), and TrimEnumCache().

Referenced by ClearMapTransitions().


◆ TrimEnumCache()

void v8::internal::MarkCompactCollector::TrimEnumCache ( Map *  map,
DescriptorArray *  descriptors 
)
private

Definition at line 2670 of file mark-compact.cc.

2671  {
2672  int live_enum = map->EnumLength();
2673  if (live_enum == kInvalidEnumCacheSentinel) {
2674  live_enum = map->NumberOfDescribedProperties(OWN_DESCRIPTORS, DONT_ENUM);
2675  }
2676  if (live_enum == 0) return descriptors->ClearEnumCache();
2677 
2678  FixedArray* enum_cache = descriptors->GetEnumCache();
2679 
2680  int to_trim = enum_cache->length() - live_enum;
2681  if (to_trim <= 0) return;
2682  heap_->RightTrimFixedArray<Heap::FROM_GC>(descriptors->GetEnumCache(),
2683  to_trim);
2684 
2685  if (!descriptors->HasEnumIndicesCache()) return;
2686  FixedArray* enum_indices_cache = descriptors->GetEnumIndicesCache();
2687  heap_->RightTrimFixedArray<Heap::FROM_GC>(enum_indices_cache, to_trim);
2688 }

References v8::internal::DescriptorArray::ClearEnumCache(), DONT_ENUM, v8::internal::Heap::FROM_GC, v8::internal::DescriptorArray::GetEnumCache(), v8::internal::DescriptorArray::GetEnumIndicesCache(), v8::internal::DescriptorArray::HasEnumIndicesCache(), heap_, v8::internal::kInvalidEnumCacheSentinel, v8::internal::FixedArrayBase::length(), map, v8::internal::OWN_DESCRIPTORS, and v8::internal::Heap::RightTrimFixedArray().

Referenced by TrimDescriptorArray().

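Both trim routines shrink an array in place by computing how many trailing slots are dead (to_trim) and handing just that tail back to the heap, with an early out when nothing is dead. A small sketch of the computation, using std::vector::resize as a stand-in for Heap::RightTrimFixedArray:

    #include <cassert>
    #include <vector>

    // Right-trim `array` so only `live` leading entries remain, mirroring
    // the to_trim computation above; no-op when nothing is dead.
    template <typename T>
    void RightTrim(std::vector<T>* array, int live) {
      int to_trim = static_cast<int>(array->size()) - live;
      if (to_trim <= 0) return;  // same early-out as the V8 code
      array->resize(live);       // stand-in for returning the tail to the heap
    }

    int main() {
      std::vector<int> enum_cache = {10, 20, 30, 40, 50};
      RightTrim(&enum_cache, 3);  // live_enum == 3
      assert(enum_cache.size() == 3);
      RightTrim(&enum_cache, 7);  // to_trim <= 0: unchanged
      assert(enum_cache.size() == 3);
    }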

◆ TryPromoteObject()

bool v8::internal::MarkCompactCollector::TryPromoteObject ( HeapObject *  object,
int  object_size 
)

Definition at line 3068 of file mark-compact.cc.

3069  {
3070  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
3071 
3072  OldSpace* target_space = heap()->TargetSpace(object);
3073 
3074  DCHECK(target_space == heap()->old_pointer_space() ||
3075  target_space == heap()->old_data_space());
3076  HeapObject* target;
3077  AllocationResult allocation = target_space->AllocateRaw(object_size);
3078  if (allocation.To(&target)) {
3079  MigrateObject(target, object, object_size, target_space->identity());
3080  heap()->IncrementPromotedObjectsSize(object_size);
3081  return true;
3082  }
3083 
3084  return false;
3085 }

References v8::internal::PagedSpace::AllocateRaw(), DCHECK, heap(), v8::internal::Space::identity(), v8::internal::Heap::IncrementPromotedObjectsSize(), v8::internal::Page::kMaxRegularHeapObjectSize, MigrateObject(), v8::internal::Heap::TargetSpace(), and v8::internal::AllocationResult::To().

Referenced by DiscoverAndEvacuateBlackObjectsOnPage().

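Promotion is strictly opportunistic: ask the target old space for raw memory, and only on success migrate the object and credit the promoted-bytes counter; on failure the caller falls back to copying the object within new space. A sketch of that allocate-then-migrate shape; BumpAllocator, Stats and TryPromote are illustrative stand-ins, not V8's allocator.

    #include <cstdio>
    #include <cstring>

    // Illustrative bump allocator standing in for OldSpace::AllocateRaw().
    struct BumpAllocator {
      char arena[1024];
      size_t top = 0;
      // Returns nullptr on failure, like an AllocationResult whose To() fails.
      void* AllocateRaw(size_t size) {
        if (top + size > sizeof(arena)) return nullptr;
        void* result = arena + top;
        top += size;
        return result;
      }
    };

    struct Stats { size_t promoted_bytes = 0; };

    // Mirrors TryPromoteObject: succeed only if the target space has room.
    bool TryPromote(BumpAllocator* target_space, Stats* stats,
                    const void* object, size_t object_size) {
      void* target = target_space->AllocateRaw(object_size);
      if (target == nullptr) return false;       // caller copies elsewhere
      std::memcpy(target, object, object_size);  // MigrateObject analogue
      stats->promoted_bytes += object_size;
      return true;
    }

    int main() {
      BumpAllocator old_space;
      Stats stats;
      char obj[800] = {0};
      std::printf("%d\n", TryPromote(&old_space, &stats, obj, sizeof(obj)));  // 1
      std::printf("%d\n", TryPromote(&old_space, &stats, obj, sizeof(obj)));  // 0
    }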

◆ WillBeDeoptimized()

bool v8::internal::MarkCompactCollector::WillBeDeoptimized ( Code *  code)
private

Definition at line 3466 of file mark-compact.cc.

3466  {
3467  return code->is_optimized_code() && code->marked_for_deoptimization();
3468 }

References v8::internal::Code::is_optimized_code(), and v8::internal::Code::marked_for_deoptimization().

Referenced by ClearNonLiveDependentCodeInGroup().


Friends And Related Function Documentation

◆ CodeMarkingVisitor

friend class CodeMarkingVisitor
friend

Definition at line 729 of file mark-compact.h.

◆ Heap

friend class Heap
friend

Definition at line 893 of file mark-compact.h.

◆ MarkCompactMarkingVisitor

friend class MarkCompactMarkingVisitor
friend

Definition at line 728 of file mark-compact.h.

◆ MarkingVisitor

friend class MarkingVisitor
friend

Definition at line 727 of file mark-compact.h.

◆ RootMarkingVisitor

friend class RootMarkingVisitor
friend

Definition at line 726 of file mark-compact.h.

◆ SharedFunctionInfoMarkingVisitor

friend class SharedFunctionInfoMarkingVisitor
friend

Definition at line 730 of file mark-compact.h.

Member Data Documentation

◆ abort_incremental_marking_

bool v8::internal::MarkCompactCollector::abort_incremental_marking_
private

Definition at line 693 of file mark-compact.h.

Referenced by abort_incremental_marking(), Prepare(), and SetFlags().

◆ code_flusher_

CodeFlusher* v8::internal::MarkCompactCollector::code_flusher_
private

◆ compacting_

bool v8::internal::MarkCompactCollector::compacting_
private

◆ evacuation_candidates_

List<Page*> v8::internal::MarkCompactCollector::evacuation_candidates_
private

◆ free_list_old_data_space_

SmartPointer<FreeList> v8::internal::MarkCompactCollector::free_list_old_data_space_
private

Definition at line 890 of file mark-compact.h.

Referenced by RefillFreeList(), SetUp(), StartSweeperThreads(), and SweepInParallel().

◆ free_list_old_pointer_space_

SmartPointer<FreeList> v8::internal::MarkCompactCollector::free_list_old_pointer_space_
private

Definition at line 891 of file mark-compact.h.

Referenced by RefillFreeList(), SetUp(), StartSweeperThreads(), and SweepInParallel().

◆ have_code_to_deoptimize_

bool v8::internal::MarkCompactCollector::have_code_to_deoptimize_
private

Definition at line 885 of file mark-compact.h.

Referenced by ClearDependentCode(), and Finish().

◆ heap_

Heap* v8::internal::MarkCompactCollector::heap_
private

◆ invalidated_code_

List<Code*> v8::internal::MarkCompactCollector::invalidated_code_
private

◆ kMultiFreeEncoding

const uint32_t v8::internal::MarkCompactCollector::kMultiFreeEncoding = 1
static

Definition at line 538 of file mark-compact.h.

◆ kSingleFreeEncoding

const uint32_t v8::internal::MarkCompactCollector::kSingleFreeEncoding = 0
static

Definition at line 537 of file mark-compact.h.

◆ marking_deque_

MarkingDeque v8::internal::MarkCompactCollector::marking_deque_
private

◆ marking_parity_

MarkingParity v8::internal::MarkCompactCollector::marking_parity_
private

Definition at line 695 of file mark-compact.h.

Referenced by CollectGarbage(), and marking_parity().

◆ migration_slots_buffer_

SlotsBuffer* v8::internal::MarkCompactCollector::migration_slots_buffer_
private

◆ pending_sweeper_jobs_semaphore_

base::Semaphore v8::internal::MarkCompactCollector::pending_sweeper_jobs_semaphore_
private

◆ reduce_memory_footprint_

bool v8::internal::MarkCompactCollector::reduce_memory_footprint_
private

◆ sequential_sweeping_

bool v8::internal::MarkCompactCollector::sequential_sweeping_
private

Definition at line 708 of file mark-compact.h.

Referenced by sequential_sweeping(), and set_sequential_sweeping().

◆ slots_buffer_allocator_

SlotsBufferAllocator v8::internal::MarkCompactCollector::slots_buffer_allocator_
private

◆ sweeping_in_progress_

bool v8::internal::MarkCompactCollector::sweeping_in_progress_
private

◆ was_marked_incrementally_

bool v8::internal::MarkCompactCollector::was_marked_incrementally_
private

The documentation for this class was generated from the following files:

mark-compact.h
mark-compact.cc