V8 Project
v8::internal::IncrementalMarking Class Reference

#include <incremental-marking.h>

+ Collaboration diagram for v8::internal::IncrementalMarking:

Public Types

enum  State { STOPPED , SWEEPING , MARKING , COMPLETE }
 
enum  CompletionAction { GC_VIA_STACK_GUARD , NO_GC_VIA_STACK_GUARD }
 
enum  CompactionFlag { ALLOW_COMPACTION , PREVENT_COMPACTION }
 

Public Member Functions

 IncrementalMarking (Heap *heap)
 
void TearDown ()
 
State state ()
 
bool should_hurry ()
 
void set_should_hurry (bool val)
 
bool IsStopped ()
 
 INLINE (bool IsMarking())
 
bool IsMarkingIncomplete ()
 
bool IsComplete ()
 
bool WorthActivating ()
 
bool ShouldActivate ()
 
void Start (CompactionFlag flag=ALLOW_COMPACTION)
 
void Stop ()
 
void PrepareForScavenge ()
 
void UpdateMarkingDequeAfterScavenge ()
 
void Hurry ()
 
void Finalize ()
 
void Abort ()
 
void MarkingComplete (CompletionAction action)
 
void OldSpaceStep (intptr_t allocated)
 
void Step (intptr_t allocated, CompletionAction action, bool force_marking=false)
 
void RestartIfNotMarking ()
 
 INLINE (bool BaseRecordWrite(HeapObject *obj, Object **slot, Object *value))
 
 INLINE (void RecordWrite(HeapObject *obj, Object **slot, Object *value))
 
 INLINE (void RecordWriteIntoCode(HeapObject *obj, RelocInfo *rinfo, Object *value))
 
 INLINE (void RecordWriteOfCodeEntry(JSFunction *host, Object **slot, Code *value))
 
void RecordWriteSlow (HeapObject *obj, Object **slot, Object *value)
 
void RecordWriteIntoCodeSlow (HeapObject *obj, RelocInfo *rinfo, Object *value)
 
void RecordWriteOfCodeEntrySlow (JSFunction *host, Object **slot, Code *value)
 
void RecordCodeTargetPatch (Code *host, Address pc, HeapObject *value)
 
void RecordCodeTargetPatch (Address pc, HeapObject *value)
 
void RecordWrites (HeapObject *obj)
 
void BlackToGreyAndUnshift (HeapObject *obj, MarkBit mark_bit)
 
void WhiteToGreyAndPush (HeapObject *obj, MarkBit mark_bit)
 
void SetOldSpacePageFlags (MemoryChunk *chunk)
 
void SetNewSpacePageFlags (NewSpacePage *chunk)
 
MarkingDeque * marking_deque ()
 
bool IsCompacting ()
 
void ActivateGeneratedStub (Code *stub)
 
void NotifyOfHighPromotionRate ()
 
void EnterNoMarkingScope ()
 
void LeaveNoMarkingScope ()
 
void UncommitMarkingDeque ()
 
void NotifyIncompleteScanOfObject (int unscanned_bytes)
 

Static Public Member Functions

static void Initialize ()
 
static void RecordWriteFromCode (HeapObject *obj, Object **slot, Isolate *isolate)
 

Static Public Attributes

static const intptr_t kAllocatedThreshold = 65536
 
static const intptr_t kWriteBarriersInvokedThreshold = 32768
 
static const intptr_t kInitialMarkingSpeed = 1
 
static const intptr_t kFastMarking = 3
 
static const intptr_t kMarkingSpeedAccellerationInterval = 1024
 
static const intptr_t kMarkingSpeedAccelleration = 2
 
static const intptr_t kMaxMarkingSpeed = 1000
 

Private Member Functions

int64_t SpaceLeftInOldSpace ()
 
void SpeedUp ()
 
void ResetStepCounters ()
 
void StartMarking (CompactionFlag flag)
 
void ActivateIncrementalWriteBarrier (PagedSpace *space)
 
void ActivateIncrementalWriteBarrier ()
 
void DeactivateIncrementalWriteBarrier ()
 
void EnsureMarkingDequeIsCommitted ()
 
 INLINE (void ProcessMarkingDeque())
 
 INLINE (intptr_t ProcessMarkingDeque(intptr_t bytes_to_process))
 
 INLINE (void VisitObject(Map *map, HeapObject *obj, int size))
 
 DISALLOW_IMPLICIT_CONSTRUCTORS (IncrementalMarking)
 

Static Private Member Functions

static void ActivateIncrementalWriteBarrier (NewSpace *space)
 
static void DeactivateIncrementalWriteBarrierForSpace (PagedSpace *space)
 
static void DeactivateIncrementalWriteBarrierForSpace (NewSpace *space)
 
static void SetOldSpacePageFlags (MemoryChunk *chunk, bool is_marking, bool is_compacting)
 
static void SetNewSpacePageFlags (NewSpacePage *chunk, bool is_marking)
 

Private Attributes

Heap * heap_
 
State state_
 
bool is_compacting_
 
base::VirtualMemory * marking_deque_memory_
 
bool marking_deque_memory_committed_
 
MarkingDeque marking_deque_
 
int steps_count_
 
int64_t old_generation_space_available_at_start_of_incremental_
 
int64_t old_generation_space_used_at_start_of_incremental_
 
int64_t bytes_rescanned_
 
bool should_hurry_
 
int marking_speed_
 
intptr_t bytes_scanned_
 
intptr_t allocated_
 
intptr_t write_barriers_invoked_since_last_step_
 
int no_marking_scope_depth_
 
int unscanned_bytes_of_large_object_
 

Detailed Description

Definition at line 17 of file incremental-marking.h.

Member Enumeration Documentation

◆ CompactionFlag

◆ CompletionAction

◆ State

Constructor & Destructor Documentation

◆ IncrementalMarking()

v8::internal::IncrementalMarking::IncrementalMarking ( Heap * heap)
explicit

Definition at line 19 of file incremental-marking.cc.

20  : heap_(heap),
21  state_(STOPPED),
24  steps_count_(0),
27  should_hurry_(false),
28  marking_speed_(0),
29  allocated_(0),
base::VirtualMemory * marking_deque_memory_
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL

Member Function Documentation

◆ Abort()

void v8::internal::IncrementalMarking::Abort ( )

Definition at line 757 of file incremental-marking.cc.

757  {
758  if (IsStopped()) return;
759  if (FLAG_trace_incremental_marking) {
760  PrintF("[IncrementalMarking] Aborting.\n");
761  }
765  if (IsMarking()) {
769 
770  if (is_compacting_) {
771  LargeObjectIterator it(heap_->lo_space());
772  for (HeapObject* obj = it.Next(); obj != NULL; obj = it.Next()) {
773  Page* p = Page::FromAddress(obj->address());
774  if (p->IsFlagSet(Page::RESCAN_ON_EVACUATION)) {
775  p->ClearFlag(Page::RESCAN_ON_EVACUATION);
776  }
777  }
778  }
779  }
780  heap_->isolate()->stack_guard()->ClearGC();
781  state_ = STOPPED;
782  is_compacting_ = false;
783 }
LargeObjectSpace * lo_space()
Definition: heap.h:600
Isolate * isolate()
Definition: heap-inl.h:589
NewSpace * new_space()
Definition: heap.h:593
StackGuard * stack_guard()
Definition: isolate.h:872
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:276
void LowerInlineAllocationLimit(intptr_t step)
Definition: spaces.h:2473
static void PatchIncrementalMarkingRecordWriteStubs(Heap *heap, RecordWriteStub::Mode mode)
void PrintF(const char *format,...)
Definition: utils.cc:80

References v8::internal::MemoryChunk::ClearFlag(), DeactivateIncrementalWriteBarrier(), v8::internal::MemoryChunk::FromAddress(), heap_, is_compacting_, v8::internal::MemoryChunk::IsFlagSet(), v8::internal::Heap::isolate(), IsStopped(), v8::internal::Heap::lo_space(), v8::internal::NewSpace::LowerInlineAllocationLimit(), v8::internal::Heap::new_space(), v8::internal::LargeObjectIterator::Next(), NULL, v8::internal::PatchIncrementalMarkingRecordWriteStubs(), v8::internal::PrintF(), v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, ResetStepCounters(), set_should_hurry(), v8::internal::Isolate::stack_guard(), state_, STOPPED, and v8::internal::RecordWriteStub::STORE_BUFFER_ONLY.

Referenced by v8::internal::MarkCompactCollector::MarkLiveObjects(), and v8::internal::MarkCompactCollector::Prepare().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ActivateGeneratedStub()

void v8::internal::IncrementalMarking::ActivateGeneratedStub ( Code * stub)

Definition at line 448 of file incremental-marking.cc.

448  {
450 
451  if (!IsMarking()) {
452  // Initially stub is generated in STORE_BUFFER_ONLY mode thus
453  // we don't need to do anything if incremental marking is
454  // not active.
455  } else if (IsCompacting()) {
457  } else {
459  }
460 }
static void Patch(Code *stub, Mode mode)
static Mode GetMode(Code *stub)
#define DCHECK(condition)
Definition: logging.h:205

References DCHECK, v8::internal::RecordWriteStub::GetMode(), v8::internal::RecordWriteStub::INCREMENTAL, v8::internal::RecordWriteStub::INCREMENTAL_COMPACTION, IsCompacting(), v8::internal::RecordWriteStub::Patch(), and v8::internal::RecordWriteStub::STORE_BUFFER_ONLY.

Referenced by v8::internal::RecordWriteStub::Activate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ActivateIncrementalWriteBarrier() [1/3]

void v8::internal::IncrementalMarking::ActivateIncrementalWriteBarrier ( )
private

Definition at line 407 of file incremental-marking.cc.

407  {
415 
416  LargePage* lop = heap_->lo_space()->first_page();
417  while (lop->is_valid()) {
419  lop = lop->next_page();
420  }
421 }
OldSpace * old_pointer_space()
Definition: heap.h:594
PropertyCellSpace * property_cell_space()
Definition: heap.h:599
OldSpace * code_space()
Definition: heap.h:596
CellSpace * cell_space()
Definition: heap.h:598
OldSpace * old_data_space()
Definition: heap.h:595
MapSpace * map_space()
Definition: heap.h:597
void SetOldSpacePageFlags(MemoryChunk *chunk)

References v8::internal::Heap::cell_space(), v8::internal::Heap::code_space(), v8::internal::LargeObjectSpace::first_page(), heap_, is_compacting_, v8::internal::MemoryChunk::is_valid(), v8::internal::Heap::lo_space(), v8::internal::Heap::map_space(), v8::internal::Heap::new_space(), v8::internal::LargePage::next_page(), v8::internal::Heap::old_data_space(), v8::internal::Heap::old_pointer_space(), v8::internal::Heap::property_cell_space(), and SetOldSpacePageFlags().

Referenced by StartMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ActivateIncrementalWriteBarrier() [2/3]

void v8::internal::IncrementalMarking::ActivateIncrementalWriteBarrier ( NewSpace * space)
staticprivate

Definition at line 398 of file incremental-marking.cc.

398  {
399  NewSpacePageIterator it(space->ToSpaceStart(), space->ToSpaceEnd());
400  while (it.has_next()) {
401  NewSpacePage* p = it.next();
402  SetNewSpacePageFlags(p, true);
403  }
404 }
void SetNewSpacePageFlags(NewSpacePage *chunk)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi space(in MBytes)

References SetNewSpacePageFlags(), and space().

+ Here is the call graph for this function:

◆ ActivateIncrementalWriteBarrier() [3/3]

void v8::internal::IncrementalMarking::ActivateIncrementalWriteBarrier ( PagedSpace * space)
private

Definition at line 389 of file incremental-marking.cc.

389  {
390  PageIterator it(space);
391  while (it.has_next()) {
392  Page* p = it.next();
394  }
395 }

References is_compacting_, SetOldSpacePageFlags(), and space().

+ Here is the call graph for this function:

◆ BlackToGreyAndUnshift()

void v8::internal::IncrementalMarking::BlackToGreyAndUnshift ( HeapObject * obj,
MarkBit  mark_bit 
)
inline

Definition at line 81 of file incremental-marking-inl.h.

82  {
83  DCHECK(Marking::MarkBitFrom(obj) == mark_bit);
84  DCHECK(obj->Size() >= 2 * kPointerSize);
85  DCHECK(IsMarking());
86  Marking::BlackToGrey(mark_bit);
87  int obj_size = obj->Size();
88  MemoryChunk::IncrementLiveBytesFromGC(obj->address(), -obj_size);
89  bytes_scanned_ -= obj_size;
90  int64_t old_bytes_rescanned = bytes_rescanned_;
91  bytes_rescanned_ = old_bytes_rescanned + obj_size;
92  if ((bytes_rescanned_ >> 20) != (old_bytes_rescanned >> 20)) {
94  // If we have queued twice the heap size for rescanning then we are
95  // going around in circles, scanning the same objects again and again
96  // as the program mutates the heap faster than we can incrementally
97  // trace it. In this case we switch to non-incremental marking in
98  // order to finish off this marking phase.
99  if (FLAG_trace_gc) {
100  PrintPID("Hurrying incremental marking because of lack of progress\n");
101  }
103  }
104  }
105 
106  marking_deque_.UnshiftGrey(obj);
107 }
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:4967
static const intptr_t kMaxMarkingSpeed
static void IncrementLiveBytesFromGC(Address address, int by)
Definition: spaces.h:517
const int kPointerSize
Definition: globals.h:129
void PrintPID(const char *format,...)
Definition: utils.cc:96

References v8::internal::HeapObject::address(), bytes_rescanned_, bytes_scanned_, DCHECK, heap_, v8::internal::MemoryChunk::IncrementLiveBytesFromGC(), kMaxMarkingSpeed, v8::internal::kPointerSize, marking_deque_, marking_speed_, v8::internal::PrintPID(), v8::internal::Heap::PromotedSpaceSizeOfObjects(), and v8::internal::HeapObject::Size().

Referenced by RecordWriteIntoCodeSlow(), and RecordWrites().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeactivateIncrementalWriteBarrier()

void v8::internal::IncrementalMarking::DeactivateIncrementalWriteBarrier ( )
private

Definition at line 372 of file incremental-marking.cc.

References v8::internal::Heap::cell_space(), v8::internal::Heap::code_space(), DeactivateIncrementalWriteBarrierForSpace(), v8::internal::LargeObjectSpace::first_page(), heap_, v8::internal::MemoryChunk::is_valid(), v8::internal::Heap::lo_space(), v8::internal::Heap::map_space(), v8::internal::Heap::new_space(), v8::internal::LargePage::next_page(), v8::internal::Heap::old_data_space(), v8::internal::Heap::old_pointer_space(), v8::internal::Heap::property_cell_space(), and SetOldSpacePageFlags().

Referenced by Abort(), and Finalize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeactivateIncrementalWriteBarrierForSpace() [1/2]

void v8::internal::IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace ( NewSpace * space)
staticprivate

Definition at line 362 of file incremental-marking.cc.

363  {
364  NewSpacePageIterator it(space);
365  while (it.has_next()) {
366  NewSpacePage* p = it.next();
367  SetNewSpacePageFlags(p, false);
368  }
369 }

References SetNewSpacePageFlags(), and space().

+ Here is the call graph for this function:

◆ DeactivateIncrementalWriteBarrierForSpace() [2/2]

void v8::internal::IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace ( PagedSpace * space)
staticprivate

Definition at line 352 of file incremental-marking.cc.

353  {
354  PageIterator it(space);
355  while (it.has_next()) {
356  Page* p = it.next();
357  SetOldSpacePageFlags(p, false, false);
358  }
359 }

References SetOldSpacePageFlags(), and space().

Referenced by DeactivateIncrementalWriteBarrier().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DISALLOW_IMPLICIT_CONSTRUCTORS()

v8::internal::IncrementalMarking::DISALLOW_IMPLICIT_CONSTRUCTORS ( IncrementalMarking  )
private

◆ EnsureMarkingDequeIsCommitted()

void v8::internal::IncrementalMarking::EnsureMarkingDequeIsCommitted ( )
private

Definition at line 484 of file incremental-marking.cc.

484  {
485  if (marking_deque_memory_ == NULL) {
486  marking_deque_memory_ = new base::VirtualMemory(4 * MB);
487  }
489  bool success = marking_deque_memory_->Commit(
490  reinterpret_cast<Address>(marking_deque_memory_->address()),
492  false); // Not executable.
493  CHECK(success);
495  }
496 }
bool Commit(void *address, size_t size, bool is_executable)
#define CHECK(condition)
Definition: logging.h:36
byte * Address
Definition: globals.h:101
const int MB
Definition: globals.h:107

References v8::base::VirtualMemory::address(), CHECK, v8::base::VirtualMemory::Commit(), marking_deque_memory_, marking_deque_memory_committed_, v8::internal::MB, NULL, and v8::base::VirtualMemory::size().

Referenced by StartMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnterNoMarkingScope()

void v8::internal::IncrementalMarking::EnterNoMarkingScope ( )
inline

Definition at line 158 of file incremental-marking.h.

References no_marking_scope_depth_.

Referenced by v8::internal::DescriptorArray::WhitenessWitness::WhitenessWitness().

+ Here is the caller graph for this function:

◆ Finalize()

void v8::internal::IncrementalMarking::Finalize ( )

◆ Hurry()

void v8::internal::IncrementalMarking::Hurry ( )

Definition at line 708 of file incremental-marking.cc.

708  {
709  if (state() == MARKING) {
710  double start = 0.0;
711  if (FLAG_trace_incremental_marking || FLAG_print_cumulative_gc_stat) {
712  start = base::OS::TimeCurrentMillis();
713  if (FLAG_trace_incremental_marking) {
714  PrintF("[IncrementalMarking] Hurry\n");
715  }
716  }
717  // TODO(gc) hurry can mark objects it encounters black as mutator
718  // was stopped.
719  ProcessMarkingDeque();
720  state_ = COMPLETE;
721  if (FLAG_trace_incremental_marking || FLAG_print_cumulative_gc_stat) {
722  double end = base::OS::TimeCurrentMillis();
723  double delta = end - start;
724  heap_->tracer()->AddMarkingTime(delta);
725  if (FLAG_trace_incremental_marking) {
726  PrintF("[IncrementalMarking] Complete (hurry), spent %d ms.\n",
727  static_cast<int>(delta));
728  }
729  }
730  }
731 
732  if (FLAG_cleanup_code_caches_at_gc) {
733  PolymorphicCodeCache* poly_cache = heap_->polymorphic_code_cache();
734  Marking::GreyToBlack(Marking::MarkBitFrom(poly_cache));
735  MemoryChunk::IncrementLiveBytesFromGC(poly_cache->address(),
737  }
738 
739  Object* context = heap_->native_contexts_list();
740  while (!context->IsUndefined()) {
741  // GC can happen when the context is not fully initialized,
742  // so the cache can be undefined.
743  HeapObject* cache = HeapObject::cast(
745  if (!cache->IsUndefined()) {
746  MarkBit mark_bit = Marking::MarkBitFrom(cache);
747  if (Marking::IsGrey(mark_bit)) {
748  Marking::GreyToBlack(mark_bit);
749  MemoryChunk::IncrementLiveBytesFromGC(cache->address(), cache->Size());
750  }
751  }
752  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
753  }
754 }
static double TimeCurrentMillis()
static Context * cast(Object *context)
Definition: contexts.h:255
Object * get(int index)
Definition: objects-inl.h:2165
void AddMarkingTime(double duration)
Definition: gc-tracer.h:260
Object * native_contexts_list() const
Definition: heap.h:793
GCTracer * tracer()
Definition: heap.h:1166
kSerializedDataOffset Object
Definition: objects-inl.h:5322

References v8::internal::GCTracer::AddMarkingTime(), v8::internal::HeapObject::address(), v8::internal::Context::cast(), COMPLETE, v8::internal::FixedArray::get(), heap_, v8::internal::MemoryChunk::IncrementLiveBytesFromGC(), v8::internal::PolymorphicCodeCache::kSize, MARKING, v8::internal::Heap::native_contexts_list(), v8::internal::Context::NEXT_CONTEXT_LINK, v8::internal::Context::NORMALIZED_MAP_CACHE_INDEX, v8::internal::PrintF(), v8::internal::HeapObject::Size(), state(), state_, v8::base::OS::TimeCurrentMillis(), and v8::internal::Heap::tracer().

Referenced by Finalize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Initialize()

void v8::internal::IncrementalMarking::Initialize ( )
static

Definition at line 311 of file incremental-marking.cc.

References v8::internal::IncrementalMarkingMarkingVisitor::Initialize().

Referenced by v8::internal::MarkCompactCollector::Initialize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ INLINE() [1/8]

v8::internal::IncrementalMarking::INLINE ( bool  BaseRecordWrite(HeapObject *obj, Object **slot, Object *value))

◆ INLINE() [2/8]

v8::internal::IncrementalMarking::INLINE ( bool   IsMarking())
inline

Definition at line 39 of file incremental-marking.h.

39 { return state() >= MARKING; }

References MARKING, and state().

+ Here is the call graph for this function:

◆ INLINE() [3/8]

v8::internal::IncrementalMarking::INLINE ( intptr_t  ProcessMarkingDeque(intptr_t bytes_to_process))
private

◆ INLINE() [4/8]

v8::internal::IncrementalMarking::INLINE ( void   ProcessMarkingDeque())
private

◆ INLINE() [5/8]

v8::internal::IncrementalMarking::INLINE ( void  RecordWrite(HeapObject *obj, Object **slot, Object *value))

◆ INLINE() [6/8]

v8::internal::IncrementalMarking::INLINE ( void  RecordWriteIntoCode(HeapObject *obj, RelocInfo *rinfo, Object *value))

◆ INLINE() [7/8]

v8::internal::IncrementalMarking::INLINE ( void  RecordWriteOfCodeEntry(JSFunction *host, Object **slot, Code *value))

◆ INLINE() [8/8]

v8::internal::IncrementalMarking::INLINE ( void  VisitObject(Map *map, HeapObject *obj, int size))
private

◆ IsCompacting()

bool v8::internal::IncrementalMarking::IsCompacting ( )
inline

Definition at line 140 of file incremental-marking.h.

140 { return IsMarking() && is_compacting_; }

References is_compacting_.

Referenced by ActivateGeneratedStub(), v8::internal::MarkCompactCollector::InvalidateCode(), v8::internal::Heap::SelectScavengingVisitorsTable(), and SetOldSpacePageFlags().

+ Here is the caller graph for this function:

◆ IsComplete()

bool v8::internal::IncrementalMarking::IsComplete ( )
inline

Definition at line 43 of file incremental-marking.h.

43 { return state() == COMPLETE; }

References COMPLETE, and state().

+ Here is the call graph for this function:

◆ IsMarkingIncomplete()

bool v8::internal::IncrementalMarking::IsMarkingIncomplete ( )
inline

Definition at line 41 of file incremental-marking.h.

41 { return state() == MARKING; }

References MARKING, and state().

Referenced by v8::internal::FreeList::Allocate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsStopped()

bool v8::internal::IncrementalMarking::IsStopped ( )
inline

Definition at line 37 of file incremental-marking.h.

37 { return state() == STOPPED; }

References state(), and STOPPED.

Referenced by Abort(), v8::internal::MarkCompactCollector::CollectGarbage(), v8::internal::Heap::IdleNotification(), OldSpaceStep(), and v8::internal::Heap::WorthActivatingIncrementalMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ LeaveNoMarkingScope()

void v8::internal::IncrementalMarking::LeaveNoMarkingScope ( )
inline

Definition at line 160 of file incremental-marking.h.

References no_marking_scope_depth_.

◆ marking_deque()

MarkingDeque* v8::internal::IncrementalMarking::marking_deque ( )
inline

Definition at line 138 of file incremental-marking.h.

138 { return &marking_deque_; }

References marking_deque_.

Referenced by v8::internal::MarkCompactCollector::MarkLiveObjects(), and v8::internal::IncrementalMarkingMarkingVisitor::VisitFixedArrayIncremental().

+ Here is the caller graph for this function:

◆ MarkingComplete()

void v8::internal::IncrementalMarking::MarkingComplete ( CompletionAction  action)

Definition at line 801 of file incremental-marking.cc.

801  {
802  state_ = COMPLETE;
803  // We will set the stack guard to request a GC now. This will mean the rest
804  // of the GC gets performed as soon as possible (we can't do a GC here in a
805  // record-write context). If a few things get allocated between now and then
806  // that shouldn't make us do a scavenge and keep being incremental, so we set
807  // the should-hurry flag to indicate that there can't be much work left to do.
808  set_should_hurry(true);
809  if (FLAG_trace_incremental_marking) {
810  PrintF("[IncrementalMarking] Complete (normal).\n");
811  }
812  if (action == GC_VIA_STACK_GUARD) {
813  heap_->isolate()->stack_guard()->RequestGC();
814  }
815 }

References COMPLETE, GC_VIA_STACK_GUARD, heap_, v8::internal::Heap::isolate(), v8::internal::PrintF(), set_should_hurry(), v8::internal::Isolate::stack_guard(), and state_.

Referenced by Step().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NotifyIncompleteScanOfObject()

void v8::internal::IncrementalMarking::NotifyIncompleteScanOfObject ( int  unscanned_bytes)
inline

Definition at line 164 of file incremental-marking.h.

164  {
165  unscanned_bytes_of_large_object_ = unscanned_bytes;
166  }

References unscanned_bytes_of_large_object_.

Referenced by v8::internal::IncrementalMarkingMarkingVisitor::VisitFixedArrayIncremental().

+ Here is the caller graph for this function:

◆ NotifyOfHighPromotionRate()

void v8::internal::IncrementalMarking::NotifyOfHighPromotionRate ( )
inline

Definition at line 144 of file incremental-marking.h.

144  {
145  if (IsMarking()) {
147  if (FLAG_trace_gc) {
148  PrintPID(
149  "Increasing marking speed to %d "
150  "due to high promotion rate\n",
151  static_cast<int>(kFastMarking));
152  }
154  }
155  }
156  }

References kFastMarking, marking_speed_, and v8::internal::PrintPID().

Referenced by v8::internal::Heap::PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldSpaceStep()

void v8::internal::IncrementalMarking::OldSpaceStep ( intptr_t  allocated)

Definition at line 818 of file incremental-marking.cc.

818  {
819  if (IsStopped() && ShouldActivate()) {
820  // TODO(hpayer): Let's play safe for now, but compaction should be
821  // in principle possible.
823  } else {
825  }
826 }
void Step(intptr_t allocated, CompletionAction action, bool force_marking=false)
void Start(CompactionFlag flag=ALLOW_COMPACTION)
static const intptr_t kInitialMarkingSpeed

References GC_VIA_STACK_GUARD, IsStopped(), kFastMarking, kInitialMarkingSpeed, PREVENT_COMPACTION, ShouldActivate(), Start(), and Step().

Referenced by v8::internal::FreeList::Allocate(), and v8::internal::LargeObjectSpace::AllocateRaw().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrepareForScavenge()

void v8::internal::IncrementalMarking::PrepareForScavenge ( )

Definition at line 592 of file incremental-marking.cc.

592  {
593  if (!IsMarking()) return;
594  NewSpacePageIterator it(heap_->new_space()->FromSpaceStart(),
596  while (it.has_next()) {
597  Bitmap::Clear(it.next());
598  }
599 }
static void Clear(MemoryChunk *chunk)
Definition: spaces-inl.h:21
Address FromSpaceEnd()
Definition: spaces.h:2485
Address FromSpaceStart()
Definition: spaces.h:2484

References v8::internal::Bitmap::Clear(), v8::internal::NewSpace::FromSpaceEnd(), v8::internal::NewSpace::FromSpaceStart(), heap_, and v8::internal::Heap::new_space().

Referenced by v8::internal::Heap::Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordCodeTargetPatch() [1/2]

void v8::internal::IncrementalMarking::RecordCodeTargetPatch ( Address  pc,
HeapObject * value 
)

Definition at line 78 of file incremental-marking.cc.

78  {
79  if (IsMarking()) {
80  Code* host = heap_->isolate()
83  RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
84  RecordWriteIntoCode(host, &rinfo, value);
85  }
86 }
Code * GcSafeFindCodeForInnerPointer(Address inner_pointer)
Definition: frames.cc:1468
InnerPointerToCodeCache * inner_pointer_to_code_cache()
Definition: isolate.h:911
const Register pc

References v8::internal::RelocInfo::CODE_TARGET, v8::internal::InnerPointerToCodeCache::GcSafeFindCodeForInnerPointer(), heap_, v8::internal::Isolate::inner_pointer_to_code_cache(), v8::internal::Heap::isolate(), and v8::internal::pc.

+ Here is the call graph for this function:

◆ RecordCodeTargetPatch() [2/2]

void v8::internal::IncrementalMarking::RecordCodeTargetPatch ( Code * host,
Address  pc,
HeapObject * value 
)

Definition at line 69 of file incremental-marking.cc.

70  {
71  if (IsMarking()) {
72  RelocInfo rinfo(pc, RelocInfo::CODE_TARGET, 0, host);
73  RecordWriteIntoCode(host, &rinfo, value);
74  }
75 }

References v8::internal::RelocInfo::CODE_TARGET, and v8::internal::pc.

Referenced by v8::internal::IC::SetTargetAtAddress().

+ Here is the caller graph for this function:

◆ RecordWriteFromCode()

void v8::internal::IncrementalMarking::RecordWriteFromCode ( HeapObject * obj,
Object **  slot,
Isolate * isolate 
)
static

Definition at line 50 of file incremental-marking.cc.

51  {
52  DCHECK(obj->IsHeapObject());
53  IncrementalMarking* marking = isolate->heap()->incremental_marking();
54 
55  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
56  int counter = chunk->write_barrier_counter();
58  marking->write_barriers_invoked_since_last_step_ +=
60  chunk->write_barrier_counter();
61  chunk->set_write_barrier_counter(
63  }
64 
65  marking->RecordWrite(obj, slot, *slot);
66 }
static const int kWriteBarrierCounterGranularity
Definition: spaces.h:362

References v8::internal::HeapObject::address(), DCHECK, v8::internal::MemoryChunk::FromAddress(), v8::internal::Isolate::heap(), v8::internal::Heap::incremental_marking(), v8::internal::MemoryChunk::kWriteBarrierCounterGranularity, v8::internal::MemoryChunk::set_write_barrier_counter(), v8::internal::MemoryChunk::write_barrier_counter(), and write_barriers_invoked_since_last_step_.

+ Here is the call graph for this function:

◆ RecordWriteIntoCodeSlow()

void v8::internal::IncrementalMarking::RecordWriteIntoCodeSlow ( HeapObject obj,
RelocInfo rinfo,
Object value 
)

Definition at line 100 of file incremental-marking.cc.

102  {
103  MarkBit value_bit = Marking::MarkBitFrom(HeapObject::cast(value));
104  if (Marking::IsWhite(value_bit)) {
105  MarkBit obj_bit = Marking::MarkBitFrom(obj);
106  if (Marking::IsBlack(obj_bit)) {
107  BlackToGreyAndUnshift(obj, obj_bit);
109  }
110  // Object is either grey or white. It will be scanned if it survives.
111  return;
112  }
113 
114  if (is_compacting_) {
115  MarkBit obj_bit = Marking::MarkBitFrom(obj);
116  if (Marking::IsBlack(obj_bit)) {
117  // Object is not going to be rescanned. We need to record the slot.
119  Code::cast(value));
120  }
121  }
122 }
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1197
void BlackToGreyAndUnshift(HeapObject *obj, MarkBit mark_bit)
void RecordRelocSlot(RelocInfo *rinfo, Object *target)

References BlackToGreyAndUnshift(), heap_, is_compacting_, v8::internal::Heap::mark_compact_collector(), v8::internal::MarkCompactCollector::RecordRelocSlot(), and RestartIfNotMarking().

+ Here is the call graph for this function:

◆ RecordWriteOfCodeEntrySlow()

void v8::internal::IncrementalMarking::RecordWriteOfCodeEntrySlow ( JSFunction host,
Object **  slot,
Code value 
)

Definition at line 89 of file incremental-marking.cc.

91  {
92  if (BaseRecordWrite(host, slot, value)) {
93  DCHECK(slot != NULL);
95  reinterpret_cast<Address>(slot), value);
96  }
97 }
void RecordCodeEntrySlot(Address slot, Code *target)

References DCHECK, heap_, v8::internal::Heap::mark_compact_collector(), NULL, and v8::internal::MarkCompactCollector::RecordCodeEntrySlot().

+ Here is the call graph for this function:

◆ RecordWrites()

void v8::internal::IncrementalMarking::RecordWrites ( HeapObject obj)
inline

Definition at line 66 of file incremental-marking-inl.h.

66  {
67  if (IsMarking()) {
68  MarkBit obj_bit = Marking::MarkBitFrom(obj);
69  if (Marking::IsBlack(obj_bit)) {
70  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
71  if (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR)) {
72  chunk->set_progress_bar(0);
73  }
74  BlackToGreyAndUnshift(obj, obj_bit);
76  }
77  }
78 }

References v8::internal::HeapObject::address(), BlackToGreyAndUnshift(), v8::internal::MemoryChunk::FromAddress(), v8::internal::MemoryChunk::HAS_PROGRESS_BAR, v8::internal::MemoryChunk::IsFlagSet(), RestartIfNotMarking(), and v8::internal::MemoryChunk::set_progress_bar().

Referenced by v8::internal::CopyDictionaryToObjectElements(), v8::internal::CopyObjectToObjectElements(), v8::internal::Map::DeprecateTarget(), v8::internal::Map::EnsureDescriptorSlack(), v8::internal::CodeFlusher::EvictCandidate(), v8::internal::CodeFlusher::EvictOptimizedCodeMap(), and v8::internal::Heap::MoveElements().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordWriteSlow()

void v8::internal::IncrementalMarking::RecordWriteSlow ( HeapObject obj,
Object **  slot,
Object value 
)

Definition at line 37 of file incremental-marking.cc.

38  {
39  if (BaseRecordWrite(obj, slot, value) && slot != NULL) {
40  MarkBit obj_bit = Marking::MarkBitFrom(obj);
41  if (Marking::IsBlack(obj_bit)) {
42  // Object is not going to be rescanned; we need to record the slot.
43  heap_->mark_compact_collector()->RecordSlot(HeapObject::RawField(obj, 0),
44  slot, value);
45  }
46  }
47 }
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:1311

References heap_, v8::internal::Heap::mark_compact_collector(), NULL, and v8::internal::HeapObject::RawField().

+ Here is the call graph for this function:

◆ ResetStepCounters()

void v8::internal::IncrementalMarking::ResetStepCounters ( )
private

Definition at line 965 of file incremental-marking.cc.

References bytes_rescanned_, bytes_scanned_, heap_, kInitialMarkingSpeed, marking_speed_, old_generation_space_available_at_start_of_incremental_, old_generation_space_used_at_start_of_incremental_, v8::internal::Heap::PromotedTotalSize(), SpaceLeftInOldSpace(), steps_count_, and write_barriers_invoked_since_last_step_.

Referenced by Abort(), Finalize(), and Start().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RestartIfNotMarking()

void v8::internal::IncrementalMarking::RestartIfNotMarking ( )
inline

Definition at line 91 of file incremental-marking.h.

91  {
92  if (state_ == COMPLETE) {
93  state_ = MARKING;
94  if (FLAG_trace_incremental_marking) {
95  PrintF("[IncrementalMarking] Restarting (new grey objects)\n");
96  }
97  }
98  }

References COMPLETE, MARKING, v8::internal::PrintF(), and state_.

Referenced by RecordWriteIntoCodeSlow(), RecordWrites(), and v8::internal::Marking::TransferMark().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ set_should_hurry()

void v8::internal::IncrementalMarking::set_should_hurry ( bool  val)
inline

Definition at line 35 of file incremental-marking.h.

35 { should_hurry_ = val; }

References should_hurry_.

Referenced by Abort(), Finalize(), and MarkingComplete().

+ Here is the caller graph for this function:

◆ SetNewSpacePageFlags() [1/2]

void v8::internal::IncrementalMarking::SetNewSpacePageFlags ( NewSpacePage chunk)
inline

Definition at line 134 of file incremental-marking.h.

134  {
135  SetNewSpacePageFlags(chunk, IsMarking());
136  }

Referenced by ActivateIncrementalWriteBarrier(), DeactivateIncrementalWriteBarrierForSpace(), and v8::internal::NewSpacePage::Initialize().

+ Here is the caller graph for this function:

◆ SetNewSpacePageFlags() [2/2]

void v8::internal::IncrementalMarking::SetNewSpacePageFlags ( NewSpacePage chunk,
bool  is_marking 
)
staticprivate

Definition at line 340 of file incremental-marking.cc.

341  {
343  if (is_marking) {
345  } else {
347  }
348  chunk->SetFlag(MemoryChunk::SCAN_ON_SCAVENGE);
349 }

References v8::internal::MemoryChunk::ClearFlag(), v8::internal::MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING, v8::internal::MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING, v8::internal::MemoryChunk::SCAN_ON_SCAVENGE, and v8::internal::MemoryChunk::SetFlag().

+ Here is the call graph for this function:

◆ SetOldSpacePageFlags() [1/2]

void v8::internal::IncrementalMarking::SetOldSpacePageFlags ( MemoryChunk chunk)
inline

Definition at line 130 of file incremental-marking.h.

130  {
131  SetOldSpacePageFlags(chunk, IsMarking(), IsCompacting());
132  }

References IsCompacting().

Referenced by ActivateIncrementalWriteBarrier(), DeactivateIncrementalWriteBarrier(), DeactivateIncrementalWriteBarrierForSpace(), v8::internal::LargePage::Initialize(), v8::internal::Page::Initialize(), and v8::internal::MemoryChunk::set_scan_on_scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetOldSpacePageFlags() [2/2]

void v8::internal::IncrementalMarking::SetOldSpacePageFlags ( MemoryChunk chunk,
bool  is_marking,
bool  is_compacting 
)
staticprivate

Definition at line 316 of file incremental-marking.cc.

318  {
319  if (is_marking) {
322 
323  // It's difficult to filter out slots recorded for large objects.
324  if (chunk->owner()->identity() == LO_SPACE &&
325  chunk->size() > static_cast<size_t>(Page::kPageSize) && is_compacting) {
326  chunk->SetFlag(MemoryChunk::RESCAN_ON_EVACUATION);
327  }
328  } else if (chunk->owner()->identity() == CELL_SPACE ||
329  chunk->owner()->identity() == PROPERTY_CELL_SPACE ||
330  chunk->scan_on_scavenge()) {
333  } else {
336  }
337 }
static const int kPageSize
Definition: spaces.h:748
@ PROPERTY_CELL_SPACE
Definition: globals.h:365

References v8::internal::CELL_SPACE, v8::internal::MemoryChunk::ClearFlag(), v8::internal::Space::identity(), v8::internal::Page::kPageSize, v8::internal::LO_SPACE, v8::internal::MemoryChunk::owner(), v8::internal::MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING, v8::internal::MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING, v8::internal::PROPERTY_CELL_SPACE, v8::internal::MemoryChunk::RESCAN_ON_EVACUATION, v8::internal::MemoryChunk::scan_on_scavenge(), v8::internal::MemoryChunk::SetFlag(), and v8::internal::MemoryChunk::size().

+ Here is the call graph for this function:

◆ should_hurry()

bool v8::internal::IncrementalMarking::should_hurry ( )
inline

Definition at line 34 of file incremental-marking.h.

34 { return should_hurry_; }

References should_hurry_.

◆ ShouldActivate()

bool v8::internal::IncrementalMarking::ShouldActivate ( )

Definition at line 424 of file incremental-marking.cc.

424  {
426 }
bool NextGCIsLikelyToBeFull()
Definition: heap.h:1135

References heap_, v8::internal::Heap::NextGCIsLikelyToBeFull(), and WorthActivating().

Referenced by v8::internal::Heap::IdleNotification(), and OldSpaceStep().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SpaceLeftInOldSpace()

int64_t v8::internal::IncrementalMarking::SpaceLeftInOldSpace ( )
private

Definition at line 978 of file incremental-marking.cc.

978  {
980 }
intptr_t MaxOldGenerationSize()
Definition: heap.h:555

References heap_, v8::internal::Heap::MaxOldGenerationSize(), and v8::internal::Heap::PromotedSpaceSizeOfObjects().

Referenced by ResetStepCounters(), and SpeedUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SpeedUp()

void v8::internal::IncrementalMarking::SpeedUp ( )
private

Definition at line 829 of file incremental-marking.cc.

829  {
830  bool speed_up = false;
831 
833  if (FLAG_trace_gc) {
834  PrintPID("Speed up marking after %d steps\n",
835  static_cast<int>(kMarkingSpeedAccellerationInterval));
836  }
837  speed_up = true;
838  }
839 
840  bool space_left_is_very_small =
842 
843  bool only_1_nth_of_space_that_was_available_still_left =
846 
847  if (space_left_is_very_small ||
848  only_1_nth_of_space_that_was_available_still_left) {
849  if (FLAG_trace_gc) PrintPID("Speed up marking because of low space left\n");
850  speed_up = true;
851  }
852 
853  bool size_of_old_space_multiplied_by_n_during_marking =
855  (marking_speed_ + 1) *
857  if (size_of_old_space_multiplied_by_n_during_marking) {
858  speed_up = true;
859  if (FLAG_trace_gc) {
860  PrintPID("Speed up marking because of heap size increase\n");
861  }
862  }
863 
864  int64_t promoted_during_marking =
867  intptr_t delay = marking_speed_ * MB;
868  intptr_t scavenge_slack = heap_->MaxSemiSpaceSize();
869 
870  // We try to scan at least twice the speed that we are allocating.
871  if (promoted_during_marking > bytes_scanned_ / 2 + scavenge_slack + delay) {
872  if (FLAG_trace_gc) {
873  PrintPID("Speed up marking because marker was not keeping up\n");
874  }
875  speed_up = true;
876  }
877 
878  if (speed_up) {
879  if (state_ != MARKING) {
880  if (FLAG_trace_gc) {
881  PrintPID("Postponing speeding up marking until marking starts\n");
882  }
883  } else {
885  marking_speed_ = static_cast<int>(
886  Min(kMaxMarkingSpeed, static_cast<intptr_t>(marking_speed_ * 1.3)));
887  if (FLAG_trace_gc) {
888  PrintPID("Marking speed increased to %d\n", marking_speed_);
889  }
890  }
891  }
892 }
int MaxSemiSpaceSize()
Definition: heap.h:552
static const intptr_t kMarkingSpeedAccellerationInterval
static const intptr_t kMarkingSpeedAccelleration
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)

References bytes_scanned_, heap_, kMarkingSpeedAccelleration, kMarkingSpeedAccellerationInterval, kMaxMarkingSpeed, MARKING, marking_speed_, v8::internal::Heap::MaxSemiSpaceSize(), v8::internal::MB, v8::internal::Min(), old_generation_space_available_at_start_of_incremental_, old_generation_space_used_at_start_of_incremental_, v8::internal::PrintPID(), v8::internal::Heap::PromotedTotalSize(), SpaceLeftInOldSpace(), state_, and steps_count_.

Referenced by Step().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Start()

void v8::internal::IncrementalMarking::Start ( CompactionFlag  flag = ALLOW_COMPACTION)

Definition at line 510 of file incremental-marking.cc.

510  {
511  if (FLAG_trace_incremental_marking) {
512  PrintF("[IncrementalMarking] Start\n");
513  }
514  DCHECK(FLAG_incremental_marking);
515  DCHECK(FLAG_incremental_marking_steps);
516  DCHECK(state_ == STOPPED);
520 
522 
525  } else {
526  if (FLAG_trace_incremental_marking) {
527  PrintF("[IncrementalMarking] Start sweeping.\n");
528  }
529  state_ = SWEEPING;
530  }
531 
533 }
HeapState gc_state()
Definition: heap.h:955
static const intptr_t kAllocatedThreshold
void StartMarking(CompactionFlag flag)
bool serializer_enabled() const
Definition: isolate.h:1007
kFeedbackVectorOffset flag
Definition: objects-inl.h:5418

References DCHECK, v8::internal::flag, v8::internal::Heap::gc_state(), heap_, v8::internal::Isolate::IsInitialized(), v8::internal::Heap::isolate(), kAllocatedThreshold, v8::internal::NewSpace::LowerInlineAllocationLimit(), v8::internal::Heap::mark_compact_collector(), v8::internal::Heap::new_space(), v8::internal::Heap::NOT_IN_GC, v8::internal::PrintF(), ResetStepCounters(), v8::internal::Isolate::serializer_enabled(), StartMarking(), state_, STOPPED, SWEEPING, and v8::internal::MarkCompactCollector::sweeping_in_progress().

Referenced by v8::internal::Heap::CollectGarbage(), v8::internal::Heap::IdleNotification(), and OldSpaceStep().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ StartMarking()

void v8::internal::IncrementalMarking::StartMarking ( CompactionFlag  flag)
private

Definition at line 536 of file incremental-marking.cc.

536  {
537  if (FLAG_trace_incremental_marking) {
538  PrintF("[IncrementalMarking] Start marking\n");
539  }
540 
541  is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
544 
545  state_ = MARKING;
546 
550 
552 
554 
555  // Initialize marking stack.
556  Address addr = static_cast<Address>(marking_deque_memory_->address());
557  size_t size = marking_deque_memory_->size();
558  if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize;
559  marking_deque_.Initialize(addr, addr + size);
560 
562 
563 // Marking bits are cleared by the sweeper.
564 #ifdef VERIFY_HEAP
565  if (FLAG_verify_heap) {
566  heap_->mark_compact_collector()->VerifyMarkbitsAreClean();
567  }
568 #endif
569 
572 
573  if (FLAG_cleanup_code_caches_at_gc) {
574  // We will mark cache black with a separate pass
575  // when we finish marking.
576  MarkObjectGreyDoNotEnqueue(heap_->polymorphic_code_cache());
577  }
578 
579  // Mark strong roots grey.
580  IncrementalMarkingRootMarkingVisitor visitor(this);
582 
584 
585  // Ready to start incremental marking.
586  if (FLAG_trace_incremental_marking) {
587  PrintF("[IncrementalMarking] Running\n");
588  }
589 }
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4747
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:711
CompilationCache * compilation_cache()
Definition: isolate.h:865
bool StartCompaction(CompactionMode mode)
void Initialize(Address low, Address high)
Definition: mark-compact.h:145
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
@ VISIT_ONLY_STRONG
Definition: globals.h:397
static void MarkObjectGreyDoNotEnqueue(Object *obj)

References ActivateIncrementalWriteBarrier(), v8::base::VirtualMemory::address(), ALLOW_COMPACTION, v8::internal::Isolate::compilation_cache(), v8::internal::Heap::CompletelyClearInstanceofCache(), EnsureMarkingDequeIsCommitted(), v8::internal::flag, heap_, v8::internal::RecordWriteStub::INCREMENTAL, v8::internal::RecordWriteStub::INCREMENTAL_COMPACTION, v8::internal::MarkCompactCollector::INCREMENTAL_COMPACTION, v8::internal::MarkingDeque::Initialize(), is_compacting_, v8::internal::Heap::isolate(), v8::internal::Heap::IterateStrongRoots(), v8::internal::kPointerSize, v8::internal::Heap::mark_compact_collector(), v8::internal::CompilationCache::MarkCompactPrologue(), MARKING, marking_deque_, marking_deque_memory_, v8::internal::MarkObjectGreyDoNotEnqueue(), v8::internal::MarkCompactCollector::MarkWeakObjectToCodeTable(), mode(), v8::internal::PatchIncrementalMarkingRecordWriteStubs(), v8::internal::PrintF(), v8::base::VirtualMemory::size(), size, v8::internal::MarkCompactCollector::StartCompaction(), state_, and v8::internal::VISIT_ONLY_STRONG.

Referenced by Start(), and Step().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ state()

State v8::internal::IncrementalMarking::state ( )
inline

Definition at line 29 of file incremental-marking.h.

29  {
30  DCHECK(state_ == STOPPED || FLAG_incremental_marking);
31  return state_;
32  }

References DCHECK, state_, and STOPPED.

Referenced by Hurry(), INLINE(), IsComplete(), IsMarkingIncomplete(), and IsStopped().

+ Here is the caller graph for this function:

◆ Step()

void v8::internal::IncrementalMarking::Step ( intptr_t  allocated,
CompletionAction  action,
bool  force_marking = false 
)

Definition at line 895 of file incremental-marking.cc.

896  {
897  if (heap_->gc_state() != Heap::NOT_IN_GC || !FLAG_incremental_marking ||
898  !FLAG_incremental_marking_steps ||
899  (state_ != SWEEPING && state_ != MARKING)) {
900  return;
901  }
902 
903  allocated_ += allocated_bytes;
904 
905  if (!force_marking && allocated_ < kAllocatedThreshold &&
908  return;
909  }
910 
911  if (state_ == MARKING && no_marking_scope_depth_ > 0) return;
912 
913  {
914  HistogramTimerScope incremental_marking_scope(
915  heap_->isolate()->counters()->gc_incremental_marking());
916  double start = base::OS::TimeCurrentMillis();
917 
918  // The marking speed is driven either by the allocation rate or by the rate
919  // at which we are having to check the color of objects in the write
920  // barrier.
921  // It is possible for a tight non-allocating loop to run a lot of write
922  // barriers before we get here and check them (marking can only take place
923  // on
924  // allocation), so to reduce the lumpiness we don't use the write barriers
925  // invoked since last step directly to determine the amount of work to do.
926  intptr_t bytes_to_process =
929  allocated_ = 0;
931 
932  bytes_scanned_ += bytes_to_process;
933  intptr_t bytes_processed = 0;
934 
935  if (state_ == SWEEPING) {
939  }
941  bytes_scanned_ = 0;
943  }
944  } else if (state_ == MARKING) {
945  bytes_processed = ProcessMarkingDeque(bytes_to_process);
946  if (marking_deque_.IsEmpty()) MarkingComplete(action);
947  }
948 
949  steps_count_++;
950 
951  // Speed up marking if we are marking too slow or if we are almost done
952  // with marking.
953  SpeedUp();
954 
955  double end = base::OS::TimeCurrentMillis();
956  double duration = (end - start);
957  // Note that we report zero bytes here when sweeping was in progress or
958  // when we just started incremental marking. In these cases we did not
959  // process the marking deque.
960  heap_->tracer()->AddIncrementalMarkingStep(duration, bytes_processed);
961  }
962 }
void AddIncrementalMarkingStep(double duration, intptr_t bytes)
Definition: gc-tracer.cc:210
static const intptr_t kWriteBarriersInvokedThreshold
void MarkingComplete(CompletionAction action)
Counters * counters()
Definition: isolate.h:857
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)

References v8::internal::GCTracer::AddIncrementalMarkingStep(), allocated_, bytes_scanned_, v8::internal::Isolate::counters(), v8::internal::MarkCompactCollector::EnsureSweepingCompleted(), v8::internal::Heap::gc_state(), heap_, v8::internal::MarkingDeque::IsEmpty(), v8::internal::Heap::isolate(), v8::internal::MarkCompactCollector::IsSweepingCompleted(), kAllocatedThreshold, kWriteBarriersInvokedThreshold, v8::internal::Heap::mark_compact_collector(), MARKING, marking_deque_, marking_speed_, MarkingComplete(), v8::internal::Max(), no_marking_scope_depth_, v8::internal::Heap::NOT_IN_GC, PREVENT_COMPACTION, SpeedUp(), StartMarking(), state_, steps_count_, SWEEPING, v8::internal::MarkCompactCollector::sweeping_in_progress(), v8::base::OS::TimeCurrentMillis(), v8::internal::Heap::tracer(), and write_barriers_invoked_since_last_step_.

Referenced by v8::internal::Heap::AdvanceIdleIncrementalMarking(), v8::internal::Heap::CollectGarbage(), OldSpaceStep(), and v8::internal::NewSpace::SlowAllocateRaw().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Stop()

void v8::internal::IncrementalMarking::Stop ( )

◆ TearDown()

void v8::internal::IncrementalMarking::TearDown ( )

Definition at line 34 of file incremental-marking.cc.

34 { delete marking_deque_memory_; }

References marking_deque_memory_.

Referenced by v8::internal::Heap::TearDown().

+ Here is the caller graph for this function:

◆ UncommitMarkingDeque()

void v8::internal::IncrementalMarking::UncommitMarkingDeque ( )

Definition at line 499 of file incremental-marking.cc.

499  {
501  bool success = marking_deque_memory_->Uncommit(
502  reinterpret_cast<Address>(marking_deque_memory_->address()),
504  CHECK(success);
506  }
507 }
bool Uncommit(void *address, size_t size)

References v8::base::VirtualMemory::address(), CHECK, marking_deque_memory_, marking_deque_memory_committed_, v8::base::VirtualMemory::size(), state_, STOPPED, and v8::base::VirtualMemory::Uncommit().

Referenced by v8::internal::Heap::CollectAllAvailableGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateMarkingDequeAfterScavenge()

void v8::internal::IncrementalMarking::UpdateMarkingDequeAfterScavenge ( )

Definition at line 602 of file incremental-marking.cc.

602  {
603  if (!IsMarking()) return;
604 
605  int current = marking_deque_.bottom();
606  int mask = marking_deque_.mask();
607  int limit = marking_deque_.top();
608  HeapObject** array = marking_deque_.array();
609  int new_top = current;
610 
611  Map* filler_map = heap_->one_pointer_filler_map();
612 
613  while (current != limit) {
614  HeapObject* obj = array[current];
615  DCHECK(obj->IsHeapObject());
616  current = ((current + 1) & mask);
617  if (heap_->InNewSpace(obj)) {
618  MapWord map_word = obj->map_word();
619  if (map_word.IsForwardingAddress()) {
620  HeapObject* dest = map_word.ToForwardingAddress();
621  array[new_top] = dest;
622  new_top = ((new_top + 1) & mask);
623  DCHECK(new_top != marking_deque_.bottom());
624 #ifdef DEBUG
625  MarkBit mark_bit = Marking::MarkBitFrom(obj);
626  DCHECK(Marking::IsGrey(mark_bit) ||
627  (obj->IsFiller() && Marking::IsWhite(mark_bit)));
628 #endif
629  }
630  } else if (obj->map() != filler_map) {
631  // Skip one word filler objects that appear on the
632  // stack when we perform in place array shift.
633  array[new_top] = obj;
634  new_top = ((new_top + 1) & mask);
635  DCHECK(new_top != marking_deque_.bottom());
636 #ifdef DEBUG
637  MarkBit mark_bit = Marking::MarkBitFrom(obj);
638  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
639  DCHECK(Marking::IsGrey(mark_bit) ||
640  (obj->IsFiller() && Marking::IsWhite(mark_bit)) ||
641  (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
642  Marking::IsBlack(mark_bit)));
643 #endif
644  }
645  }
646  marking_deque_.set_top(new_top);
647 }
bool InNewSpace(Object *object)
Definition: heap-inl.h:322

References v8::internal::HeapObject::address(), v8::internal::MarkingDeque::array(), v8::internal::MarkingDeque::bottom(), DCHECK, v8::internal::MemoryChunk::FromAddress(), v8::internal::MemoryChunk::HAS_PROGRESS_BAR, heap_, v8::internal::Heap::InNewSpace(), v8::internal::MemoryChunk::IsFlagSet(), v8::internal::HeapObject::map(), v8::internal::HeapObject::map_word(), marking_deque_, v8::internal::MarkingDeque::mask(), v8::internal::MarkingDeque::set_top(), and v8::internal::MarkingDeque::top().

Referenced by v8::internal::Heap::Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ WhiteToGreyAndPush()

void v8::internal::IncrementalMarking::WhiteToGreyAndPush ( HeapObject obj,
MarkBit  mark_bit 
)
inline

Definition at line 110 of file incremental-marking-inl.h.

110  {
111  Marking::WhiteToGrey(mark_bit);
112  marking_deque_.PushGrey(obj);
113 }

References marking_deque_.

Referenced by v8::internal::IncrementalMarkingMarkingVisitor::INLINE(), v8::internal::IncrementalMarkingRootMarkingVisitor::MarkObjectByPointer(), and v8::internal::Marking::TransferMark().

+ Here is the caller graph for this function:

◆ WorthActivating()

bool v8::internal::IncrementalMarking::WorthActivating ( )

Definition at line 429 of file incremental-marking.cc.

429  {
430 #ifndef DEBUG
431  static const intptr_t kActivationThreshold = 8 * MB;
432 #else
433  // TODO(gc) consider setting this to some low level so that some
434  // debug tests run with incremental marking and some without.
435  static const intptr_t kActivationThreshold = 0;
436 #endif
437  // Only start incremental marking in a safe state: 1) when incremental
438  // marking is turned on, 2) when we are currently not in a GC, and
439  // 3) when we are currently not serializing or deserializing the heap.
440  return FLAG_incremental_marking && FLAG_incremental_marking_steps &&
443  heap_->isolate()->IsInitialized() &&
444  heap_->PromotedSpaceSizeOfObjects() > kActivationThreshold;
445 }

References v8::internal::Heap::gc_state(), heap_, v8::internal::Isolate::IsInitialized(), v8::internal::Heap::isolate(), v8::internal::MB, v8::internal::Heap::NOT_IN_GC, v8::internal::Heap::PromotedSpaceSizeOfObjects(), and v8::internal::Isolate::serializer_enabled().

Referenced by ShouldActivate(), and v8::internal::Heap::WorthActivatingIncrementalMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

Member Data Documentation

◆ allocated_

intptr_t v8::internal::IncrementalMarking::allocated_
private

Definition at line 214 of file incremental-marking.h.

Referenced by Step().

◆ bytes_rescanned_

int64_t v8::internal::IncrementalMarking::bytes_rescanned_
private

Definition at line 210 of file incremental-marking.h.

Referenced by BlackToGreyAndUnshift(), and ResetStepCounters().

◆ bytes_scanned_

intptr_t v8::internal::IncrementalMarking::bytes_scanned_
private

Definition at line 213 of file incremental-marking.h.

Referenced by BlackToGreyAndUnshift(), ResetStepCounters(), SpeedUp(), and Step().

◆ heap_

◆ is_compacting_

bool v8::internal::IncrementalMarking::is_compacting_
private

◆ kAllocatedThreshold

const intptr_t v8::internal::IncrementalMarking::kAllocatedThreshold = 65536
static

Definition at line 73 of file incremental-marking.h.

Referenced by v8::internal::FreeList::Allocate(), Start(), and Step().

◆ kFastMarking

const intptr_t v8::internal::IncrementalMarking::kFastMarking = 3
static

Definition at line 79 of file incremental-marking.h.

Referenced by NotifyOfHighPromotionRate(), and OldSpaceStep().

◆ kInitialMarkingSpeed

const intptr_t v8::internal::IncrementalMarking::kInitialMarkingSpeed = 1
static

Definition at line 76 of file incremental-marking.h.

Referenced by OldSpaceStep(), and ResetStepCounters().

◆ kMarkingSpeedAccelleration

const intptr_t v8::internal::IncrementalMarking::kMarkingSpeedAccelleration = 2
static

Definition at line 83 of file incremental-marking.h.

Referenced by SpeedUp().

◆ kMarkingSpeedAccellerationInterval

const intptr_t v8::internal::IncrementalMarking::kMarkingSpeedAccellerationInterval = 1024
static

Definition at line 81 of file incremental-marking.h.

Referenced by SpeedUp().

◆ kMaxMarkingSpeed

const intptr_t v8::internal::IncrementalMarking::kMaxMarkingSpeed = 1000
static

Definition at line 84 of file incremental-marking.h.

Referenced by BlackToGreyAndUnshift(), and SpeedUp().

◆ kWriteBarriersInvokedThreshold

const intptr_t v8::internal::IncrementalMarking::kWriteBarriersInvokedThreshold = 32768
static

Definition at line 74 of file incremental-marking.h.

Referenced by Step().

◆ marking_deque_

MarkingDeque v8::internal::IncrementalMarking::marking_deque_
private

◆ marking_deque_memory_

base::VirtualMemory* v8::internal::IncrementalMarking::marking_deque_memory_
private

◆ marking_deque_memory_committed_

bool v8::internal::IncrementalMarking::marking_deque_memory_committed_
private

Definition at line 204 of file incremental-marking.h.

Referenced by EnsureMarkingDequeIsCommitted(), and UncommitMarkingDeque().

◆ marking_speed_

int v8::internal::IncrementalMarking::marking_speed_
private

◆ no_marking_scope_depth_

int v8::internal::IncrementalMarking::no_marking_scope_depth_
private

Definition at line 217 of file incremental-marking.h.

Referenced by EnterNoMarkingScope(), LeaveNoMarkingScope(), and Step().

◆ old_generation_space_available_at_start_of_incremental_

int64_t v8::internal::IncrementalMarking::old_generation_space_available_at_start_of_incremental_
private

Definition at line 208 of file incremental-marking.h.

Referenced by ResetStepCounters(), and SpeedUp().

◆ old_generation_space_used_at_start_of_incremental_

int64_t v8::internal::IncrementalMarking::old_generation_space_used_at_start_of_incremental_
private

Definition at line 209 of file incremental-marking.h.

Referenced by ResetStepCounters(), and SpeedUp().

◆ should_hurry_

bool v8::internal::IncrementalMarking::should_hurry_
private

Definition at line 211 of file incremental-marking.h.

Referenced by set_should_hurry(), and should_hurry().

◆ state_

State v8::internal::IncrementalMarking::state_
private

◆ steps_count_

int v8::internal::IncrementalMarking::steps_count_
private

Definition at line 207 of file incremental-marking.h.

Referenced by ResetStepCounters(), SpeedUp(), and Step().

◆ unscanned_bytes_of_large_object_

int v8::internal::IncrementalMarking::unscanned_bytes_of_large_object_
private

Definition at line 219 of file incremental-marking.h.

Referenced by NotifyIncompleteScanOfObject().

◆ write_barriers_invoked_since_last_step_

intptr_t v8::internal::IncrementalMarking::write_barriers_invoked_since_last_step_
private

Definition at line 215 of file incremental-marking.h.

Referenced by RecordWriteFromCode(), ResetStepCounters(), and Step().


The documentation for this class was generated from the following files: