V8 Project
v8::internal::Heap Class Reference

#include <heap.h>

+ Collaboration diagram for v8::internal::Heap:

Classes

struct  ConstantStringTable
 
struct  GCEpilogueCallbackPair
 
struct  GCPrologueCallbackPair
 
class  RelocationLock
 
struct  StringTypeTable
 
struct  StructTable
 

Public Types

enum  InvocationMode { FROM_GC , FROM_MUTATOR }
 
enum  HeapState { NOT_IN_GC , SCAVENGE , MARK_COMPACT }
 
enum  ScratchpadSlotMode { IGNORE_SCRATCHPAD_SLOT , RECORD_SCRATCHPAD_SLOT }
 
enum  RootListIndex { kStringTableRootIndex , kRootListLength , kStrongRootListLength = kStringTableRootIndex , kSmiRootsStart = kStringTableRootIndex + 1 }
 
enum  { FIRST_CODE_KIND_SUB_TYPE = LAST_TYPE + 1 , FIRST_FIXED_ARRAY_SUB_TYPE , FIRST_CODE_AGE_SUB_TYPE , OBJECT_STATS_COUNT = FIRST_CODE_AGE_SUB_TYPE + Code::kCodeAgeCount + 1 }
 

Public Member Functions

bool ConfigureHeap (int max_semi_space_size, int max_old_space_size, int max_executable_size, size_t code_range_size)
 
bool ConfigureHeapDefault ()
 
bool SetUp ()
 
bool CreateHeapObjects ()
 
void TearDown ()
 
void SetStackLimits ()
 
bool HasBeenSetUp ()
 
intptr_t MaxReserved ()
 
int MaxSemiSpaceSize ()
 
int ReservedSemiSpaceSize ()
 
int InitialSemiSpaceSize ()
 
intptr_t MaxOldGenerationSize ()
 
intptr_t MaxExecutableSize ()
 
intptr_t Capacity ()
 
intptr_t CommittedMemory ()
 
intptr_t CommittedMemoryExecutable ()
 
size_t CommittedPhysicalMemory ()
 
intptr_t MaximumCommittedMemory ()
 
void UpdateMaximumCommitted ()
 
intptr_t Available ()
 
intptr_t SizeOfObjects ()
 
Address NewSpaceStart ()
 
uintptr_t NewSpaceMask ()
 
Address NewSpaceTop ()
 
NewSpace * new_space ()
 
OldSpace * old_pointer_space ()
 
OldSpace * old_data_space ()
 
OldSpace * code_space ()
 
MapSpace * map_space ()
 
CellSpace * cell_space ()
 
PropertyCellSpace * property_cell_space ()
 
LargeObjectSpace * lo_space ()
 
PagedSpace * paged_space (int idx)
 
bool always_allocate ()
 
Address always_allocate_scope_depth_address ()
 
Address * NewSpaceAllocationTopAddress ()
 
Address * NewSpaceAllocationLimitAddress ()
 
Address * OldPointerSpaceAllocationTopAddress ()
 
Address * OldPointerSpaceAllocationLimitAddress ()
 
Address * OldDataSpaceAllocationTopAddress ()
 
Address * OldDataSpaceAllocationLimitAddress ()
 
MUST_USE_RESULT AllocationResult CopyJSObject (JSObject *source, AllocationSite *site=NULL)
 
void ClearInstanceofCache ()
 
void ClearAllICsByKind (Code::Kind kind)
 
void RepairFreeListsAfterBoot ()
 
void MoveElements (FixedArray *array, int dst_index, int src_index, int len)
 
void FinalizeExternalString (String *string)
 
void CreateFillerObjectAt (Address addr, int size)
 
bool CanMoveObjectStart (HeapObject *object)
 
void AdjustLiveBytes (Address address, int by, InvocationMode mode)
 
FixedArrayBase * LeftTrimFixedArray (FixedArrayBase *obj, int elements_to_trim)
 
template<Heap::InvocationMode mode>
void RightTrimFixedArray (FixedArrayBase *obj, int elements_to_trim)
 
Object * ToBoolean (bool condition)
 
bool CollectGarbage (AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
 
void CollectAllGarbage (int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
 
void CollectAllAvailableGarbage (const char *gc_reason=NULL)
 
bool IsHeapIterable ()
 
int NotifyContextDisposed ()
 
void increment_scan_on_scavenge_pages ()
 
void decrement_scan_on_scavenge_pages ()
 
PromotionQueue * promotion_queue ()
 
void AddGCPrologueCallback (v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
 
void RemoveGCPrologueCallback (v8::Isolate::GCPrologueCallback callback)
 
void AddGCEpilogueCallback (v8::Isolate::GCEpilogueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
 
void RemoveGCEpilogueCallback (v8::Isolate::GCEpilogueCallback callback)
 
String * hidden_string ()
 
void set_native_contexts_list (Object *object)
 
Object * native_contexts_list () const
 
void set_array_buffers_list (Object *object)
 
Object * array_buffers_list () const
 
void set_allocation_sites_list (Object *object)
 
Object * allocation_sites_list ()
 
Object ** allocation_sites_list_address ()
 
Object * weak_object_to_code_table ()
 
void set_encountered_weak_collections (Object *weak_collection)
 
Object * encountered_weak_collections () const
 
unsigned int ms_count ()
 
void IterateRoots (ObjectVisitor *v, VisitMode mode)
 
void IterateStrongRoots (ObjectVisitor *v, VisitMode mode)
 
void IterateSmiRoots (ObjectVisitor *v)
 
void IterateWeakRoots (ObjectVisitor *v, VisitMode mode)
 
void IterateAndMarkPointersToFromSpace (Address start, Address end, ObjectSlotCallback callback)
 
bool InNewSpace (Object *object)
 
bool InNewSpace (Address address)
 
bool InNewSpacePage (Address address)
 
bool InFromSpace (Object *object)
 
bool InToSpace (Object *object)
 
bool InOldPointerSpace (Address address)
 
bool InOldPointerSpace (Object *object)
 
bool InOldDataSpace (Address address)
 
bool InOldDataSpace (Object *object)
 
bool Contains (Address addr)
 
bool Contains (HeapObject *value)
 
bool InSpace (Address addr, AllocationSpace space)
 
bool InSpace (HeapObject *value, AllocationSpace space)
 
OldSpace * TargetSpace (HeapObject *object)
 
bool AllowedToBeMigrated (HeapObject *object, AllocationSpace dest)
 
void public_set_code_stubs (UnseededNumberDictionary *value)
 
HeapObjectCallback GcSafeSizeOfOldObjectFunction ()
 
void public_set_non_monomorphic_cache (UnseededNumberDictionary *value)
 
void public_set_empty_script (Script *script)
 
void public_set_store_buffer_top (Address *top)
 
void public_set_materialized_objects (FixedArray *objects)
 
Object ** roots_array_start ()
 
Address * store_buffer_top_address ()
 
uint32_t allocations_count ()
 
double synthetic_time ()
 
void PrintShortHeapStatistics ()
 
 INLINE (void RecordWrite(Address address, int offset))
 
 INLINE (void RecordWrites(Address address, int start, int len))
 
HeapState gc_state ()
 
bool IsInGCPostProcessing ()
 
AllocationMemento * FindAllocationMemento (HeapObject *object)
 
void ReserveSpace (int *sizes, Address *addresses)
 
void CreateApiObjects ()
 
intptr_t PromotedTotalSize ()
 
intptr_t OldGenerationSpaceAvailable ()
 
intptr_t OldGenerationCapacityAvailable ()
 
intptr_t OldGenerationAllocationLimit (intptr_t old_gen_size, int freed_global_handles)
 
bool inline_allocation_disabled ()
 
void EnableInlineAllocation ()
 
void DisableInlineAllocation ()
 
bool IdleNotification (int idle_time_in_ms)
 
 STATIC_ASSERT (kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
 
 STATIC_ASSERT (kNullValueRootIndex==Internals::kNullValueRootIndex)
 
 STATIC_ASSERT (kTrueValueRootIndex==Internals::kTrueValueRootIndex)
 
 STATIC_ASSERT (kFalseValueRootIndex==Internals::kFalseValueRootIndex)
 
 STATIC_ASSERT (kempty_stringRootIndex==Internals::kEmptyStringRootIndex)
 
bool RootCanBeTreatedAsConstant (RootListIndex root_index)
 
Map * MapForFixedTypedArray (ExternalArrayType array_type)
 
RootListIndex RootIndexForFixedTypedArray (ExternalArrayType array_type)
 
Map * MapForExternalArrayType (ExternalArrayType array_type)
 
RootListIndex RootIndexForExternalArrayType (ExternalArrayType array_type)
 
RootListIndex RootIndexForEmptyExternalArray (ElementsKind kind)
 
RootListIndex RootIndexForEmptyFixedTypedArray (ElementsKind kind)
 
ExternalArray * EmptyExternalArrayForMap (Map *map)
 
FixedTypedArrayBase * EmptyFixedTypedArrayForMap (Map *map)
 
void RecordStats (HeapStats *stats, bool take_snapshot=false)
 
void CheckNewSpaceExpansionCriteria ()
 
void IncrementPromotedObjectsSize (int object_size)
 
void IncrementSemiSpaceCopiedObjectSize (int object_size)
 
void IncrementNodesDiedInNewSpace ()
 
void IncrementNodesCopiedInNewSpace ()
 
void IncrementNodesPromoted ()
 
void IncrementYoungSurvivorsCounter (int survived)
 
bool NextGCIsLikelyToBeFull ()
 
void UpdateNewSpaceReferencesInExternalStringTable (ExternalStringTableUpdaterCallback updater_func)
 
void UpdateReferencesInExternalStringTable (ExternalStringTableUpdaterCallback updater_func)
 
void ProcessWeakReferences (WeakObjectRetainer *retainer)
 
void VisitExternalResources (v8::ExternalResourceVisitor *visitor)
 
bool ShouldBePromoted (Address old_address, int object_size)
 
void ClearJSFunctionResultCaches ()
 
void ClearNormalizedMapCaches ()
 
GCTracer * tracer ()
 
intptr_t PromotedSpaceSizeOfObjects ()
 
double total_regexp_code_generated ()
 
void IncreaseTotalRegexpCodeGenerated (int size)
 
void IncrementCodeGeneratedBytes (bool is_crankshafted, int size)
 
void UpdateCumulativeGCStatistics (double duration, double spent_in_mutator, double marking_time)
 
double get_max_gc_pause ()
 
intptr_t get_max_alive_after_gc ()
 
double get_min_in_mutator ()
 
MarkCompactCollector * mark_compact_collector ()
 
StoreBuffer * store_buffer ()
 
Marking * marking ()
 
IncrementalMarking * incremental_marking ()
 
ExternalStringTable * external_string_table ()
 
int sweep_generation ()
 
Isolate * isolate ()
 
void CallGCPrologueCallbacks (GCType gc_type, GCCallbackFlags flags)
 
void CallGCEpilogueCallbacks (GCType gc_type, GCCallbackFlags flags)
 
bool OldGenerationAllocationLimitReached ()
 
void DoScavengeObject (Map *map, HeapObject **slot, HeapObject *obj)
 
void QueueMemoryChunkForFree (MemoryChunk *chunk)
 
void FreeQueuedChunks ()
 
int gc_count () const
 
void CompletelyClearInstanceofCache ()
 
uint32_t HashSeed ()
 
void SetArgumentsAdaptorDeoptPCOffset (int pc_offset)
 
void SetConstructStubDeoptPCOffset (int pc_offset)
 
void SetGetterStubDeoptPCOffset (int pc_offset)
 
void SetSetterStubDeoptPCOffset (int pc_offset)
 
void RememberUnmappedPage (Address page, bool compacted)
 
int global_ic_age ()
 
void AgeInlineCaches ()
 
bool flush_monomorphic_ics ()
 
int64_t amount_of_external_allocated_memory ()
 
void DeoptMarkedAllocationSites ()
 
bool MaximumSizeScavenge ()
 
bool DeoptMaybeTenuredAllocationSites ()
 
void RecordObjectStats (InstanceType type, size_t size)
 
void RecordCodeSubTypeStats (int code_sub_type, int code_age, size_t size)
 
void RecordFixedArraySubTypeStats (int array_sub_type, size_t size)
 
void CheckpointObjectStats ()
 
void AddWeakObjectToCodeDependency (Handle< Object > obj, Handle< DependentCode > dep)
 
DependentCode * LookupWeakObjectToCodeDependency (Handle< Object > obj)
 
void InitializeWeakObjectToCodeTable ()
 
void EnsureWeakObjectToCodeTable ()
 
void OnAllocationEvent (HeapObject *object, int size_in_bytes)
 
void OnMoveEvent (HeapObject *target, HeapObject *source, int size_in_bytes)
 
template<>
bool IsOneByte (Vector< const char > str, int chars)
 
template<>
bool IsOneByte (String *str, int chars)
 
template<typename T >
AllocationResult AllocateInternalizedStringImpl (T t, int chars, uint32_t hash_field)
 
template<bool is_one_byte, typename T >
AllocationResult AllocateInternalizedStringImpl (T t, int chars, uint32_t hash_field)
 

Static Public Member Functions

template<typename T >
static bool IsOneByte (T t, int chars)
 
static AllocationSpace TargetSpaceId (InstanceType type)
 
static bool ShouldZapGarbage ()
 
static void ScavengePointer (HeapObject **p)
 
static void ScavengeObject (HeapObject **p, HeapObject *object)
 
static void UpdateAllocationSiteFeedback (HeapObject *object, ScratchpadSlotMode mode)
 
static bool RootCanBeWrittenAfterInitialization (RootListIndex root_index)
 
static void CopyBlock (Address dst, Address src, int byte_size)
 
static void MoveBlock (Address dst, Address src, int byte_size)
 
static void FatalProcessOutOfMemory (const char *location, bool take_snapshot=false)
 

Static Public Attributes

static const int kSloppyArgumentsObjectSize
 
static const int kStrictArgumentsObjectSize
 
static const int kArgumentsLengthIndex = 0
 
static const int kArgumentsCalleeIndex = 1
 
static const int kNoGCFlags = 0
 
static const int kReduceMemoryFootprintMask = 1
 
static const int kAbortIncrementalMarkingMask = 2
 
static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask
 
static const intptr_t kMinimumOldGenerationAllocationLimit
 
static const int kPointerMultiplier = i::kPointerSize / 4
 
static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier
 
static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier
 
static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier
 
static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier
 
static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier
 
static const int kMaxOldSpaceSizeMediumMemoryDevice
 
static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier
 
static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier
 
static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier
 
static const int kMaxExecutableSizeMediumMemoryDevice
 
static const int kMaxExecutableSizeHighMemoryDevice
 
static const int kMaxExecutableSizeHugeMemoryDevice
 
static const int kOldSpaceRoots = 0x20
 

Protected Member Functions

MUST_USE_RESULT AllocationResult AllocateMap (InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
 
MUST_USE_RESULT AllocationResult AllocateJSObject (JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED, AllocationSite *allocation_site=NULL)
 
MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap (Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
 
MUST_USE_RESULT AllocationResult AllocateHeapNumber (double value, MutableMode mode=IMMUTABLE, PretenureFlag pretenure=NOT_TENURED)
 
MUST_USE_RESULT AllocationResult AllocateByteArray (int length, PretenureFlag pretenure=NOT_TENURED)
 
MUST_USE_RESULT AllocationResult CopyCode (Code *code, Vector< byte > reloc_info)
 
MUST_USE_RESULT AllocationResult CopyCode (Code *code)
 
MUST_USE_RESULT AllocationResult AllocateFixedArray (int length, PretenureFlag pretenure=NOT_TENURED)
 

Private Member Functions

 Heap ()
 
int64_t PromotedExternalMemorySize ()
 
void MarkMapPointersAsEncoded (bool encoded)
 
void GarbageCollectionPrologue ()
 
void GarbageCollectionEpilogue ()
 
void ProcessPretenuringFeedback ()
 
GarbageCollector SelectGarbageCollector (AllocationSpace space, const char **reason)
 
void EnsureFillerObjectAtTop ()
 
void MakeHeapIterable ()
 
bool CollectGarbage (GarbageCollector collector, const char *gc_reason, const char *collector_reason, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
 
bool PerformGarbageCollection (GarbageCollector collector, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
 
void UpdateOldSpaceLimits ()
 
MUST_USE_RESULT AllocationResult AllocateRaw (int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
 
MUST_USE_RESULT AllocationResult Allocate (Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
 
MUST_USE_RESULT AllocationResult AllocatePartialMap (InstanceType instance_type, int instance_size)
 
void InitializeJSObjectFromMap (JSObject *obj, FixedArray *properties, Map *map)
 
void InitializeAllocationMemento (AllocationMemento *memento, AllocationSite *allocation_site)
 
MUST_USE_RESULT AllocationResult AllocateFillerObject (int size, bool double_align, AllocationSpace space)
 
MUST_USE_RESULT AllocationResult AllocateRawFixedArray (int length, PretenureFlag pretenure)
 
MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray (int length, PretenureFlag pretenure)
 
MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller (int length, PretenureFlag pretenure, Object *filler)
 
MUST_USE_RESULT AllocationResult AllocateRawOneByteString (int length, PretenureFlag pretenure)
 
MUST_USE_RESULT AllocationResult AllocateRawTwoByteString (int length, PretenureFlag pretenure)
 
bool CreateInitialMaps ()
 
void CreateInitialObjects ()
 
MUST_USE_RESULT AllocationResult AllocateInternalizedStringFromUtf8 (Vector< const char > str, int chars, uint32_t hash_field)
 
MUST_USE_RESULT AllocationResult AllocateOneByteInternalizedString (Vector< const uint8_t > str, uint32_t hash_field)
 
MUST_USE_RESULT AllocationResult AllocateTwoByteInternalizedString (Vector< const uc16 > str, uint32_t hash_field)
 
template<bool is_one_byte, typename T >
MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl (T t, int chars, uint32_t hash_field)
 
template<typename T >
MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl (T t, int chars, uint32_t hash_field)
 
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray (int length)
 
MUST_USE_RESULT AllocationResult CopyFixedArray (FixedArray *src)
 
MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap (FixedArray *src, Map *map)
 
MUST_USE_RESULT AllocationResult CopyFixedDoubleArray (FixedDoubleArray *src)
 
MUST_USE_RESULT AllocationResult CopyConstantPoolArray (ConstantPoolArray *src)
 
MUST_USE_RESULT AllocationResult LookupSingleCharacterStringFromCode (uint16_t code)
 
MUST_USE_RESULT AllocationResult AllocateSymbol ()
 
MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap (ConstantPoolArray *src, Map *map)
 
MUST_USE_RESULT AllocationResult AllocateConstantPoolArray (const ConstantPoolArray::NumberOfEntries &small)
 
MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray (const ConstantPoolArray::NumberOfEntries &small, const ConstantPoolArray::NumberOfEntries &extended)
 
MUST_USE_RESULT AllocationResult AllocateExternalArray (int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
 
MUST_USE_RESULT AllocationResult AllocateFixedTypedArray (int length, ExternalArrayType array_type, PretenureFlag pretenure)
 
MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray (FixedArray *src)
 
MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap (FixedDoubleArray *src, Map *map)
 
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray (int length, PretenureFlag pretenure=NOT_TENURED)
 
 NO_INLINE (void CreateJSEntryStub())
 
 NO_INLINE (void CreateJSConstructEntryStub())
 
void CreateFixedStubs ()
 
MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray ()
 
MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray (ExternalArrayType array_type)
 
MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray (ExternalArrayType array_type)
 
MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray ()
 
MUST_USE_RESULT AllocationResult AllocateCell (Object *value)
 
MUST_USE_RESULT AllocationResult AllocatePropertyCell ()
 
MUST_USE_RESULT AllocationResult AllocateStruct (InstanceType type)
 
MUST_USE_RESULT AllocationResult AllocateForeign (Address address, PretenureFlag pretenure=NOT_TENURED)
 
MUST_USE_RESULT AllocationResult AllocateCode (int object_size, bool immovable)
 
MUST_USE_RESULT AllocationResult InternalizeStringWithKey (HashTableKey *key)
 
MUST_USE_RESULT AllocationResult InternalizeString (String *str)
 
void Scavenge ()
 
void EnsureFromSpaceIsCommitted ()
 
bool UncommitFromSpace ()
 
void ZapFromSpace ()
 
Address DoScavenge (ObjectVisitor *scavenge_visitor, Address new_space_front)
 
void MarkCompact ()
 
void MarkCompactPrologue ()
 
void ProcessNativeContexts (WeakObjectRetainer *retainer)
 
void ProcessArrayBuffers (WeakObjectRetainer *retainer)
 
void ProcessAllocationSites (WeakObjectRetainer *retainer)
 
void ResetAllAllocationSitesDependentCode (PretenureFlag flag)
 
void EvaluateOldSpaceLocalPretenuring (uint64_t size_of_objects_before_gc)
 
void TearDownArrayBuffers ()
 
void ReportStatisticsBeforeGC ()
 
void ReportStatisticsAfterGC ()
 
int FullSizeNumberStringCacheLength ()
 
void FlushNumberStringCache ()
 
void FlushAllocationSitesScratchpad ()
 
void InitializeAllocationSitesScratchpad ()
 
void AddAllocationSiteToScratchpad (AllocationSite *site, ScratchpadSlotMode mode)
 
void UpdateSurvivalStatistics (int start_new_space_size)
 
bool IsHighSurvivalRate ()
 
void SelectScavengingVisitorsTable ()
 
void AdvanceIdleIncrementalMarking (intptr_t step_size)
 
bool WorthActivatingIncrementalMarking ()
 
void ClearObjectStats (bool clear_last_time_stats=false)
 
void set_weak_object_to_code_table (Object *value)
 
Object ** weak_object_to_code_table_address ()
 
void UpdateAllocationsHash (HeapObject *object)
 
void UpdateAllocationsHash (uint32_t value)
 
void PrintAlloctionsHash ()
 
 DISALLOW_COPY_AND_ASSIGN (Heap)
 

Static Private Member Functions

static int GcSafeSizeOfOldObject (HeapObject *object)
 
static AllocationSpace SelectSpace (int object_size, AllocationSpace preferred_old_space, PretenureFlag pretenure)
 
static String * UpdateNewSpaceReferenceInExternalStringTableEntry (Heap *heap, Object **pointer)
 
static void ScavengeStoreBufferCallback (Heap *heap, MemoryChunk *page, StoreBufferEvent event)
 
static void ScavengeObjectSlow (HeapObject **p, HeapObject *object)
 

Private Attributes

int64_t amount_of_external_allocated_memory_
 
int64_t amount_of_external_allocated_memory_at_last_global_gc_
 
Isolate * isolate_
 
Object * roots_ [kRootListLength]
 
size_t code_range_size_
 
int reserved_semispace_size_
 
int max_semi_space_size_
 
int initial_semispace_size_
 
intptr_t max_old_generation_size_
 
intptr_t max_executable_size_
 
intptr_t maximum_committed_
 
int survived_since_last_expansion_
 
int sweep_generation_
 
int always_allocate_scope_depth_
 
int contexts_disposed_
 
int global_ic_age_
 
bool flush_monomorphic_ics_
 
int scan_on_scavenge_pages_
 
NewSpace new_space_
 
OldSpace * old_pointer_space_
 
OldSpace * old_data_space_
 
OldSpace * code_space_
 
MapSpace * map_space_
 
CellSpace * cell_space_
 
PropertyCellSpace * property_cell_space_
 
LargeObjectSpace * lo_space_
 
HeapState gc_state_
 
int gc_post_processing_depth_
 
Address new_space_top_after_last_gc_
 
uint32_t allocations_count_
 
uint32_t raw_allocations_hash_
 
uint32_t dump_allocations_hash_countdown_
 
unsigned int ms_count_
 
unsigned int gc_count_
 
int remembered_unmapped_pages_index_
 
Address remembered_unmapped_pages_ [kRememberedUnmappedPages]
 
int unflattened_strings_length_
 
intptr_t old_generation_allocation_limit_
 
bool old_gen_exhausted_
 
bool inline_allocation_disabled_
 
Object * native_contexts_list_
 
Object * array_buffers_list_
 
Object * allocation_sites_list_
 
Object * weak_object_to_code_table_
 
Object * encountered_weak_collections_
 
StoreBufferRebuilder store_buffer_rebuilder_
 
String * hidden_string_
 
List< GCPrologueCallbackPair > gc_prologue_callbacks_
 
List< GCEpilogueCallbackPair > gc_epilogue_callbacks_
 
HeapObjectCallback gc_safe_size_of_old_object_
 
double total_regexp_code_generated_
 
GCTracer tracer_
 
int high_survival_rate_period_length_
 
intptr_t promoted_objects_size_
 
double promotion_rate_
 
intptr_t semi_space_copied_object_size_
 
double semi_space_copied_rate_
 
int nodes_died_in_new_space_
 
int nodes_copied_in_new_space_
 
int nodes_promoted_
 
unsigned int maximum_size_scavenges_
 
size_t object_counts_ [OBJECT_STATS_COUNT]
 
size_t object_counts_last_time_ [OBJECT_STATS_COUNT]
 
size_t object_sizes_ [OBJECT_STATS_COUNT]
 
size_t object_sizes_last_time_ [OBJECT_STATS_COUNT]
 
double max_gc_pause_
 
double total_gc_time_ms_
 
intptr_t max_alive_after_gc_
 
double min_in_mutator_
 
double marking_time_
 
double sweeping_time_
 
MarkCompactCollector mark_compact_collector_
 
StoreBuffer store_buffer_
 
Marking marking_
 
IncrementalMarking incremental_marking_
 
GCIdleTimeHandler gc_idle_time_handler_
 
unsigned int gc_count_at_last_idle_gc_
 
size_t full_codegen_bytes_generated_
 
size_t crankshaft_codegen_bytes_generated_
 
int gcs_since_last_deopt_
 
int allocation_sites_scratchpad_length_
 
PromotionQueue promotion_queue_
 
bool configured_
 
ExternalStringTable external_string_table_
 
VisitorDispatchTable< ScavengingCallback > scavenging_visitors_table_
 
MemoryChunk * chunks_queued_for_free_
 
base::Mutex relocation_mutex_
 
int gc_callbacks_depth_
 

Static Private Attributes

static const int kRememberedUnmappedPages = 128
 
static const StringTypeTable string_type_table []
 
static const ConstantStringTable constant_string_table []
 
static const StructTable struct_table []
 
static const int kYoungSurvivalRateHighThreshold = 90
 
static const int kYoungSurvivalRateAllowedDeviation = 15
 
static const int kOldSurvivalRateLowThreshold = 10
 
static const int kInitialStringTableSize = 2048
 
static const int kInitialEvalCacheSize = 64
 
static const int kInitialNumberStringCacheSize = 256
 
static const int kAllocationSiteScratchpadSize = 256
 
static const int kMaxMarkCompactsInIdleRound = 7
 
static const int kIdleScavengeThreshold = 5
 

Friends

class AlwaysAllocateScope
 
class Deserializer
 
class Factory
 
class GCCallbacksScope
 
class GCTracer
 
class HeapIterator
 
class Isolate
 
class MarkCompactCollector
 
class MarkCompactMarkingVisitor
 
class MapCompact
 
class Page
 

Detailed Description

Definition at line 517 of file heap.h.

Member Enumeration Documentation

◆ anonymous enum

anonymous enum
Enumerator
FIRST_CODE_KIND_SUB_TYPE 
FIRST_FIXED_ARRAY_SUB_TYPE 
FIRST_CODE_AGE_SUB_TYPE 
OBJECT_STATS_COUNT 

Definition at line 1291 of file heap.h.

◆ HeapState

Enumerator
NOT_IN_GC 
SCAVENGE 
MARK_COMPACT 

Definition at line 954 of file heap.h.

◆ InvocationMode

Enumerator
FROM_GC 
FROM_MUTATOR 

Definition at line 693 of file heap.h.

◆ RootListIndex

Enumerator
kStringTableRootIndex 
kRootListLength 
kStrongRootListLength 
kSmiRootsStart 

Definition at line 1054 of file heap.h.

1054  {
1055 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1057 #undef ROOT_INDEX_DECLARATION
1058 
1059 #define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
1061 #undef STRING_DECLARATION
1062 
1063 // Utility type maps
1064 #define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
1066 #undef DECLARE_STRUCT_MAP
1068 
1069 #define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
1071 #undef ROOT_INDEX_DECLARATION
1075  };
@ kStringTableRootIndex
Definition: heap.h:1067
@ kStrongRootListLength
Definition: heap.h:1073
#define STRING_INDEX_DECLARATION(name, str)
Definition: heap.h:1059
#define ROOT_INDEX_DECLARATION(type, name, camel_name)
Definition: heap.h:1069
#define STRONG_ROOT_LIST(V)
Definition: heap.h:28
#define INTERNALIZED_STRING_LIST(V)
Definition: heap.h:262
#define DECLARE_STRUCT_MAP(NAME, Name, name)
Definition: heap.h:1064
#define SMI_ROOT_LIST(V)
Definition: heap.h:197
#define STRUCT_LIST(V)
Definition: objects.h:515

◆ ScratchpadSlotMode

Enumerator
IGNORE_SCRATCHPAD_SLOT 
RECORD_SCRATCHPAD_SLOT 

Definition at line 974 of file heap.h.

Constructor & Destructor Documentation

◆ Heap()

v8::internal::Heap::Heap ( )
private

Definition at line 53 of file heap.cc.

56  isolate_(NULL),
58  // semispace_size_ should be a power of 2 and old_generation_size_ should
59  // be a multiple of Page::kPageSize.
63  max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
64  max_executable_size_(256ul * (kPointerSize / 4) * MB),
65  // Variables set based on semispace_size_ and old_generation_size_ in
66  // ConfigureHeap.
67  // Will be 4 * reserved_semispace_size_ to ensure that young
68  // generation can be aligned to its size.
74  global_ic_age_(0),
77  new_space_(this),
84  lo_space_(NULL),
89  dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc),
90  ms_count_(0),
91  gc_count_(0),
94 #ifdef DEBUG
95  allocation_timeout_(0),
96 #endif // DEBUG
98  old_gen_exhausted_(false),
104  tracer_(this),
107  promotion_rate_(0),
112  nodes_promoted_(0),
114  max_gc_pause_(0.0),
115  total_gc_time_ms_(0.0),
118  marking_time_(0.0),
119  sweeping_time_(0.0),
121  store_buffer_(this),
122  marking_(this),
123  incremental_marking_(this),
128 #ifdef VERIFY_HEAP
129  no_weak_object_verification_scope_depth_(0),
130 #endif
132  promotion_queue_(this),
133  configured_(false),
137 // Allow build-time customization of the max semispace size. Building
138 // V8 with snapshots and a non-default max semispace size is much
139 // easier if you can define it as part of the build environment.
140 #if defined(V8_MAX_SEMISPACE_SIZE)
141  max_semi_space_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
142 #endif
143 
144  // Ensure old_generation_size_ is a multiple of kPageSize.
146 
147  memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
152  // Put a dummy entry in the remembered pages so we can find the list the
153  // minidump even if there are no real unmapped pages.
154  RememberUnmappedPage(NULL, false);
155 
156  ClearObjectStats(true);
157 }
int64_t amount_of_external_allocated_memory_
Definition: heap.h:1417
bool flush_monomorphic_ics_
Definition: heap.h:1450
ExternalStringTable external_string_table_
Definition: heap.h:2013
intptr_t max_old_generation_size_
Definition: heap.h:1432
double max_gc_pause_
Definition: heap.h:1959
int nodes_copied_in_new_space_
Definition: heap.h:1914
void set_array_buffers_list(Object *object)
Definition: heap.h:795
StoreBuffer store_buffer_
Definition: heap.h:1978
MarkCompactCollector mark_compact_collector_
Definition: heap.h:1976
int sweep_generation_
Definition: heap.h:1441
int initial_semispace_size_
Definition: heap.h:1431
StoreBufferRebuilder store_buffer_rebuilder_
Definition: heap.h:1539
unsigned int maximum_size_scavenges_
Definition: heap.h:1921
uint32_t dump_allocations_hash_countdown_
Definition: heap.h:1476
Isolate * isolate_
Definition: heap.h:1424
uint32_t raw_allocations_hash_
Definition: heap.h:1473
void ClearObjectStats(bool clear_last_time_stats=false)
Definition: heap.cc:6087
OldSpace * code_space_
Definition: heap.h:1457
intptr_t max_executable_size_
Definition: heap.h:1433
int scan_on_scavenge_pages_
Definition: heap.h:1452
double total_gc_time_ms_
Definition: heap.h:1962
int gcs_since_last_deopt_
Definition: heap.h:1994
int global_ic_age_
Definition: heap.h:1448
bool old_gen_exhausted_
Definition: heap.h:1517
int survived_since_last_expansion_
Definition: heap.h:1438
void RememberUnmappedPage(Address page, bool compacted)
Definition: heap.cc:6072
intptr_t old_generation_allocation_limit_
Definition: heap.h:1513
unsigned int ms_count_
Definition: heap.h:1479
int unflattened_strings_length_
Definition: heap.h:1490
int contexts_disposed_
Definition: heap.h:1446
uint32_t allocations_count_
Definition: heap.h:1470
size_t crankshaft_codegen_bytes_generated_
Definition: heap.h:1989
double total_regexp_code_generated_
Definition: heap.h:1882
GCTracer tracer_
Definition: heap.h:1884
static const intptr_t kMinimumOldGenerationAllocationLimit
Definition: heap.h:1011
int always_allocate_scope_depth_
Definition: heap.h:1443
StoreBuffer * store_buffer()
Definition: heap.h:1201
Object * roots_[kRootListLength]
Definition: heap.h:1426
int allocation_sites_scratchpad_length_
Definition: heap.h:2001
void set_allocation_sites_list(Object *object)
Definition: heap.h:798
int64_t amount_of_external_allocated_memory_at_last_global_gc_
Definition: heap.h:1420
String * hidden_string_
Definition: heap.h:1564
int nodes_died_in_new_space_
Definition: heap.h:1913
HeapState gc_state_
Definition: heap.h:1462
int nodes_promoted_
Definition: heap.h:1915
int gc_post_processing_depth_
Definition: heap.h:1463
PropertyCellSpace * property_cell_space_
Definition: heap.h:1460
int max_semi_space_size_
Definition: heap.h:1430
intptr_t maximum_committed_
Definition: heap.h:1434
OldSpace * old_pointer_space_
Definition: heap.h:1455
NewSpace new_space_
Definition: heap.h:1454
int remembered_unmapped_pages_index_
Definition: heap.h:1486
MemoryChunk * chunks_queued_for_free_
Definition: heap.h:2017
bool inline_allocation_disabled_
Definition: heap.h:1521
CellSpace * cell_space_
Definition: heap.h:1459
int gc_callbacks_depth_
Definition: heap.h:2021
PromotionQueue promotion_queue_
Definition: heap.h:2007
int reserved_semispace_size_
Definition: heap.h:1429
double marking_time_
Definition: heap.h:1971
MapSpace * map_space_
Definition: heap.h:1458
unsigned int gc_count_at_last_idle_gc_
Definition: heap.h:1985
unsigned int gc_count_
Definition: heap.h:1482
Marking marking_
Definition: heap.h:1980
IncrementalMarking incremental_marking_
Definition: heap.h:1982
intptr_t max_alive_after_gc_
Definition: heap.h:1965
double semi_space_copied_rate_
Definition: heap.h:1912
double sweeping_time_
Definition: heap.h:1974
void set_native_contexts_list(Object *object)
Definition: heap.h:790
intptr_t promoted_objects_size_
Definition: heap.h:1909
double promotion_rate_
Definition: heap.h:1910
size_t full_codegen_bytes_generated_
Definition: heap.h:1988
bool configured_
Definition: heap.h:2011
OldSpace * old_data_space_
Definition: heap.h:1456
double min_in_mutator_
Definition: heap.h:1968
void set_encountered_weak_collections(Object *weak_collection)
Definition: heap.h:808
size_t code_range_size_
Definition: heap.h:1428
LargeObjectSpace * lo_space_
Definition: heap.h:1461
int high_survival_rate_period_length_
Definition: heap.h:1908
HeapObjectCallback gc_safe_size_of_old_object_
Definition: heap.h:1597
intptr_t semi_space_copied_object_size_
Definition: heap.h:1911
static const int kPageSize
Definition: spaces.h:748
static Smi * FromInt(int value)
Definition: objects-inl.h:1321
NULL
#define DCHECK(condition)
Definition: logging.h:205
const int kPointerSize
Definition: globals.h:129
const int kMaxInt
Definition: globals.h:109
const int MB
Definition: globals.h:107

References ClearObjectStats(), DCHECK, v8::internal::Smi::FromInt(), v8::internal::Page::kPageSize, kRootListLength, max_semi_space_size_, v8::internal::MB, NULL, RememberUnmappedPage(), reserved_semispace_size_, roots_, set_allocation_sites_list(), set_array_buffers_list(), set_encountered_weak_collections(), and set_native_contexts_list().

+ Here is the call graph for this function:

Member Function Documentation

◆ AddAllocationSiteToScratchpad()

void v8::internal::Heap::AddAllocationSiteToScratchpad ( AllocationSite site,
ScratchpadSlotMode  mode 
)
private

Definition at line 3077 of file heap.cc.

3078  {
3080  // We cannot use the normal write-barrier because slots need to be
3081  // recorded with non-incremental marking as well. We have to explicitly
3082  // record the slot to take evacuation candidates into account.
3083  allocation_sites_scratchpad()->set(allocation_sites_scratchpad_length_,
3084  site, SKIP_WRITE_BARRIER);
3085  Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt(
3087 
3088  if (mode == RECORD_SCRATCHPAD_SLOT) {
3089  // We need to allow slots buffer overflow here since the evacuation
3090  // candidates are not part of the global list of old space pages and
3091  // releasing an evacuation candidate due to a slots buffer overflow
3092  // results in lost pages.
3093  mark_compact_collector()->RecordSlot(slot, slot, *slot,
3095  }
3097  }
3098 }
static const int kAllocationSiteScratchpadSize
Definition: heap.h:2000
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1197
mode
@ SKIP_WRITE_BARRIER
Definition: objects.h:235
kSerializedDataOffset Object
Definition: objects-inl.h:5322

References allocation_sites_scratchpad_length_, v8::internal::SlotsBuffer::IGNORE_OVERFLOW, kAllocationSiteScratchpadSize, mark_compact_collector(), mode(), RECORD_SCRATCHPAD_SLOT, and v8::internal::SKIP_WRITE_BARRIER.

Referenced by UpdateAllocationSiteFeedback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AddGCEpilogueCallback()

void v8::internal::Heap::AddGCEpilogueCallback ( v8::Isolate::GCEpilogueCallback  callback,
GCType  gc_type_filter,
bool  pass_isolate = true 
)

Definition at line 5319 of file heap.cc.

5320  {
5321  DCHECK(callback != NULL);
5322  GCEpilogueCallbackPair pair(callback, gc_type, pass_isolate);
5323  DCHECK(!gc_epilogue_callbacks_.Contains(pair));
5324  return gc_epilogue_callbacks_.Add(pair);
5325 }
List< GCEpilogueCallbackPair > gc_epilogue_callbacks_
Definition: heap.h:1594

References DCHECK, gc_epilogue_callbacks_, and NULL.

Referenced by v8::Isolate::AddGCEpilogueCallback(), and v8::V8::AddGCEpilogueCallback().

+ Here is the caller graph for this function:

◆ AddGCPrologueCallback()

void v8::internal::Heap::AddGCPrologueCallback ( v8::Isolate::GCPrologueCallback  callback,
GCType  gc_type_filter,
bool  pass_isolate = true 
)

Definition at line 5298 of file heap.cc.

5299  {
5300  DCHECK(callback != NULL);
5301  GCPrologueCallbackPair pair(callback, gc_type, pass_isolate);
5302  DCHECK(!gc_prologue_callbacks_.Contains(pair));
5303  return gc_prologue_callbacks_.Add(pair);
5304 }
List< GCPrologueCallbackPair > gc_prologue_callbacks_
Definition: heap.h:1580

References DCHECK, gc_prologue_callbacks_, and NULL.

Referenced by v8::Isolate::AddGCPrologueCallback(), and v8::V8::AddGCPrologueCallback().

+ Here is the caller graph for this function:

◆ AddWeakObjectToCodeDependency()

void v8::internal::Heap::AddWeakObjectToCodeDependency ( Handle< Object obj,
Handle< DependentCode dep 
)

Definition at line 5341 of file heap.cc.

5342  {
5343  DCHECK(!InNewSpace(*obj));
5344  DCHECK(!InNewSpace(*dep));
5345  // This handle scope keeps the table handle local to this function, which
5346  // allows us to safely skip write barriers in table update operations.
5347  HandleScope scope(isolate());
5348  Handle<WeakHashTable> table(WeakHashTable::cast(weak_object_to_code_table_),
5349  isolate());
5350  table = WeakHashTable::Put(table, obj, dep);
5351 
5352  if (ShouldZapGarbage() && weak_object_to_code_table_ != *table) {
5353  WeakHashTable::cast(weak_object_to_code_table_)->Zap(the_hole_value());
5354  }
5356  DCHECK_EQ(*dep, table->Lookup(obj));
5357 }
bool InNewSpace(Object *object)
Definition: heap-inl.h:322
Isolate * isolate()
Definition: heap-inl.h:589
Object * weak_object_to_code_table_
Definition: heap.h:1532
void set_weak_object_to_code_table(Object *value)
Definition: heap.h:1935
static bool ShouldZapGarbage()
Definition: heap.h:926
static MUST_USE_RESULT Handle< WeakHashTable > Put(Handle< WeakHashTable > table, Handle< Object > key, Handle< Object > value)
Definition: objects.cc:15376
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206

References DCHECK, DCHECK_EQ, InNewSpace(), isolate(), v8::internal::WeakHashTable::Put(), set_weak_object_to_code_table(), ShouldZapGarbage(), and weak_object_to_code_table_.

Referenced by v8::internal::AddWeakObjectToCodeDependency().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AdjustLiveBytes()

void v8::internal::Heap::AdjustLiveBytes ( Address  address,
int  by,
InvocationMode  mode 
)

Definition at line 3254 of file heap.cc.

3254  {
3255  if (incremental_marking()->IsMarking() &&
3256  Marking::IsBlack(Marking::MarkBitFrom(address))) {
3257  if (mode == FROM_GC) {
3259  } else {
3261  }
3262  }
3263 }
IncrementalMarking * incremental_marking()
Definition: heap.h:1205
static void IncrementLiveBytesFromMutator(Address address, int by)
Definition: spaces.cc:868
static void IncrementLiveBytesFromGC(Address address, int by)
Definition: spaces.h:517

References FROM_GC, incremental_marking(), v8::internal::MemoryChunk::IncrementLiveBytesFromGC(), v8::internal::MemoryChunk::IncrementLiveBytesFromMutator(), and mode().

Referenced by LeftTrimFixedArray(), v8::internal::String::MakeExternal(), v8::internal::JSObject::MigrateFastToSlow(), RightTrimFixedArray(), v8::internal::StringReplaceGlobalRegExpWithEmptyString(), and v8::internal::SeqString::Truncate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AdvanceIdleIncrementalMarking()

void v8::internal::Heap::AdvanceIdleIncrementalMarking ( intptr_t  step_size)
private

Definition at line 4267 of file heap.cc.

4267  {
4268  incremental_marking()->Step(step_size,
4270 
4271  if (incremental_marking()->IsComplete()) {
4272  bool uncommit = false;
4274  // No GC since the last full GC, the mutator is probably not active.
4276  uncommit = true;
4277  }
4279  "idle notification: finalize incremental");
4282  if (uncommit) {
4283  new_space_.Shrink();
4285  }
4286  }
4287 }
bool UncommitFromSpace()
Definition: heap.h:1840
static const int kReduceMemoryFootprintMask
Definition: heap.h:717
GCIdleTimeHandler gc_idle_time_handler_
Definition: heap.h:1984
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:724
void Step(intptr_t allocated, CompletionAction action, bool force_marking=false)
CompilationCache * compilation_cache()
Definition: isolate.h:865

References v8::internal::CompilationCache::Clear(), CollectAllGarbage(), v8::internal::Isolate::compilation_cache(), gc_count_, gc_count_at_last_idle_gc_, gc_idle_time_handler_, incremental_marking(), isolate_, kReduceMemoryFootprintMask, new_space_, v8::internal::IncrementalMarking::NO_GC_VIA_STACK_GUARD, v8::internal::GCIdleTimeHandler::NotifyIdleMarkCompact(), v8::internal::NewSpace::Shrink(), v8::internal::IncrementalMarking::Step(), and UncommitFromSpace().

Referenced by IdleNotification().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AgeInlineCaches()

void v8::internal::Heap::AgeInlineCaches ( )
inline

Definition at line 1270 of file heap.h.

1270  {
1272  }
static const T kMax
Definition: utils.h:209

References global_ic_age_, and v8::internal::BitFieldBase< T, shift, size, U >::kMax.

Referenced by NotifyContextDisposed().

+ Here is the caller graph for this function:

◆ Allocate()

AllocationResult v8::internal::Heap::Allocate ( Map map,
AllocationSpace  space,
AllocationSite allocation_site = NULL 
)
private

Definition at line 3574 of file heap.cc.

3575  {
3577  DCHECK(map->instance_type() != MAP_TYPE);
3578  // If allocation failures are disallowed, we may allocate in a different
3579  // space when new space is full and the object is not a large object.
3580  AllocationSpace retry_space =
3581  (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3582  int size = map->instance_size();
3583  if (allocation_site != NULL) {
3585  }
3586  HeapObject* result;
3587  AllocationResult allocation = AllocateRaw(size, space, retry_space);
3588  if (!allocation.To(&result)) return allocation;
3589  // No need for write barrier since object is white and map is in old space.
3590  result->set_map_no_write_barrier(map);
3591  if (allocation_site != NULL) {
3592  AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
3593  reinterpret_cast<Address>(result) + map->instance_size());
3594  InitializeAllocationMemento(alloc_memento, allocation_site);
3595  }
3596  return result;
3597 }
void InitializeAllocationMemento(AllocationMemento *memento, AllocationSite *allocation_site)
Definition: heap.cc:3563
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:166
static AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:399
map
size
space
byte * Address
Definition: globals.h:101

References AllocateRaw(), DCHECK, gc_state_, InitializeAllocationMemento(), v8::internal::AllocationMemento::kSize, map, v8::internal::MAP_TYPE, v8::internal::NEW_SPACE, NOT_IN_GC, NULL, v8::internal::HeapObject::set_map_no_write_barrier(), size, space(), TargetSpaceId(), and v8::internal::AllocationResult::To().

Referenced by AllocateForeign(), AllocateJSObjectFromMap(), AllocateStruct(), and CreateInitialMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateByteArray()

AllocationResult v8::internal::Heap::AllocateByteArray ( int  length,
PretenureFlag  pretenure = NOT_TENURED 
)
protected

Definition at line 3203 of file heap.cc.

3203  {
3204  if (length < 0 || length > ByteArray::kMaxLength) {
3205  v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
3206  }
3207  int size = ByteArray::SizeFor(length);
3209  HeapObject* result;
3210  {
3211  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3212  if (!allocation.To(&result)) return allocation;
3213  }
3214 
3215  result->set_map_no_write_barrier(byte_array_map());
3216  ByteArray::cast(result)->set_length(length);
3217  return result;
3218 }
static int SizeFor(int length)
Definition: objects.h:4360
static const int kMaxLength
Definition: objects.h:4394
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Definition: heap.cc:5376
static AllocationSpace SelectSpace(int object_size, AllocationSpace preferred_old_space, PretenureFlag pretenure)
Definition: heap.h:1649
@ OLD_DATA_SPACE
Definition: globals.h:361

References AllocateRaw(), FatalProcessOutOfMemory(), v8::internal::ByteArray::kMaxLength, v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::ByteArray::SizeFor(), space(), and v8::internal::AllocationResult::To().

Referenced by CopyCode(), and CreateInitialMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateCell()

AllocationResult v8::internal::Heap::AllocateCell ( Object value)
private

Definition at line 2646 of file heap.cc.

2646  {
2647  int size = Cell::kSize;
2649 
2650  HeapObject* result;
2651  {
2652  AllocationResult allocation = AllocateRaw(size, CELL_SPACE, CELL_SPACE);
2653  if (!allocation.To(&result)) return allocation;
2654  }
2655  result->set_map_no_write_barrier(cell_map());
2656  Cell::cast(result)->set_value(value);
2657  return result;
2658 }
static const int kSize
Definition: objects.h:9447
STATIC_ASSERT(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
static const int kMaxRegularHeapObjectSize
Definition: spaces.h:754

References AllocateRaw(), v8::internal::CELL_SPACE, v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::Cell::kSize, v8::internal::HeapObject::set_map_no_write_barrier(), size, STATIC_ASSERT(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateCode()

AllocationResult v8::internal::Heap::AllocateCode ( int  object_size,
bool  immovable 
)
private

Definition at line 3431 of file heap.cc.

3431  {
3432  DCHECK(IsAligned(static_cast<intptr_t>(object_size), kCodeAlignment));
3433  AllocationResult allocation =
3434  AllocateRaw(object_size, CODE_SPACE, CODE_SPACE);
3435 
3436  HeapObject* result;
3437  if (!allocation.To(&result)) return allocation;
3438 
3439  if (immovable) {
3440  Address address = result->address();
3441  // Code objects which should stay at a fixed address are allocated either
3442  // in the first page of code space (objects on the first page of each space
3443  // are never moved) or in large object space.
3444  if (!code_space_->FirstPage()->Contains(address) &&
3445  MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) {
3446  // Discard the first code allocation, which was on a page where it could
3447  // be moved.
3448  CreateFillerObjectAt(result->address(), object_size);
3449  allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
3450  if (!allocation.To(&result)) return allocation;
3451  OnAllocationEvent(result, object_size);
3452  }
3453  }
3454 
3455  result->set_map_no_write_barrier(code_map());
3456  Code* code = Code::cast(result);
3458  isolate_->code_range()->contains(code->address()));
3459  code->set_gc_metadata(Smi::FromInt(0));
3460  code->set_ic_age(global_ic_age_);
3461  return code;
3462 }
bool contains(Address address)
Definition: spaces.h:887
void OnAllocationEvent(HeapObject *object, int size_in_bytes)
Definition: heap-inl.h:224
void CreateFillerObjectAt(Address addr, int size)
Definition: heap.cc:3221
CodeRange * code_range()
Definition: isolate.h:863
MUST_USE_RESULT AllocationResult AllocateRaw(int object_size, Executability executable)
Definition: spaces.cc:2834
bool Contains(Address addr)
Definition: spaces.h:348
Space * owner() const
Definition: spaces.h:307
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:276
AllocationSpace identity()
Definition: spaces.h:829
const intptr_t kCodeAlignment
Definition: globals.h:240
bool IsAligned(T value, U alignment)
Definition: utils.h:123

References v8::internal::HeapObject::address(), v8::internal::LargeObjectSpace::AllocateRaw(), AllocateRaw(), v8::internal::Isolate::code_range(), v8::internal::CODE_SPACE, code_space_, v8::internal::MemoryChunk::Contains(), v8::internal::CodeRange::contains(), CreateFillerObjectAt(), DCHECK, v8::internal::EXECUTABLE, v8::internal::PagedSpace::FirstPage(), v8::internal::MemoryChunk::FromAddress(), v8::internal::Smi::FromInt(), global_ic_age_, v8::internal::Space::identity(), v8::internal::IsAligned(), isolate_, v8::internal::kCodeAlignment, v8::internal::LO_SPACE, lo_space_, NULL, OnAllocationEvent(), v8::internal::MemoryChunk::owner(), v8::internal::Code::set_ic_age(), v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::AllocationResult::To(), and v8::internal::CodeRange::valid().

+ Here is the call graph for this function:

◆ AllocateConstantPoolArray()

AllocationResult v8::internal::Heap::AllocateConstantPoolArray ( const ConstantPoolArray::NumberOfEntries &  small)
private

Definition at line 4128 of file heap.cc.

4129  {
4130  CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
4131  int size = ConstantPoolArray::SizeFor(small);
4132 #ifndef V8_HOST_ARCH_64_BIT
4133  size += kPointerSize;
4134 #endif
4136 
4137  HeapObject* object;
4138  {
4139  AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
4140  if (!allocation.To(&object)) return allocation;
4141  }
4142  object = EnsureDoubleAligned(this, object, size);
4143  object->set_map_no_write_barrier(constant_pool_array_map());
4144 
4145  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
4146  constant_pool->Init(small);
4147  constant_pool->ClearPtrEntries(isolate());
4148  return constant_pool;
4149 }
static const int kMaxSmallEntriesPerType
Definition: objects.h:2830
static int SizeFor(const NumberOfEntries &small)
Definition: objects.h:2755
#define CHECK(condition)
Definition: logging.h:36
static HeapObject * EnsureDoubleAligned(Heap *heap, HeapObject *object, int size)
Definition: heap.cc:1799
@ OLD_POINTER_SPACE
Definition: globals.h:360

References AllocateRaw(), CHECK, v8::internal::ConstantPoolArray::ClearPtrEntries(), v8::internal::EnsureDoubleAligned(), v8::internal::ConstantPoolArray::Init(), isolate(), v8::internal::ConstantPoolArray::kMaxSmallEntriesPerType, v8::internal::kPointerSize, v8::internal::OLD_POINTER_SPACE, SelectSpace(), size, v8::internal::ConstantPoolArray::SizeFor(), space(), v8::internal::TENURED, and v8::internal::AllocationResult::To().

Referenced by CopyConstantPoolArrayWithMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateEmptyConstantPoolArray()

AllocationResult v8::internal::Heap::AllocateEmptyConstantPoolArray ( )
private

Definition at line 4178 of file heap.cc.

4178  {
4179  ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0);
4180  int size = ConstantPoolArray::SizeFor(small);
4181  HeapObject* result;
4182  {
4183  AllocationResult allocation =
4185  if (!allocation.To(&result)) return allocation;
4186  }
4187  result->set_map_no_write_barrier(constant_pool_array_map());
4188  ConstantPoolArray::cast(result)->Init(small);
4189  return result;
4190 }

References AllocateRaw(), v8::internal::OLD_DATA_SPACE, v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::ConstantPoolArray::SizeFor(), and v8::internal::AllocationResult::To().

Referenced by CreateInitialMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateEmptyExternalArray()

AllocationResult v8::internal::Heap::AllocateEmptyExternalArray ( ExternalArrayType  array_type)
private

Definition at line 3933 of file heap.cc.

3934  {
3935  return AllocateExternalArray(0, array_type, NULL, TENURED);
3936 }
MUST_USE_RESULT AllocationResult AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
Definition: heap.cc:3362

References AllocateExternalArray(), NULL, and v8::internal::TENURED.

+ Here is the call graph for this function:

◆ AllocateEmptyFixedArray()

AllocationResult v8::internal::Heap::AllocateEmptyFixedArray ( )
private

Definition at line 3918 of file heap.cc.

3918  {
3919  int size = FixedArray::SizeFor(0);
3920  HeapObject* result;
3921  {
3922  AllocationResult allocation =
3924  if (!allocation.To(&result)) return allocation;
3925  }
3926  // Initialize the object.
3927  result->set_map_no_write_barrier(fixed_array_map());
3928  FixedArray::cast(result)->set_length(0);
3929  return result;
3930 }
static int SizeFor(int length)
Definition: objects.h:2452

References AllocateRaw(), v8::internal::OLD_DATA_SPACE, v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::FixedArray::SizeFor(), and v8::internal::AllocationResult::To().

Referenced by CreateInitialMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateEmptyFixedTypedArray()

AllocationResult v8::internal::Heap::AllocateEmptyFixedTypedArray ( ExternalArrayType  array_type)
private

Definition at line 3967 of file heap.cc.

3968  {
3969  return AllocateFixedTypedArray(0, array_type, TENURED);
3970 }
MUST_USE_RESULT AllocationResult AllocateFixedTypedArray(int length, ExternalArrayType array_type, PretenureFlag pretenure)
Definition: heap.cc:3400

References AllocateFixedTypedArray(), and v8::internal::TENURED.

+ Here is the call graph for this function:

◆ AllocateExtendedConstantPoolArray()

AllocationResult v8::internal::Heap::AllocateExtendedConstantPoolArray ( const ConstantPoolArray::NumberOfEntries &  small,
const ConstantPoolArray::NumberOfEntries &  extended 
)
private

Definition at line 4152 of file heap.cc.

4154  {
4155  CHECK(small.are_in_range(0, ConstantPoolArray::kMaxSmallEntriesPerType));
4156  CHECK(extended.are_in_range(0, kMaxInt));
4157  int size = ConstantPoolArray::SizeForExtended(small, extended);
4158 #ifndef V8_HOST_ARCH_64_BIT
4159  size += kPointerSize;
4160 #endif
4162 
4163  HeapObject* object;
4164  {
4165  AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
4166  if (!allocation.To(&object)) return allocation;
4167  }
4168  object = EnsureDoubleAligned(this, object, size);
4169  object->set_map_no_write_barrier(constant_pool_array_map());
4170 
4171  ConstantPoolArray* constant_pool = ConstantPoolArray::cast(object);
4172  constant_pool->InitExtended(small, extended);
4173  constant_pool->ClearPtrEntries(isolate());
4174  return constant_pool;
4175 }
static int SizeForExtended(const NumberOfEntries &small, const NumberOfEntries &extended)
Definition: objects.h:2764

References AllocateRaw(), CHECK, v8::internal::ConstantPoolArray::ClearPtrEntries(), v8::internal::EnsureDoubleAligned(), v8::internal::ConstantPoolArray::InitExtended(), isolate(), v8::internal::kMaxInt, v8::internal::ConstantPoolArray::kMaxSmallEntriesPerType, v8::internal::kPointerSize, v8::internal::OLD_POINTER_SPACE, SelectSpace(), size, v8::internal::ConstantPoolArray::SizeForExtended(), space(), v8::internal::TENURED, and v8::internal::AllocationResult::To().

Referenced by CopyConstantPoolArrayWithMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateExternalArray()

AllocationResult v8::internal::Heap::AllocateExternalArray ( int  length,
ExternalArrayType  array_type,
void *  external_pointer,
PretenureFlag  pretenure 
)
private

Definition at line 3362 of file heap.cc.

3365  {
3368  HeapObject* result;
3369  {
3370  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3371  if (!allocation.To(&result)) return allocation;
3372  }
3373 
3374  result->set_map_no_write_barrier(MapForExternalArrayType(array_type));
3375  ExternalArray::cast(result)->set_length(length);
3376  ExternalArray::cast(result)->set_external_pointer(external_pointer);
3377  return result;
3378 }
static const int kAlignedSize
Definition: objects.h:4474
Map * MapForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3101

References AllocateRaw(), v8::internal::ExternalArray::kAlignedSize, MapForExternalArrayType(), v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::HeapObject::set_map_no_write_barrier(), size, space(), and v8::internal::AllocationResult::To().

Referenced by AllocateEmptyExternalArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateFillerObject()

AllocationResult v8::internal::Heap::AllocateFillerObject ( int  size,
bool  double_align,
AllocationSpace  space 
)
private

Definition at line 2329 of file heap.cc.

2330  {
2331  HeapObject* obj;
2332  {
2333  AllocationResult allocation = AllocateRaw(size, space, space);
2334  if (!allocation.To(&obj)) return allocation;
2335  }
2336 #ifdef DEBUG
2337  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
2338  DCHECK(chunk->owner()->identity() == space);
2339 #endif
2340  CreateFillerObjectAt(obj->address(), size);
2341  return obj;
2342 }

References v8::internal::HeapObject::address(), AllocateRaw(), CreateFillerObjectAt(), DCHECK, v8::internal::MemoryChunk::FromAddress(), v8::internal::Space::identity(), v8::internal::MemoryChunk::owner(), size, space(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateFixedArray()

AllocationResult v8::internal::Heap::AllocateFixedArray ( int  length,
PretenureFlag  pretenure = NOT_TENURED 
)
protected

Definition at line 4073 of file heap.cc.

4073  {
4074  return AllocateFixedArrayWithFiller(length, pretenure, undefined_value());
4075 }
MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure, Object *filler)
Definition: heap.cc:4051

References AllocateFixedArrayWithFiller().

Referenced by AllocateJSObjectFromMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateFixedArrayWithFiller()

AllocationResult v8::internal::Heap::AllocateFixedArrayWithFiller ( int  length,
PretenureFlag  pretenure,
Object filler 
)
private

Definition at line 4051 of file heap.cc.

4053  {
4054  DCHECK(length >= 0);
4055  DCHECK(empty_fixed_array()->IsFixedArray());
4056  if (length == 0) return empty_fixed_array();
4057 
4058  DCHECK(!InNewSpace(filler));
4059  HeapObject* result;
4060  {
4061  AllocationResult allocation = AllocateRawFixedArray(length, pretenure);
4062  if (!allocation.To(&result)) return allocation;
4063  }
4064 
4065  result->set_map_no_write_barrier(fixed_array_map());
4066  FixedArray* array = FixedArray::cast(result);
4067  array->set_length(length);
4068  MemsetPointer(array->data_start(), filler, length);
4069  return array;
4070 }
MUST_USE_RESULT AllocationResult AllocateRawFixedArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4039
void MemsetPointer(T **dest, U *value, int counter)
Definition: utils.h:1183

References AllocateRawFixedArray(), v8::internal::FixedArray::data_start(), DCHECK, InNewSpace(), v8::internal::MemsetPointer(), v8::internal::FixedArrayBase::set_length(), v8::internal::HeapObject::set_map_no_write_barrier(), and v8::internal::AllocationResult::To().

Referenced by AllocateFixedArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateFixedTypedArray()

AllocationResult v8::internal::Heap::AllocateFixedTypedArray ( int  length,
ExternalArrayType  array_type,
PretenureFlag  pretenure 
)
private

Definition at line 3400 of file heap.cc.

3402  {
3403  int element_size;
3404  ElementsKind elements_kind;
3405  ForFixedTypedArray(array_type, &element_size, &elements_kind);
3406  int size = OBJECT_POINTER_ALIGN(length * element_size +
3408 #ifndef V8_HOST_ARCH_64_BIT
3409  if (array_type == kExternalFloat64Array) {
3410  size += kPointerSize;
3411  }
3412 #endif
3414 
3415  HeapObject* object;
3416  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3417  if (!allocation.To(&object)) return allocation;
3418 
3419  if (array_type == kExternalFloat64Array) {
3420  object = EnsureDoubleAligned(this, object, size);
3421  }
3422 
3423  object->set_map(MapForFixedTypedArray(array_type));
3424  FixedTypedArrayBase* elements = FixedTypedArrayBase::cast(object);
3425  elements->set_length(length);
3426  memset(elements->DataPtr(), 0, elements->DataSize());
3427  return elements;
3428 }
static const int kDataOffset
Definition: objects.h:4716
Map * MapForFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3123
#define OBJECT_POINTER_ALIGN(value)
Definition: globals.h:578
static void ForFixedTypedArray(ExternalArrayType array_type, int *element_size, ElementsKind *element_kind)
Definition: heap.cc:3380
@ kExternalFloat64Array
Definition: v8.h:2225

References AllocateRaw(), v8::internal::FixedTypedArrayBase::DataPtr(), v8::internal::FixedTypedArrayBase::DataSize(), v8::internal::EnsureDoubleAligned(), v8::internal::ForFixedTypedArray(), v8::internal::FixedTypedArrayBase::kDataOffset, v8::kExternalFloat64Array, v8::internal::kPointerSize, MapForFixedTypedArray(), OBJECT_POINTER_ALIGN, v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::FixedArrayBase::set_length(), size, space(), and v8::internal::AllocationResult::To().

Referenced by AllocateEmptyFixedTypedArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateForeign()

AllocationResult v8::internal::Heap::AllocateForeign ( Address  address,
PretenureFlag  pretenure = NOT_TENURED 
)
private

Definition at line 3190 of file heap.cc.

3191  {
3192  // Statically ensure that it is safe to allocate foreigns in paged spaces.
3194  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3195  Foreign* result;
3196  AllocationResult allocation = Allocate(foreign_map(), space);
3197  if (!allocation.To(&result)) return allocation;
3198  result->set_foreign_address(address);
3199  return result;
3200 }
static const int kSize
Definition: objects.h:10005
MUST_USE_RESULT AllocationResult Allocate(Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
Definition: heap.cc:3574

References Allocate(), v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::Foreign::kSize, v8::internal::NEW_SPACE, v8::internal::OLD_DATA_SPACE, v8::internal::Foreign::set_foreign_address(), space(), STATIC_ASSERT(), v8::internal::TENURED, and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateHeapNumber()

AllocationResult v8::internal::Heap::AllocateHeapNumber ( double  value,
MutableMode  mode = IMMUTABLE,
PretenureFlag  pretenure = NOT_TENURED 
)
protected

Definition at line 2624 of file heap.cc.

2625  {
2626  // Statically ensure that it is safe to allocate heap numbers in paged
2627  // spaces.
2628  int size = HeapNumber::kSize;
2630 
2632 
2633  HeapObject* result;
2634  {
2635  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
2636  if (!allocation.To(&result)) return allocation;
2637  }
2638 
2639  Map* map = mode == MUTABLE ? mutable_heap_number_map() : heap_number_map();
2640  HeapObject::cast(result)->set_map_no_write_barrier(map);
2641  HeapNumber::cast(result)->set_value(value);
2642  return result;
2643 }
static const int kSize
Definition: objects.h:1521

References AllocateRaw(), v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::HeapNumber::kSize, map, mode(), v8::internal::MUTABLE, v8::internal::OLD_DATA_SPACE, SelectSpace(), size, space(), STATIC_ASSERT(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateInternalizedStringFromUtf8()

AllocationResult v8::internal::Heap::AllocateInternalizedStringFromUtf8 ( Vector< const char >  str,
int  chars,
uint32_t  hash_field 
)
inlineprivate

Definition at line 66 of file heap-inl.h.

67  {
68  if (IsOneByte(str, chars)) {
70  hash_field);
71  }
72  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
73 }
static bool IsOneByte(T t, int chars)
MUST_USE_RESULT AllocationResult AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
Definition: heap-inl.h:86
static Vector< T > cast(Vector< S > input)
Definition: vector.h:98

References AllocateOneByteInternalizedString(), and IsOneByte().

+ Here is the call graph for this function:

◆ AllocateInternalizedStringImpl() [1/4]

template<typename T >
AllocationResult v8::internal::Heap::AllocateInternalizedStringImpl ( T  t,
int  chars,
uint32_t  hash_field 
)

Definition at line 77 of file heap-inl.h.

78  {
79  if (IsOneByte(t, chars)) {
80  return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
81  }
82  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
83 }

References IsOneByte().

+ Here is the call graph for this function:

◆ AllocateInternalizedStringImpl() [2/4]

template<bool is_one_byte, typename T >
AllocationResult v8::internal::Heap::AllocateInternalizedStringImpl ( T  t,
int  chars,
uint32_t  hash_field 
)

Definition at line 3818 of file heap.cc.

3819  {
3820  DCHECK(chars >= 0);
3821  // Compute map and object size.
3822  int size;
3823  Map* map;
3824 
3825  DCHECK_LE(0, chars);
3826  DCHECK_GE(String::kMaxLength, chars);
3827  if (is_one_byte) {
3828  map = one_byte_internalized_string_map();
3830  } else {
3831  map = internalized_string_map();
3833  }
3835 
3836  // Allocate string.
3837  HeapObject* result;
3838  {
3839  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3840  if (!allocation.To(&result)) return allocation;
3841  }
3842 
3843  result->set_map_no_write_barrier(map);
3844  // Set length and hash fields of the allocated string.
3845  String* answer = String::cast(result);
3846  answer->set_length(chars);
3847  answer->set_hash_field(hash_field);
3848 
3849  DCHECK_EQ(size, answer->Size());
3850 
3851  if (is_one_byte) {
3852  WriteOneByteData(t, SeqOneByteString::cast(answer)->GetChars(), chars);
3853  } else {
3854  WriteTwoByteData(t, SeqTwoByteString::cast(answer)->GetChars(), chars);
3855  }
3856  return answer;
3857 }
static int SizeFor(int length)
Definition: objects.h:8976
static int SizeFor(int length)
Definition: objects.h:9015
static const int kMaxLength
Definition: objects.h:8820
#define DCHECK_LE(v1, v2)
Definition: logging.h:210
#define DCHECK_GE(v1, v2)
Definition: logging.h:208
static void WriteTwoByteData(Vector< const char > vector, uint16_t *chars, int len)
Definition: heap.cc:3778
static void WriteOneByteData(Vector< const char > vector, uint8_t *chars, int len)
Definition: heap.cc:3771

References AllocateRaw(), DCHECK, DCHECK_EQ, DCHECK_GE, DCHECK_LE, v8::internal::String::kMaxLength, map, v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::Name::set_hash_field(), v8::internal::String::set_length(), v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::HeapObject::Size(), v8::internal::SeqOneByteString::SizeFor(), v8::internal::SeqTwoByteString::SizeFor(), space(), v8::internal::TENURED, v8::internal::AllocationResult::To(), v8::internal::WriteOneByteData(), and v8::internal::WriteTwoByteData().

+ Here is the call graph for this function:

◆ AllocateInternalizedStringImpl() [3/4]

template<bool is_one_byte, typename T >
MUST_USE_RESULT AllocationResult v8::internal::Heap::AllocateInternalizedStringImpl ( T  t,
int  chars,
uint32_t  hash_field 
)
private

◆ AllocateInternalizedStringImpl() [4/4]

template<typename T >
MUST_USE_RESULT AllocationResult v8::internal::Heap::AllocateInternalizedStringImpl ( T  t,
int  chars,
uint32_t  hash_field 
)
inlineprivate

◆ AllocateJSObject()

AllocationResult v8::internal::Heap::AllocateJSObject ( JSFunction constructor,
PretenureFlag  pretenure = NOT_TENURED,
AllocationSite allocation_site = NULL 
)
protected

Definition at line 3671 of file heap.cc.

3673  {
3674  DCHECK(constructor->has_initial_map());
3675 
3676  // Allocate the object based on the constructor's initial map.
3677  AllocationResult allocation = AllocateJSObjectFromMap(
3678  constructor->initial_map(), pretenure, true, allocation_site);
3679 #ifdef DEBUG
3680  // Make sure result is NOT a global object if valid.
3681  HeapObject* obj;
3682  DCHECK(!allocation.To(&obj) || !obj->IsGlobalObject());
3683 #endif
3684  return allocation;
3685 }
MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
Definition: heap.cc:3631

References AllocateJSObjectFromMap(), DCHECK, v8::internal::JSFunction::has_initial_map(), v8::internal::JSFunction::initial_map(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateJSObjectFromMap()

AllocationResult v8::internal::Heap::AllocateJSObjectFromMap ( Map map,
PretenureFlag  pretenure = NOT_TENURED,
bool  alloc_props = true,
AllocationSite allocation_site = NULL 
)
protected

Definition at line 3631 of file heap.cc.

3633  {
3634  // JSFunctions should be allocated using AllocateFunction to be
3635  // properly initialized.
3636  DCHECK(map->instance_type() != JS_FUNCTION_TYPE);
3637 
3638  // Both types of global objects should be allocated using
3639  // AllocateGlobalObject to be properly initialized.
3640  DCHECK(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
3641  DCHECK(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
3642 
3643  // Allocate the backing storage for the properties.
3644  FixedArray* properties;
3645  if (allocate_properties) {
3646  int prop_size = map->InitialPropertiesLength();
3647  DCHECK(prop_size >= 0);
3648  {
3649  AllocationResult allocation = AllocateFixedArray(prop_size, pretenure);
3650  if (!allocation.To(&properties)) return allocation;
3651  }
3652  } else {
3653  properties = empty_fixed_array();
3654  }
3655 
3656  // Allocate the JSObject.
3657  int size = map->instance_size();
3659  JSObject* js_obj;
3660  AllocationResult allocation = Allocate(map, space, allocation_site);
3661  if (!allocation.To(&js_obj)) return allocation;
3662 
3663  // Initialize the JSObject.
3664  InitializeJSObjectFromMap(js_obj, properties, map);
3665  DCHECK(js_obj->HasFastElements() || js_obj->HasExternalArrayElements() ||
3666  js_obj->HasFixedTypedArrayElements());
3667  return js_obj;
3668 }
void InitializeJSObjectFromMap(JSObject *obj, FixedArray *properties, Map *map)
Definition: heap.cc:3600
MUST_USE_RESULT AllocationResult AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:4073
@ JS_FUNCTION_TYPE
Definition: objects.h:749
@ JS_GLOBAL_OBJECT_TYPE
Definition: objects.h:735
@ JS_BUILTINS_OBJECT_TYPE
Definition: objects.h:736

References Allocate(), AllocateFixedArray(), DCHECK, v8::internal::JSObject::HasExternalArrayElements(), v8::internal::JSObject::HasFastElements(), v8::internal::JSObject::HasFixedTypedArrayElements(), InitializeJSObjectFromMap(), v8::internal::JS_BUILTINS_OBJECT_TYPE, v8::internal::JS_FUNCTION_TYPE, v8::internal::JS_GLOBAL_OBJECT_TYPE, map, v8::internal::OLD_POINTER_SPACE, SelectSpace(), size, space(), and v8::internal::AllocationResult::To().

Referenced by AllocateJSObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateMap()

AllocationResult v8::internal::Heap::AllocateMap ( InstanceType  instance_type,
int  instance_size,
ElementsKind  elements_kind = TERMINAL_FAST_ELEMENTS_KIND 
)
protected

Definition at line 2295 of file heap.cc.

2297  {
2298  HeapObject* result;
2299  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
2300  if (!allocation.To(&result)) return allocation;
2301 
2302  result->set_map_no_write_barrier(meta_map());
2303  Map* map = Map::cast(result);
2304  map->set_instance_type(instance_type);
2305  map->set_visitor_id(
2306  StaticVisitorBase::GetVisitorId(instance_type, instance_size));
2307  map->set_prototype(null_value(), SKIP_WRITE_BARRIER);
2308  map->set_constructor(null_value(), SKIP_WRITE_BARRIER);
2309  map->set_instance_size(instance_size);
2310  map->set_inobject_properties(0);
2311  map->set_pre_allocated_property_fields(0);
2312  map->set_code_cache(empty_fixed_array(), SKIP_WRITE_BARRIER);
2313  map->set_dependent_code(DependentCode::cast(empty_fixed_array()),
2315  map->init_back_pointer(undefined_value());
2316  map->set_unused_property_fields(0);
2317  map->set_instance_descriptors(empty_descriptor_array());
2318  map->set_bit_field(0);
2319  map->set_bit_field2(1 << Map::kIsExtensible);
2322  map->set_bit_field3(bit_field3);
2323  map->set_elements_kind(elements_kind);
2324 
2325  return map;
2326 }
static U encode(T value)
Definition: utils.h:217
static const int kIsExtensible
Definition: objects.h:6250
static const int kSize
Definition: objects.h:6202
static VisitorId GetVisitorId(int instance_type, int instance_size)
static const int kInvalidEnumCacheSentinel

References AllocateRaw(), v8::internal::BitFieldBase< T, shift, size, U >::encode(), v8::internal::StaticVisitorBase::GetVisitorId(), v8::internal::kInvalidEnumCacheSentinel, v8::internal::Map::kIsExtensible, v8::internal::Map::kSize, map, v8::internal::MAP_SPACE, v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::SKIP_WRITE_BARRIER, and v8::internal::AllocationResult::To().

Referenced by CreateInitialMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateOneByteInternalizedString()

AllocationResult v8::internal::Heap::AllocateOneByteInternalizedString ( Vector< const uint8_t >  str,
uint32_t  hash_field 
)
inlineprivate

Definition at line 86 of file heap-inl.h.

87  {
88  CHECK_GE(String::kMaxLength, str.length());
89  // Compute map and object size.
90  Map* map = one_byte_internalized_string_map();
91  int size = SeqOneByteString::SizeFor(str.length());
93 
94  // Allocate string.
95  HeapObject* result;
96  {
97  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
98  if (!allocation.To(&result)) return allocation;
99  }
100 
101  // String maps are all immortal immovable objects.
102  result->set_map_no_write_barrier(map);
103  // Set length and hash fields of the allocated string.
104  String* answer = String::cast(result);
105  answer->set_length(str.length());
106  answer->set_hash_field(hash_field);
107 
108  DCHECK_EQ(size, answer->Size());
109 
110  // Fill in the characters.
111  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
112  str.length());
113 
114  return answer;
115 }
static const int kHeaderSize
Definition: objects.h:8941
#define CHECK_GE(a, b)
Definition: logging.h:178
void MemCopy(void *dest, const void *src, size_t size)
Definition: utils.h:350

References v8::internal::HeapObject::address(), AllocateRaw(), CHECK_GE, DCHECK_EQ, v8::internal::SeqString::kHeaderSize, v8::internal::String::kMaxLength, v8::internal::Vector< T >::length(), map, v8::internal::MemCopy(), v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::Name::set_hash_field(), v8::internal::String::set_length(), v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::HeapObject::Size(), v8::internal::SeqOneByteString::SizeFor(), space(), v8::internal::Vector< T >::start(), v8::internal::TENURED, and v8::internal::AllocationResult::To().

Referenced by AllocateInternalizedStringFromUtf8().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocatePartialMap()

AllocationResult v8::internal::Heap::AllocatePartialMap ( InstanceType  instance_type,
int  instance_size 
)
private

Definition at line 2271 of file heap.cc.

2272  {
2273  Object* result;
2274  AllocationResult allocation = AllocateRaw(Map::kSize, MAP_SPACE, MAP_SPACE);
2275  if (!allocation.To(&result)) return allocation;
2276 
2277  // Map::cast cannot be used due to uninitialized map field.
2278  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
2279  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
2280  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
2281  reinterpret_cast<Map*>(result)->set_visitor_id(
2282  StaticVisitorBase::GetVisitorId(instance_type, instance_size));
2283  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
2284  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
2285  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
2286  reinterpret_cast<Map*>(result)->set_bit_field(0);
2287  reinterpret_cast<Map*>(result)->set_bit_field2(0);
2290  reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
2291  return result;
2292 }

References AllocateRaw(), v8::internal::BitFieldBase< T, shift, size, U >::encode(), v8::internal::StaticVisitorBase::GetVisitorId(), v8::internal::kInvalidEnumCacheSentinel, v8::internal::Map::kSize, v8::internal::MAP_SPACE, and v8::internal::AllocationResult::To().

Referenced by CreateInitialMaps().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocatePropertyCell()

AllocationResult v8::internal::Heap::AllocatePropertyCell ( )
private

Definition at line 2661 of file heap.cc.

2661  {
2662  int size = PropertyCell::kSize;
2664 
2665  HeapObject* result;
2666  AllocationResult allocation =
2668  if (!allocation.To(&result)) return allocation;
2669 
2670  result->set_map_no_write_barrier(global_property_cell_map());
2671  PropertyCell* cell = PropertyCell::cast(result);
2672  cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
2674  cell->set_value(the_hole_value());
2675  cell->set_type(HeapType::None());
2676  return result;
2677 }
static const int kSize
Definition: objects.h:9496
@ PROPERTY_CELL_SPACE
Definition: globals.h:365
@ None
Definition: v8.h:2211

References AllocateRaw(), v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::PropertyCell::kSize, v8::None, v8::internal::PROPERTY_CELL_SPACE, v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::PropertyCell::set_type(), size, v8::internal::SKIP_WRITE_BARRIER, STATIC_ASSERT(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateRaw()

AllocationResult v8::internal::Heap::AllocateRaw ( int  size_in_bytes,
AllocationSpace  space,
AllocationSpace  retry_space 
)
inlineprivate

Definition at line 166 of file heap-inl.h.

167  {
168  DCHECK(AllowHandleAllocation::IsAllowed());
169  DCHECK(AllowHeapAllocation::IsAllowed());
171 #ifdef DEBUG
172  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
173  Heap::allocation_timeout_-- <= 0) {
175  }
176  isolate_->counters()->objs_since_last_full()->Increment();
177  isolate_->counters()->objs_since_last_young()->Increment();
178 #endif
179 
180  HeapObject* object;
181  AllocationResult allocation;
182  if (NEW_SPACE == space) {
183  allocation = new_space_.AllocateRaw(size_in_bytes);
184  if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
185  space = retry_space;
186  } else {
187  if (allocation.To(&object)) {
188  OnAllocationEvent(object, size_in_bytes);
189  }
190  return allocation;
191  }
192  }
193 
194  if (OLD_POINTER_SPACE == space) {
195  allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
196  } else if (OLD_DATA_SPACE == space) {
197  allocation = old_data_space_->AllocateRaw(size_in_bytes);
198  } else if (CODE_SPACE == space) {
199  if (size_in_bytes <= code_space()->AreaSize()) {
200  allocation = code_space_->AllocateRaw(size_in_bytes);
201  } else {
202  // Large code objects are allocated in large object space.
203  allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
204  }
205  } else if (LO_SPACE == space) {
206  allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
207  } else if (CELL_SPACE == space) {
208  allocation = cell_space_->AllocateRaw(size_in_bytes);
209  } else if (PROPERTY_CELL_SPACE == space) {
210  allocation = property_cell_space_->AllocateRaw(size_in_bytes);
211  } else {
212  DCHECK(MAP_SPACE == space);
213  allocation = map_space_->AllocateRaw(size_in_bytes);
214  }
215  if (allocation.To(&object)) {
216  OnAllocationEvent(object, size_in_bytes);
217  } else {
218  old_gen_exhausted_ = true;
219  }
220  return allocation;
221 }
static AllocationResult Retry(AllocationSpace space=NEW_SPACE)
Definition: spaces.h:1616
OldSpace * code_space()
Definition: heap.h:596
bool always_allocate()
Definition: heap.h:622
Counters * counters()
Definition: isolate.h:857
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes)
Definition: spaces-inl.h:248
@ NOT_EXECUTABLE
Definition: globals.h:391

References v8::internal::LargeObjectSpace::AllocateRaw(), v8::internal::PagedSpace::AllocateRaw(), always_allocate(), v8::internal::CELL_SPACE, cell_space_, v8::internal::CODE_SPACE, code_space(), code_space_, v8::internal::Isolate::counters(), DCHECK, v8::internal::EXECUTABLE, gc_state_, isolate_, v8::internal::AllocationResult::IsRetry(), v8::internal::LO_SPACE, lo_space_, v8::internal::MAP_SPACE, map_space_, v8::internal::NEW_SPACE, new_space_, v8::internal::NOT_EXECUTABLE, NOT_IN_GC, v8::internal::OLD_DATA_SPACE, old_data_space_, old_gen_exhausted_, v8::internal::OLD_POINTER_SPACE, old_pointer_space_, OnAllocationEvent(), v8::internal::PROPERTY_CELL_SPACE, property_cell_space_, v8::internal::AllocationResult::Retry(), space(), and v8::internal::AllocationResult::To().

Referenced by Allocate(), AllocateByteArray(), AllocateCell(), AllocateCode(), AllocateConstantPoolArray(), AllocateEmptyConstantPoolArray(), AllocateEmptyFixedArray(), AllocateExtendedConstantPoolArray(), AllocateExternalArray(), AllocateFillerObject(), AllocateFixedTypedArray(), AllocateHeapNumber(), AllocateInternalizedStringImpl(), AllocateMap(), AllocateOneByteInternalizedString(), AllocatePartialMap(), AllocatePropertyCell(), AllocateRawFixedArray(), AllocateRawFixedDoubleArray(), AllocateRawOneByteString(), AllocateRawTwoByteString(), AllocateSymbol(), AllocateTwoByteInternalizedString(), CopyCode(), and CopyJSObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateRawFixedArray()

AllocationResult v8::internal::Heap::AllocateRawFixedArray ( int  length,
PretenureFlag  pretenure 
)
private

Definition at line 4039 of file heap.cc.

4040  {
4041  if (length < 0 || length > FixedArray::kMaxLength) {
4042  v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
4043  }
4044  int size = FixedArray::SizeFor(length);
4046 
4048 }
static const int kMaxLength
Definition: objects.h:2469

References AllocateRaw(), FatalProcessOutOfMemory(), v8::internal::FixedArray::kMaxLength, v8::internal::OLD_POINTER_SPACE, SelectSpace(), size, v8::internal::FixedArray::SizeFor(), and space().

Referenced by AllocateFixedArrayWithFiller(), AllocateUninitializedFixedArray(), CopyAndTenureFixedCOWArray(), and CopyFixedArrayWithMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateRawFixedDoubleArray()

AllocationResult v8::internal::Heap::AllocateRawFixedDoubleArray ( int  length,
PretenureFlag  pretenure 
)
private

Definition at line 4107 of file heap.cc.

4108  {
4109  if (length < 0 || length > FixedDoubleArray::kMaxLength) {
4110  v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
4111  }
4112  int size = FixedDoubleArray::SizeFor(length);
4113 #ifndef V8_HOST_ARCH_64_BIT
4114  size += kPointerSize;
4115 #endif
4117 
4118  HeapObject* object;
4119  {
4120  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
4121  if (!allocation.To(&object)) return allocation;
4122  }
4123 
4124  return EnsureDoubleAligned(this, object, size);
4125 }
static int SizeFor(int length)
Definition: objects.h:2531
static const int kMaxLength
Definition: objects.h:2554

References AllocateRaw(), v8::internal::EnsureDoubleAligned(), FatalProcessOutOfMemory(), v8::internal::FixedDoubleArray::kMaxLength, v8::internal::kPointerSize, v8::internal::OLD_DATA_SPACE, SelectSpace(), size, v8::internal::FixedDoubleArray::SizeFor(), space(), and v8::internal::AllocationResult::To().

Referenced by AllocateUninitializedFixedDoubleArray(), and CopyFixedDoubleArrayWithMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ AllocateRawOneByteString()

AllocationResult v8::internal::Heap::AllocateRawOneByteString ( int  length,
PretenureFlag  pretenure 
)
private

Definition at line 3871 of file heap.cc.

3872  {
3873  DCHECK_LE(0, length);
3874  DCHECK_GE(String::kMaxLength, length);
3875  int size = SeqOneByteString::SizeFor(length);
3878 
3879  HeapObject* result;
3880  {
3881  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3882  if (!allocation.To(&result)) return allocation;
3883  }
3884 
3885  // Partially initialize the object.
3886  result->set_map_no_write_barrier(one_byte_string_map());
3887  String::cast(result)->set_length(length);
3888  String::cast(result)->set_hash_field(String::kEmptyHashField);
3889  DCHECK_EQ(size, HeapObject::cast(result)->Size());
3890 
3891  return result;
3892 }
static const int kEmptyHashField
Definition: objects.h:8534
static const int kMaxSize
Definition: objects.h:8981

References AllocateRaw(), DCHECK, DCHECK_EQ, DCHECK_GE, DCHECK_LE, v8::internal::Name::kEmptyHashField, v8::internal::String::kMaxLength, v8::internal::SeqOneByteString::kMaxSize, v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::SeqOneByteString::SizeFor(), space(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateRawTwoByteString()

AllocationResult v8::internal::Heap::AllocateRawTwoByteString ( int  length,
PretenureFlag  pretenure 
)
private

Definition at line 3895 of file heap.cc.

3896  {
3897  DCHECK_LE(0, length);
3898  DCHECK_GE(String::kMaxLength, length);
3899  int size = SeqTwoByteString::SizeFor(length);
3902 
3903  HeapObject* result;
3904  {
3905  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
3906  if (!allocation.To(&result)) return allocation;
3907  }
3908 
3909  // Partially initialize the object.
3910  result->set_map_no_write_barrier(string_map());
3911  String::cast(result)->set_length(length);
3912  String::cast(result)->set_hash_field(String::kEmptyHashField);
3913  DCHECK_EQ(size, HeapObject::cast(result)->Size());
3914  return result;
3915 }
static const int kMaxSize
Definition: objects.h:9020

References AllocateRaw(), DCHECK, DCHECK_EQ, DCHECK_GE, DCHECK_LE, v8::internal::Name::kEmptyHashField, v8::internal::String::kMaxLength, v8::internal::SeqTwoByteString::kMaxSize, v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::HeapObject::set_map_no_write_barrier(), size, v8::internal::SeqTwoByteString::SizeFor(), space(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateStruct()

AllocationResult v8::internal::Heap::AllocateStruct ( InstanceType  type)
private

Definition at line 4223 of file heap.cc.

4223  {
4224  Map* map;
4225  switch (type) {
4226 #define MAKE_CASE(NAME, Name, name) \
4227  case NAME##_TYPE: \
4228  map = name##_map(); \
4229  break;
4231 #undef MAKE_CASE
4232  default:
4233  UNREACHABLE();
4234  return exception();
4235  }
4236  int size = map->instance_size();
4238  Struct* result;
4239  {
4240  AllocationResult allocation = Allocate(map, space);
4241  if (!allocation.To(&result)) return allocation;
4242  }
4243  result->InitializeBody(size);
4244  return result;
4245 }
#define MAKE_CASE(NAME, Name, name)
#define UNREACHABLE()
Definition: logging.h:30

References Allocate(), v8::internal::Struct::InitializeBody(), MAKE_CASE, map, v8::internal::OLD_POINTER_SPACE, SelectSpace(), size, space(), STRUCT_LIST, v8::internal::TENURED, v8::internal::AllocationResult::To(), and UNREACHABLE.

+ Here is the call graph for this function:

◆ AllocateSymbol()

AllocationResult v8::internal::Heap::AllocateSymbol ( )
private

Definition at line 4193 of file heap.cc.

4193  {
4194  // Statically ensure that it is safe to allocate symbols in paged spaces.
4196 
4197  HeapObject* result;
4198  AllocationResult allocation =
4200  if (!allocation.To(&result)) return allocation;
4201 
4202  result->set_map_no_write_barrier(symbol_map());
4203 
4204  // Generate a random hash value.
4205  int hash;
4206  int attempts = 0;
4207  do {
4208  hash = isolate()->random_number_generator()->NextInt() & Name::kHashBitMask;
4209  attempts++;
4210  } while (hash == 0 && attempts < 30);
4211  if (hash == 0) hash = 1; // never return 0
4212 
4213  Symbol::cast(result)
4214  ->set_hash_field(Name::kIsNotArrayIndexMask | (hash << Name::kHashShift));
4215  Symbol::cast(result)->set_name(undefined_value());
4216  Symbol::cast(result)->set_flags(Smi::FromInt(0));
4217 
4218  DCHECK(!Symbol::cast(result)->is_private());
4219  return result;
4220 }
base::RandomNumberGenerator * random_number_generator()
Definition: isolate-inl.h:33
static const int kHashShift
Definition: objects.h:8499
static const uint32_t kHashBitMask
Definition: objects.h:8503
static const int kIsNotArrayIndexMask
Definition: objects.h:8495
static const int kSize
Definition: objects.h:8569

References AllocateRaw(), DCHECK, v8::internal::Smi::FromInt(), isolate(), v8::internal::Name::kHashBitMask, v8::internal::Name::kHashShift, v8::internal::Name::kIsNotArrayIndexMask, v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::Symbol::kSize, v8::internal::OLD_POINTER_SPACE, v8::internal::Isolate::random_number_generator(), v8::internal::HeapObject::set_map_no_write_barrier(), STATIC_ASSERT(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateTwoByteInternalizedString()

AllocationResult v8::internal::Heap::AllocateTwoByteInternalizedString ( Vector< const uc16 str,
uint32_t  hash_field 
)
inlineprivate

Definition at line 118 of file heap-inl.h.

119  {
121  // Compute map and object size.
122  Map* map = internalized_string_map();
125 
126  // Allocate string.
127  HeapObject* result;
128  {
129  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
130  if (!allocation.To(&result)) return allocation;
131  }
132 
133  result->set_map(map);
134  // Set length and hash fields of the allocated string.
135  String* answer = String::cast(result);
136  answer->set_length(str.length());
137  answer->set_hash_field(hash_field);
138 
139  DCHECK_EQ(size, answer->Size());
140 
141  // Fill in the characters.
142  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
143  str.length() * kUC16Size);
144 
145  return answer;
146 }
T * start() const
Definition: vector.h:47
int length() const
Definition: vector.h:41
const int kUC16Size
Definition: globals.h:187

References v8::internal::HeapObject::address(), AllocateRaw(), CHECK_GE, DCHECK_EQ, v8::internal::SeqString::kHeaderSize, v8::internal::String::kMaxLength, v8::internal::kUC16Size, v8::internal::Vector< T >::length(), map, v8::internal::MemCopy(), v8::internal::OLD_DATA_SPACE, SelectSpace(), v8::internal::Name::set_hash_field(), v8::internal::String::set_length(), v8::internal::HeapObject::set_map(), size, v8::internal::HeapObject::Size(), v8::internal::SeqTwoByteString::SizeFor(), space(), v8::internal::Vector< T >::start(), v8::internal::TENURED, and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateUninitializedFixedArray()

AllocationResult v8::internal::Heap::AllocateUninitializedFixedArray ( int  length)
private

Definition at line 4078 of file heap.cc.

4078  {
4079  if (length == 0) return empty_fixed_array();
4080 
4081  HeapObject* obj;
4082  {
4083  AllocationResult allocation = AllocateRawFixedArray(length, NOT_TENURED);
4084  if (!allocation.To(&obj)) return allocation;
4085  }
4086 
4087  obj->set_map_no_write_barrier(fixed_array_map());
4088  FixedArray::cast(obj)->set_length(length);
4089  return obj;
4090 }

References AllocateRawFixedArray(), v8::internal::NOT_TENURED, v8::internal::HeapObject::set_map_no_write_barrier(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ AllocateUninitializedFixedDoubleArray()

AllocationResult v8::internal::Heap::AllocateUninitializedFixedDoubleArray ( int  length,
PretenureFlag  pretenure = NOT_TENURED 
)
private

Definition at line 4093 of file heap.cc.

4094  {
4095  if (length == 0) return empty_fixed_array();
4096 
4097  HeapObject* elements;
4098  AllocationResult allocation = AllocateRawFixedDoubleArray(length, pretenure);
4099  if (!allocation.To(&elements)) return allocation;
4100 
4101  elements->set_map_no_write_barrier(fixed_double_array_map());
4102  FixedDoubleArray::cast(elements)->set_length(length);
4103  return elements;
4104 }
MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
Definition: heap.cc:4107

References AllocateRawFixedDoubleArray(), v8::internal::HeapObject::set_map_no_write_barrier(), and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ allocation_sites_list()

Object* v8::internal::Heap::allocation_sites_list ( )
inline

Definition at line 801 of file heap.h.

801 { return allocation_sites_list_; }
Object * allocation_sites_list_
Definition: heap.h:1527

References allocation_sites_list_.

Referenced by DeoptMarkedAllocationSites(), v8::internal::Deserializer::Deserialize(), v8::internal::AllocationSite::IsNestedSite(), ProcessAllocationSites(), ProcessPretenuringFeedback(), v8::internal::Deserializer::RelinkAllocationSite(), and ResetAllAllocationSitesDependentCode().

+ Here is the caller graph for this function:

◆ allocation_sites_list_address()

Object** v8::internal::Heap::allocation_sites_list_address ( )
inline

Definition at line 804 of file heap.h.

804 { return &allocation_sites_list_; }

References allocation_sites_list_.

◆ allocations_count()

uint32_t v8::internal::Heap::allocations_count ( )
inline

Definition at line 939 of file heap.h.

939 { return allocations_count_; }

References allocations_count_.

◆ AllowedToBeMigrated()

bool v8::internal::Heap::AllowedToBeMigrated ( HeapObject *  object,
AllocationSpace  dest 
)
inline

Definition at line 427 of file heap-inl.h.

427  {
428  // Object migration is governed by the following rules:
429  //
430  // 1) Objects in new-space can be migrated to one of the old spaces
431  // that matches their target space or they stay in new-space.
432  // 2) Objects in old-space stay in the same space when migrating.
433  // 3) Fillers (two or more words) can migrate due to left-trimming of
434  // fixed arrays in new-space, old-data-space and old-pointer-space.
435  // 4) Fillers (one word) can never migrate, they are skipped by
436  // incremental marking explicitly to prevent invalid pattern.
437  // 5) Short external strings can end up in old pointer space when a cons
438  // string in old pointer space is made external (String::MakeExternal).
439  //
440  // Since this function is used for debugging only, we do not place
441  // asserts here, but check everything explicitly.
442  if (obj->map() == one_pointer_filler_map()) return false;
443  InstanceType type = obj->map()->instance_type();
444  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
445  AllocationSpace src = chunk->owner()->identity();
446  switch (src) {
447  case NEW_SPACE:
448  return dst == src || dst == TargetSpaceId(type);
449  case OLD_POINTER_SPACE:
450  return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
451  obj->IsExternalString());
452  case OLD_DATA_SPACE:
453  return dst == src && dst == TargetSpaceId(type);
454  case CODE_SPACE:
455  return dst == src && type == CODE_TYPE;
456  case MAP_SPACE:
457  case CELL_SPACE:
458  case PROPERTY_CELL_SPACE:
459  case LO_SPACE:
460  return false;
461  case INVALID_SPACE:
462  break;
463  }
464  UNREACHABLE();
465  return false;
466 }
@ INVALID_SPACE
Definition: globals.h:367

References v8::internal::HeapObject::address(), v8::internal::CELL_SPACE, v8::internal::CODE_SPACE, v8::internal::CODE_TYPE, v8::internal::MemoryChunk::FromAddress(), v8::internal::Space::identity(), v8::internal::Map::instance_type(), v8::internal::INVALID_SPACE, v8::internal::LO_SPACE, v8::internal::HeapObject::map(), v8::internal::MAP_SPACE, v8::internal::NEW_SPACE, v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::MemoryChunk::owner(), v8::internal::PROPERTY_CELL_SPACE, TargetSpaceId(), and UNREACHABLE.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::PromoteObject(), and v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::SemiSpaceCopyObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ always_allocate()

bool v8::internal::Heap::always_allocate ( )
inline

Definition at line 622 of file heap.h.

622 { return always_allocate_scope_depth_ != 0; }

References always_allocate_scope_depth_.

Referenced by AllocateRaw(), and CopyJSObject().

+ Here is the caller graph for this function:

◆ always_allocate_scope_depth_address()

Address v8::internal::Heap::always_allocate_scope_depth_address ( )
inline

Definition at line 623 of file heap.h.

623  {
624  return reinterpret_cast<Address>(&always_allocate_scope_depth_);
625  }

References always_allocate_scope_depth_.

◆ amount_of_external_allocated_memory()

int64_t v8::internal::Heap::amount_of_external_allocated_memory ( )
inline

Definition at line 1276 of file heap.h.

1276  {
1278  }

References amount_of_external_allocated_memory_.

Referenced by v8::internal::StatisticsExtension::GetCounters().

+ Here is the caller graph for this function:

◆ array_buffers_list()

Object* v8::internal::Heap::array_buffers_list ( ) const
inline

Definition at line 796 of file heap.h.

796 { return array_buffers_list_; }
Object * array_buffers_list_
Definition: heap.h:1526

References array_buffers_list_.

Referenced by ProcessArrayBuffers(), v8::internal::Runtime::SetupArrayBuffer(), and TearDownArrayBuffers().

+ Here is the caller graph for this function:

◆ Available()

intptr_t v8::internal::Heap::Available ( )

Definition at line 211 of file heap.cc.

211  {
212  if (!HasBeenSetUp()) return 0;
213 
218 }
bool HasBeenSetUp()
Definition: heap.cc:221
intptr_t Available()
Definition: spaces.h:2400
intptr_t Available()
Definition: spaces.h:1733

References v8::internal::PagedSpace::Available(), v8::internal::NewSpace::Available(), cell_space_, code_space_, HasBeenSetUp(), map_space_, new_space_, old_data_space_, old_pointer_space_, and property_cell_space_.

Referenced by PrintShortHeapStatistics(), and SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallGCEpilogueCallbacks()

void v8::internal::Heap::CallGCEpilogueCallbacks ( GCType  gc_type,
GCCallbackFlags  flags 
)

Definition at line 1163 of file heap.cc.

1164  {
1165  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
1166  if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
1167  if (!gc_epilogue_callbacks_[i].pass_isolate_) {
1168  v8::GCPrologueCallback callback =
1169  reinterpret_cast<v8::GCPrologueCallback>(
1170  gc_epilogue_callbacks_[i].callback);
1171  callback(gc_type, gc_callback_flags);
1172  } else {
1173  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
1174  gc_epilogue_callbacks_[i].callback(isolate, gc_type, gc_callback_flags);
1175  }
1176  }
1177  }
1178 }
Isolate represents an isolated instance of the V8 engine.
Definition: v8.h:4356
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
Definition: v8.h:4216

References gc_epilogue_callbacks_, and isolate().

Referenced by v8::internal::NativeObjectsExplorer::FillRetainedObjects(), and PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CallGCPrologueCallbacks()

void v8::internal::Heap::CallGCPrologueCallbacks ( GCType  gc_type,
GCCallbackFlags  flags 
)

Definition at line 1146 of file heap.cc.

1146  {
1147  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
1148  if (gc_type & gc_prologue_callbacks_[i].gc_type) {
1149  if (!gc_prologue_callbacks_[i].pass_isolate_) {
1150  v8::GCPrologueCallback callback =
1151  reinterpret_cast<v8::GCPrologueCallback>(
1152  gc_prologue_callbacks_[i].callback);
1153  callback(gc_type, flags);
1154  } else {
1155  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate());
1156  gc_prologue_callbacks_[i].callback(isolate, gc_type, flags);
1157  }
1158  }
1159  }
1160 }

References v8::internal::anonymous_namespace{flags.cc}::flags, gc_prologue_callbacks_, and isolate().

Referenced by v8::internal::NativeObjectsExplorer::FillRetainedObjects(), and PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CanMoveObjectStart()

bool v8::internal::Heap::CanMoveObjectStart ( HeapObject *  object)

Definition at line 3235 of file heap.cc.

3235  {
3236  Address address = object->address();
3237  bool is_in_old_pointer_space = InOldPointerSpace(address);
3238  bool is_in_old_data_space = InOldDataSpace(address);
3239 
3240  if (lo_space()->Contains(object)) return false;
3241 
3242  Page* page = Page::FromAddress(address);
3243  // We can move the object start if:
3244  // (1) the object is not in old pointer or old data space,
3245  // (2) the page of the object was already swept,
3246  // (3) the page was already concurrently swept. This case is an optimization
3247  // for concurrent sweeping. The WasSwept predicate for concurrently swept
3248  // pages is set after sweeping all pages.
3249  return (!is_in_old_pointer_space && !is_in_old_data_space) ||
3250  page->WasSwept() || page->SweepingCompleted();
3251 }
bool Contains(Address addr)
Definition: heap.cc:4447
LargeObjectSpace * lo_space()
Definition: heap.h:600
bool InOldDataSpace(Address address)
Definition: heap-inl.h:354
friend class Page
Definition: heap.h:2036
bool InOldPointerSpace(Address address)
Definition: heap-inl.h:344

References Contains(), v8::internal::MemoryChunk::FromAddress(), InOldDataSpace(), InOldPointerSpace(), lo_space(), v8::internal::MemoryChunk::SweepingCompleted(), and v8::internal::Page::WasSwept().

Referenced by v8::internal::BUILTIN(), and LeftTrimFixedArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Capacity()

intptr_t v8::internal::Heap::Capacity ( )

Definition at line 160 of file heap.cc.

160  {
161  if (!HasBeenSetUp()) return 0;
162 
167 }
intptr_t Capacity()
Definition: spaces.h:2371

References v8::internal::PagedSpace::Capacity(), v8::internal::NewSpace::Capacity(), cell_space_, code_space_, HasBeenSetUp(), map_space_, new_space_, old_data_space_, old_pointer_space_, and property_cell_space_.

Referenced by GarbageCollectionEpilogue(), and SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ cell_space()

◆ CheckNewSpaceExpansionCriteria()

void v8::internal::Heap::CheckNewSpaceExpansionCriteria ( )

Definition at line 1291 of file heap.cc.

1291  {
1294  // Grow the size of new space if there is room to grow, enough data
1295  // has survived scavenge since the last expansion and we are not in
1296  // high promotion mode.
1297  new_space_.Grow();
1299  }
1300 }
intptr_t TotalCapacity()
Definition: spaces.h:2379

References v8::internal::NewSpace::Grow(), v8::internal::NewSpace::MaximumCapacity(), new_space_, survived_since_last_expansion_, and v8::internal::NewSpace::TotalCapacity().

Referenced by GarbageCollectionPrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CheckpointObjectStats()

void v8::internal::Heap::CheckpointObjectStats ( )

Definition at line 6100 of file heap.cc.

6100  {
6101  base::LockGuard<base::Mutex> lock_guard(
6103  Counters* counters = isolate()->counters();
6104 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
6105  counters->count_of_##name()->Increment( \
6106  static_cast<int>(object_counts_[name])); \
6107  counters->count_of_##name()->Decrement( \
6108  static_cast<int>(object_counts_last_time_[name])); \
6109  counters->size_of_##name()->Increment( \
6110  static_cast<int>(object_sizes_[name])); \
6111  counters->size_of_##name()->Decrement( \
6112  static_cast<int>(object_sizes_last_time_[name]));
6114 #undef ADJUST_LAST_TIME_OBJECT_COUNT
6115  int index;
6116 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
6117  index = FIRST_CODE_KIND_SUB_TYPE + Code::name; \
6118  counters->count_of_CODE_TYPE_##name()->Increment( \
6119  static_cast<int>(object_counts_[index])); \
6120  counters->count_of_CODE_TYPE_##name()->Decrement( \
6121  static_cast<int>(object_counts_last_time_[index])); \
6122  counters->size_of_CODE_TYPE_##name()->Increment( \
6123  static_cast<int>(object_sizes_[index])); \
6124  counters->size_of_CODE_TYPE_##name()->Decrement( \
6125  static_cast<int>(object_sizes_last_time_[index]));
6127 #undef ADJUST_LAST_TIME_OBJECT_COUNT
6128 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
6129  index = FIRST_FIXED_ARRAY_SUB_TYPE + name; \
6130  counters->count_of_FIXED_ARRAY_##name()->Increment( \
6131  static_cast<int>(object_counts_[index])); \
6132  counters->count_of_FIXED_ARRAY_##name()->Decrement( \
6133  static_cast<int>(object_counts_last_time_[index])); \
6134  counters->size_of_FIXED_ARRAY_##name()->Increment( \
6135  static_cast<int>(object_sizes_[index])); \
6136  counters->size_of_FIXED_ARRAY_##name()->Decrement( \
6137  static_cast<int>(object_sizes_last_time_[index]));
6139 #undef ADJUST_LAST_TIME_OBJECT_COUNT
6140 #define ADJUST_LAST_TIME_OBJECT_COUNT(name) \
6141  index = \
6142  FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \
6143  counters->count_of_CODE_AGE_##name()->Increment( \
6144  static_cast<int>(object_counts_[index])); \
6145  counters->count_of_CODE_AGE_##name()->Decrement( \
6146  static_cast<int>(object_counts_last_time_[index])); \
6147  counters->size_of_CODE_AGE_##name()->Increment( \
6148  static_cast<int>(object_sizes_[index])); \
6149  counters->size_of_CODE_AGE_##name()->Decrement( \
6150  static_cast<int>(object_sizes_last_time_[index]));
6152 #undef ADJUST_LAST_TIME_OBJECT_COUNT
6153 
6156  ClearObjectStats();
6157 }
#define CODE_AGE_LIST_COMPLETE(V)
Definition: builtins.h:30
size_t object_counts_[OBJECT_STATS_COUNT]
Definition: heap.h:1953
size_t object_sizes_last_time_[OBJECT_STATS_COUNT]
Definition: heap.h:1956
size_t object_counts_last_time_[OBJECT_STATS_COUNT]
Definition: heap.h:1954
size_t object_sizes_[OBJECT_STATS_COUNT]
Definition: heap.h:1955
#define ADJUST_LAST_TIME_OBJECT_COUNT(name)
static base::LazyMutex checkpoint_object_stats_mutex
Definition: heap.cc:6097
#define INSTANCE_TYPE_LIST(V)
Definition: objects.h:339
#define CODE_KIND_LIST(V)
Definition: objects.h:4950
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V)
Definition: objects.h:800

References ADJUST_LAST_TIME_OBJECT_COUNT, v8::internal::checkpoint_object_stats_mutex, ClearObjectStats(), CODE_AGE_LIST_COMPLETE, CODE_KIND_LIST, v8::internal::Isolate::counters(), FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST, INSTANCE_TYPE_LIST, isolate(), v8::internal::MemCopy(), object_counts_, object_counts_last_time_, object_sizes_, object_sizes_last_time_, and v8::base::LazyInstanceImpl< T, AllocationTrait, CreateTrait, InitOnceTrait, DestroyTrait >::Pointer().

Referenced by v8::internal::MarkCompactCollector::AfterMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearAllICsByKind()

void v8::internal::Heap::ClearAllICsByKind ( Code::Kind  kind)

Definition at line 470 of file heap.cc.

470  {
471  HeapObjectIterator it(code_space());
472 
473  for (Object* object = it.Next(); object != NULL; object = it.Next()) {
474  Code* code = Code::cast(object);
475  Code::Kind current_kind = code->kind();
476  if (current_kind == Code::FUNCTION ||
477  current_kind == Code::OPTIMIZED_FUNCTION) {
478  code->ClearInlineCaches(kind);
479  }
480  }
481 }

References v8::internal::Code::ClearInlineCaches(), code_space(), v8::internal::Code::kind(), v8::internal::HeapObjectIterator::Next(), and NULL.

Referenced by v8::internal::JSObject::SetElementCallback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearInstanceofCache()

void v8::internal::Heap::ClearInstanceofCache ( )
inline

Definition at line 701 of file heap-inl.h.

701  {
702  set_instanceof_cache_function(the_hole_value());
703 }

Referenced by v8::internal::JSFunction::SetInstancePrototype(), and v8::internal::JSObject::SetPrototype().

+ Here is the caller graph for this function:

◆ ClearJSFunctionResultCaches()

void v8::internal::Heap::ClearJSFunctionResultCaches ( )

Definition at line 980 of file heap.cc.

980  {
981  if (isolate_->bootstrapper()->IsActive()) return;
982 
983  Object* context = native_contexts_list();
984  while (!context->IsUndefined()) {
985  // Get the caches for this context. GC can happen when the context
986  // is not fully initialized, so the caches can be undefined.
987  Object* caches_or_undefined =
989  if (!caches_or_undefined->IsUndefined()) {
990  FixedArray* caches = FixedArray::cast(caches_or_undefined);
991  // Clear the caches:
992  int length = caches->length();
993  for (int i = 0; i < length; i++) {
994  JSFunctionResultCache::cast(caches->get(i))->Clear();
995  }
996  }
997  // Get the next context:
998  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
999  }
1000 }
static Context * cast(Object *context)
Definition: contexts.h:255
Object * get(int index)
Definition: objects-inl.h:2165
Object * native_contexts_list() const
Definition: heap.h:793
Bootstrapper * bootstrapper()
Definition: isolate.h:856

References v8::internal::Isolate::bootstrapper(), v8::internal::Context::cast(), v8::internal::FixedArray::get(), isolate_, v8::internal::Context::JSFUNCTION_RESULT_CACHES_INDEX, v8::internal::FixedArrayBase::length(), native_contexts_list(), and v8::internal::Context::NEXT_CONTEXT_LINK.

Referenced by GarbageCollectionPrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearNormalizedMapCaches()

void v8::internal::Heap::ClearNormalizedMapCaches ( )

Definition at line 1003 of file heap.cc.

1003  {
1004  if (isolate_->bootstrapper()->IsActive() &&
1005  !incremental_marking()->IsMarking()) {
1006  return;
1007  }
1008 
1009  Object* context = native_contexts_list();
1010  while (!context->IsUndefined()) {
1011  // GC can happen when the context is not fully initialized,
1012  // so the cache can be undefined.
1013  Object* cache =
1015  if (!cache->IsUndefined()) {
1016  NormalizedMapCache::cast(cache)->Clear();
1017  }
1018  context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
1019  }
1020 }

References v8::internal::Isolate::bootstrapper(), v8::internal::Context::cast(), v8::internal::FixedArray::get(), incremental_marking(), isolate_, native_contexts_list(), v8::internal::Context::NEXT_CONTEXT_LINK, and v8::internal::Context::NORMALIZED_MAP_CACHE_INDEX.

Referenced by MarkCompactPrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearObjectStats()

void v8::internal::Heap::ClearObjectStats ( bool  clear_last_time_stats = false)
private

Definition at line 6087 of file heap.cc.

6087  {
6088  memset(object_counts_, 0, sizeof(object_counts_));
6089  memset(object_sizes_, 0, sizeof(object_sizes_));
6090  if (clear_last_time_stats) {
6093  }
6094 }

References object_counts_, object_counts_last_time_, object_sizes_, and object_sizes_last_time_.

Referenced by CheckpointObjectStats(), and Heap().

+ Here is the caller graph for this function:

◆ code_space()

◆ CollectAllAvailableGarbage()

void v8::internal::Heap::CollectAllAvailableGarbage ( const char *  gc_reason = NULL)

Definition at line 735 of file heap.cc.

735  {
736  // Since we are ignoring the return value, the exact choice of space does
737  // not matter, so long as we do not specify NEW_SPACE, which would not
738  // cause a full GC.
739  // Major GC would invoke weak handle callbacks on weakly reachable
740  // handles, but won't collect weakly reachable objects until next
741  // major GC. Therefore if we collect aggressively and weak handle callback
742  // has been invoked, we rerun major GC to release objects which become
743  // garbage.
744  // Note: as weak callbacks can execute arbitrary code, we cannot
745  // hope that eventually there will be no weak callbacks invocations.
746  // Therefore stop recollecting after several attempts.
747  if (isolate()->concurrent_recompilation_enabled()) {
748  // The optimizing compiler may be unnecessarily holding on to memory.
749  DisallowHeapAllocation no_recursive_gc;
751  }
755  const int kMaxNumberOfAttempts = 7;
756  const int kMinNumberOfAttempts = 2;
757  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
758  if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL) &&
759  attempt + 1 >= kMinNumberOfAttempts) {
760  break;
761  }
762  }
764  new_space_.Shrink();
767 }
static const int kMakeHeapIterableMask
Definition: heap.h:721
static const int kNoGCFlags
Definition: heap.h:716
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap-inl.h:581
OptimizingCompilerThread * optimizing_compiler_thread()
Definition: isolate.h:1059
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, false > DisallowHeapAllocation
Definition: assert-scope.h:110
@ MARK_COMPACTOR
Definition: globals.h:389

References v8::internal::CompilationCache::Clear(), CollectGarbage(), v8::internal::Isolate::compilation_cache(), v8::internal::OptimizingCompilerThread::Flush(), incremental_marking(), isolate(), isolate_, kMakeHeapIterableMask, kNoGCFlags, kReduceMemoryFootprintMask, mark_compact_collector(), v8::internal::MARK_COMPACTOR, new_space_, NULL, v8::internal::Isolate::optimizing_compiler_thread(), v8::internal::MarkCompactCollector::SetFlags(), v8::internal::NewSpace::Shrink(), UncommitFromSpace(), and v8::internal::IncrementalMarking::UncommitMarkingDeque().

Referenced by v8::Isolate::LowMemoryNotification().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectAllGarbage()

void v8::internal::Heap::CollectAllGarbage ( int  flags,
const char *  gc_reason = NULL,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags 
)

Definition at line 724 of file heap.cc.

725  {
726  // Since we are ignoring the return value, the exact choice of space does
727  // not matter, so long as we do not specify NEW_SPACE, which would not
728  // cause a full GC.
730  CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags);
732 }

References CollectGarbage(), v8::internal::anonymous_namespace{flags.cc}::flags, kNoGCFlags, mark_compact_collector_, v8::internal::OLD_POINTER_SPACE, and v8::internal::MarkCompactCollector::SetFlags().

Referenced by AdvanceIdleIncrementalMarking(), v8::internal::HeapSnapshotGenerator::GenerateSnapshot(), v8::internal::StatisticsExtension::GetCounters(), v8::internal::Debug::GetLoadedScripts(), IdleNotification(), v8::internal::Logger::LogAccessorCallbacks(), v8::internal::Logger::LogCodeObjects(), v8::internal::Logger::LogCompiledFunctions(), main(), MakeHeapIterable(), v8::internal::Debug::PrepareForBreakPoints(), v8::internal::RUNTIME_FUNCTION(), v8::internal::ScriptCache::ScriptCache(), and v8::internal::HeapObjectsMap::UpdateHeapObjectsMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbage() [1/2]

bool v8::internal::Heap::CollectGarbage ( AllocationSpace  space,
const char *  gc_reason = NULL,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags 
)
inline

Definition at line 581 of file heap-inl.h.

582  {
583  const char* collector_reason = NULL;
584  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
585  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
586 }
GarbageCollector SelectGarbageCollector(AllocationSpace space, const char **reason)
Definition: heap.cc:236

References NULL, SelectGarbageCollector(), and space().

Referenced by v8::internal::AbortIncrementalMarkingAndCollectGarbage(), CollectAllAvailableGarbage(), CollectAllGarbage(), IdleNotification(), and ReserveSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CollectGarbage() [2/2]

bool v8::internal::Heap::CollectGarbage ( GarbageCollector  collector,
const char *  gc_reason,
const char *  collector_reason,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags 
)
private

Definition at line 785 of file heap.cc.

787  {
788  // The VM is in the GC state until exiting this function.
789  VMState<GC> state(isolate_);
790 
791 #ifdef DEBUG
792  // Reset the allocation timeout to the GC interval, but make sure to
793  // allow at least a few allocations after a collection. The reason
794  // for this is that we have a lot of allocation sequences and we
795  // assume that a garbage collection will allow the subsequent
796  // allocation attempts to go through.
797  allocation_timeout_ = Max(6, FLAG_gc_interval);
798 #endif
799 
801 
802  if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
803  if (FLAG_trace_incremental_marking) {
804  PrintF("[IncrementalMarking] Scavenge during marking.\n");
805  }
806  }
807 
808  if (collector == MARK_COMPACTOR &&
809  !mark_compact_collector()->abort_incremental_marking() &&
810  !incremental_marking()->IsStopped() &&
811  !incremental_marking()->should_hurry() &&
812  FLAG_incremental_marking_steps) {
813  // Make progress in incremental marking.
814  const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
815  incremental_marking()->Step(kStepSizeWhenDelayedByScavenge,
817  if (!incremental_marking()->IsComplete() && !FLAG_gc_global) {
818  if (FLAG_trace_incremental_marking) {
819  PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
820  }
821  collector = SCAVENGER;
822  collector_reason = "incremental marking delaying mark-sweep";
823  }
824  }
825 
826  bool next_gc_likely_to_collect_more = false;
827 
828  {
829  tracer()->Start(collector, gc_reason, collector_reason);
830  DCHECK(AllowHeapAllocation::IsAllowed());
831  DisallowHeapAllocation no_allocation_during_gc;
833 
834  {
835  HistogramTimerScope histogram_timer_scope(
836  (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger()
837  : isolate_->counters()->gc_compactor());
838  next_gc_likely_to_collect_more =
839  PerformGarbageCollection(collector, gc_callback_flags);
840  }
841 
843  tracer()->Stop();
844  }
845 
846  // Start incremental marking for the next cycle. The heap snapshot
847  // generator needs incremental marking to stay off after it aborted.
848  if (!mark_compact_collector()->abort_incremental_marking() &&
851  }
852 
853  return next_gc_likely_to_collect_more;
854 }
void Start(GarbageCollector collector, const char *gc_reason, const char *collector_reason)
Definition: gc-tracer.cc:98
void EnsureFillerObjectAtTop()
Definition: heap.cc:770
void GarbageCollectionEpilogue()
Definition: heap.cc:587
bool WorthActivatingIncrementalMarking()
Definition: heap.cc:4290
bool PerformGarbageCollection(GarbageCollector collector, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap.cc:1042
void GarbageCollectionPrologue()
Definition: heap.cc:410
GCTracer * tracer()
Definition: heap.h:1166
void Start(CompactionFlag flag=ALLOW_COMPACTION)
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
void PrintF(const char *format,...)
Definition: utils.cc:80

References v8::internal::Isolate::counters(), DCHECK, EnsureFillerObjectAtTop(), GarbageCollectionEpilogue(), GarbageCollectionPrologue(), incremental_marking(), isolate_, mark_compact_collector(), v8::internal::MARK_COMPACTOR, v8::internal::Max(), v8::internal::MB, v8::internal::IncrementalMarking::NO_GC_VIA_STACK_GUARD, PerformGarbageCollection(), v8::internal::PrintF(), v8::internal::SCAVENGER, v8::internal::IncrementalMarking::Start(), v8::internal::GCTracer::Start(), v8::internal::IncrementalMarking::Step(), v8::internal::GCTracer::Stop(), tracer(), and WorthActivatingIncrementalMarking().

+ Here is the call graph for this function:

◆ CommittedMemory()

intptr_t v8::internal::Heap::CommittedMemory ( )

Definition at line 170 of file heap.cc.

References cell_space_, code_space_, v8::internal::PagedSpace::CommittedMemory(), v8::internal::NewSpace::CommittedMemory(), HasBeenSetUp(), lo_space_, map_space_, new_space_, old_data_space_, old_pointer_space_, property_cell_space_, and v8::internal::LargeObjectSpace::Size().

Referenced by GarbageCollectionEpilogue(), v8::Isolate::GetHeapStatistics(), PrintShortHeapStatistics(), and UpdateMaximumCommitted().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CommittedMemoryExecutable()

intptr_t v8::internal::Heap::CommittedMemoryExecutable ( )

Definition at line 194 of file heap.cc.

194  {
195  if (!HasBeenSetUp()) return 0;
196 
197  return isolate()->memory_allocator()->SizeExecutable();
198 }
MemoryAllocator * memory_allocator()
Definition: isolate.h:883

References HasBeenSetUp(), isolate(), v8::internal::Isolate::memory_allocator(), and v8::internal::MemoryAllocator::SizeExecutable().

Referenced by v8::Isolate::GetHeapStatistics().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CommittedPhysicalMemory()

size_t v8::internal::Heap::CommittedPhysicalMemory ( )

Definition at line 180 of file heap.cc.

References cell_space_, code_space_, v8::internal::PagedSpace::CommittedPhysicalMemory(), v8::internal::NewSpace::CommittedPhysicalMemory(), v8::internal::LargeObjectSpace::CommittedPhysicalMemory(), HasBeenSetUp(), lo_space_, map_space_, new_space_, old_data_space_, old_pointer_space_, and property_cell_space_.

Referenced by v8::Isolate::GetHeapStatistics().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CompletelyClearInstanceofCache()

void v8::internal::Heap::CompletelyClearInstanceofCache ( )
inline

Definition at line 711 of file heap-inl.h.

711  {
712  set_instanceof_cache_map(the_hole_value());
713  set_instanceof_cache_function(the_hole_value());
714 }

Referenced by MarkCompactPrologue(), and v8::internal::IncrementalMarking::StartMarking().

+ Here is the caller graph for this function:

◆ ConfigureHeap()

bool v8::internal::Heap::ConfigureHeap ( int  max_semi_space_size,
int  max_old_space_size,
int  max_executable_size,
size_t  code_range_size 
)

Definition at line 4827 of file heap.cc.

4828  {
4829  if (HasBeenSetUp()) return false;
4830 
4831  // Overwrite default configuration.
4832  if (max_semi_space_size > 0) {
4833  max_semi_space_size_ = max_semi_space_size * MB;
4834  }
4835  if (max_old_space_size > 0) {
4836  max_old_generation_size_ = max_old_space_size * MB;
4837  }
4838  if (max_executable_size > 0) {
4839  max_executable_size_ = max_executable_size * MB;
4840  }
4841 
4842  // If max space size flags are specified overwrite the configuration.
4843  if (FLAG_max_semi_space_size > 0) {
4844  max_semi_space_size_ = FLAG_max_semi_space_size * MB;
4845  }
4846  if (FLAG_max_old_space_size > 0) {
4847  max_old_generation_size_ = FLAG_max_old_space_size * MB;
4848  }
4849  if (FLAG_max_executable_size > 0) {
4850  max_executable_size_ = FLAG_max_executable_size * MB;
4851  }
4852 
4853  if (FLAG_stress_compaction) {
4854  // This will cause more frequent GCs when stressing.
4856  }
4857 
4859  // If we are using a snapshot we always reserve the default amount
4860  // of memory for each semispace because code in the snapshot has
4861  // write-barrier code that relies on the size and alignment of new
4862  // space. We therefore cannot use a larger max semispace size
4863  // than the default reserved semispace size.
4866  if (FLAG_trace_gc) {
4867  PrintPID("Max semi-space size cannot be more than %d kbytes\n",
4868  reserved_semispace_size_ >> 10);
4869  }
4870  }
4871  } else {
4872  // If we are not using snapshots we reserve space for the actual
4873  // max semispace size.
4875  }
4876 
4877  // The max executable size must be less than or equal to the max old
4878  // generation size.
4881  }
4882 
4883  // The new space size must be a power of two to support single-bit testing
4884  // for containment.
4889 
4890  if (FLAG_min_semi_space_size > 0) {
4891  int initial_semispace_size = FLAG_min_semi_space_size * MB;
4892  if (initial_semispace_size > max_semi_space_size_) {
4894  if (FLAG_trace_gc) {
 4895  PrintPID(
 4896  "Min semi-space size cannot be more than the maximum "
 4897  "semi-space size of %d MB\n",
 4899  }
4900  } else {
4901  initial_semispace_size_ = initial_semispace_size;
4902  }
4903  }
4904 
4906 
4907  // The old generation is paged and needs at least one page for each space.
4908  int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
4910  Max(static_cast<intptr_t>(paged_space_count * Page::kPageSize),
4912 
4913  // We rely on being able to allocate new arrays in paged spaces.
4915  (JSArray::kSize +
4918 
4919  code_range_size_ = code_range_size * MB;
4920 
4921  configured_ = true;
4922  return true;
4923 }
static const int kSize
Definition: objects.h:10073
static const int kInitialMaxFastElementArray
Definition: objects.h:2180
static bool HaveASnapshotToStartFrom()
uint32_t RoundUpToPowerOfTwo32(uint32_t value)
Definition: bits.cc:12
void PrintPID(const char *format,...)
Definition: utils.cc:96
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
@ FIRST_PAGED_SPACE
Definition: globals.h:371
@ LAST_PAGED_SPACE
Definition: globals.h:372

References code_range_size_, configured_, DCHECK, v8::internal::FIRST_PAGED_SPACE, HasBeenSetUp(), v8::internal::Snapshot::HaveASnapshotToStartFrom(), initial_semispace_size_, v8::internal::JSObject::kInitialMaxFastElementArray, v8::internal::Page::kMaxRegularHeapObjectSize, v8::internal::Page::kPageSize, v8::internal::AllocationMemento::kSize, v8::internal::JSArray::kSize, v8::internal::LAST_PAGED_SPACE, v8::internal::Max(), max_executable_size_, max_old_generation_size_, max_semi_space_size_, v8::internal::MB, v8::internal::Min(), v8::internal::PrintPID(), reserved_semispace_size_, v8::base::bits::RoundUpToPowerOfTwo32(), and v8::internal::FixedArray::SizeFor().

Referenced by ConfigureHeapDefault(), and v8::SetResourceConstraints().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ConfigureHeapDefault()

bool v8::internal::Heap::ConfigureHeapDefault ( )

Definition at line 4926 of file heap.cc.

4926 { return ConfigureHeap(0, 0, 0, 0); }
bool ConfigureHeap(int max_semi_space_size, int max_old_space_size, int max_executable_size, size_t code_range_size)
Definition: heap.cc:4827

References ConfigureHeap().

Referenced by SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Contains() [1/2]

bool v8::internal::Heap::Contains ( Address  addr)

Definition at line 4447 of file heap.cc.

4447  {
4448  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
4449  return HasBeenSetUp() &&
4450  (new_space_.ToSpaceContains(addr) ||
4451  old_pointer_space_->Contains(addr) ||
4452  old_data_space_->Contains(addr) || code_space_->Contains(addr) ||
4453  map_space_->Contains(addr) || cell_space_->Contains(addr) ||
4454  property_cell_space_->Contains(addr) ||
4455  lo_space_->SlowContains(addr));
4456 }
bool SlowContains(Address addr)
Definition: spaces.h:2783
bool IsOutsideAllocatedSpace(const void *address) const
Definition: spaces.h:1041
bool ToSpaceContains(Address address)
Definition: spaces.h:2491
bool Contains(Address a)
Definition: spaces-inl.h:150

References cell_space_, code_space_, v8::internal::PagedSpace::Contains(), HasBeenSetUp(), isolate_, v8::internal::MemoryAllocator::IsOutsideAllocatedSpace(), lo_space_, map_space_, v8::internal::Isolate::memory_allocator(), new_space_, old_data_space_, old_pointer_space_, property_cell_space_, v8::internal::LargeObjectSpace::SlowContains(), and v8::internal::NewSpace::ToSpaceContains().

Referenced by CanMoveObjectStart(), Contains(), v8::internal::HeapObject::HeapObjectShortPrint(), v8::internal::MarkCompactMarkingVisitor::INLINE(), v8::internal::JSObject::JSObjectShortPrint(), LeftTrimFixedArray(), RightTrimFixedArray(), and v8::internal::VerifyPointersVisitor::VisitPointers().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Contains() [2/2]

bool v8::internal::Heap::Contains ( HeapObject value)

Definition at line 4444 of file heap.cc.

4444 { return Contains(value->address()); }

References v8::internal::HeapObject::address(), and Contains().

+ Here is the call graph for this function:

◆ CopyAndTenureFixedCOWArray()

AllocationResult v8::internal::Heap::CopyAndTenureFixedCOWArray ( FixedArray src)
private

Definition at line 3939 of file heap.cc.

3939  {
3940  if (!InNewSpace(src)) {
3941  return src;
3942  }
3943 
3944  int len = src->length();
3945  HeapObject* obj;
3946  {
3947  AllocationResult allocation = AllocateRawFixedArray(len, TENURED);
3948  if (!allocation.To(&obj)) return allocation;
3949  }
3950  obj->set_map_no_write_barrier(fixed_array_map());
3951  FixedArray* result = FixedArray::cast(obj);
3952  result->set_length(len);
3953 
3954  // Copy the content
3955  DisallowHeapAllocation no_gc;
3956  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
3957  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
3958 
3959  // TODO(mvstanton): The map is set twice because of protection against calling
3960  // set() on a COW FixedArray. Issue v8:3221 created to track this, and
3961  // we might then be able to remove this whole method.
3962  HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map());
3963  return result;
3964 }

References AllocateRawFixedArray(), v8::internal::FixedArray::get(), v8::internal::HeapObject::GetWriteBarrierMode(), InNewSpace(), v8::internal::FixedArrayBase::length(), mode(), v8::internal::FixedArray::set(), v8::internal::FixedArrayBase::set_length(), v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::TENURED, and v8::internal::AllocationResult::To().

+ Here is the call graph for this function:

◆ CopyBlock()

void v8::internal::Heap::CopyBlock ( Address  dst,
Address  src,
int  byte_size 
)
inlinestatic

Definition at line 469 of file heap-inl.h.

469  {
470  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
471  static_cast<size_t>(byte_size / kPointerSize));
472 }
void CopyWords(T *dst, const T *src, size_t num_words)
Definition: utils.h:1112

References v8::internal::CopyWords(), and v8::internal::kPointerSize.

Referenced by CopyCode(), CopyConstantPoolArrayWithMap(), CopyFixedArrayWithMap(), CopyFixedDoubleArrayWithMap(), CopyJSObject(), and v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::INLINE().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyCode() [1/2]

AllocationResult v8::internal::Heap::CopyCode ( Code code)
protected

Definition at line 3465 of file heap.cc.

3465  {
3466  AllocationResult allocation;
3467  HeapObject* new_constant_pool;
3468  if (FLAG_enable_ool_constant_pool &&
3469  code->constant_pool() != empty_constant_pool_array()) {
3470  // Copy the constant pool, since edits to the copied code may modify
3471  // the constant pool.
3472  allocation = CopyConstantPoolArray(code->constant_pool());
3473  if (!allocation.To(&new_constant_pool)) return allocation;
3474  } else {
3475  new_constant_pool = empty_constant_pool_array();
3476  }
3477 
3478  HeapObject* result;
3479  // Allocate an object the same size as the code object.
3480  int obj_size = code->Size();
3481  allocation = AllocateRaw(obj_size, CODE_SPACE, CODE_SPACE);
3482  if (!allocation.To(&result)) return allocation;
3483 
3484  // Copy code object.
3485  Address old_addr = code->address();
3486  Address new_addr = result->address();
3487  CopyBlock(new_addr, old_addr, obj_size);
3488  Code* new_code = Code::cast(result);
3489 
3490  // Update the constant pool.
3491  new_code->set_constant_pool(new_constant_pool);
3492 
3493  // Relocate the copy.
3495  isolate_->code_range()->contains(code->address()));
3496  new_code->Relocate(new_addr - old_addr);
3497  return new_code;
3498 }
MUST_USE_RESULT AllocationResult CopyConstantPoolArray(ConstantPoolArray *src)
Definition: heap-inl.h:160
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:469

References v8::internal::HeapObject::address(), AllocateRaw(), v8::internal::Isolate::code_range(), v8::internal::CODE_SPACE, v8::internal::Code::constant_pool(), v8::internal::CodeRange::contains(), CopyBlock(), CopyConstantPoolArray(), DCHECK, isolate_, NULL, v8::internal::Code::Relocate(), v8::internal::Code::set_constant_pool(), v8::internal::HeapObject::Size(), v8::internal::AllocationResult::To(), and v8::internal::CodeRange::valid().

+ Here is the call graph for this function:

◆ CopyCode() [2/2]

AllocationResult v8::internal::Heap::CopyCode ( Code code,
Vector< byte reloc_info 
)
protected

Definition at line 3501 of file heap.cc.

3501  {
3502  // Allocate ByteArray and ConstantPoolArray before the Code object, so that we
3503  // do not risk leaving uninitialized Code object (and breaking the heap).
3504  ByteArray* reloc_info_array;
3505  {
3506  AllocationResult allocation =
3507  AllocateByteArray(reloc_info.length(), TENURED);
3508  if (!allocation.To(&reloc_info_array)) return allocation;
3509  }
3510  HeapObject* new_constant_pool;
3511  if (FLAG_enable_ool_constant_pool &&
3512  code->constant_pool() != empty_constant_pool_array()) {
3513  // Copy the constant pool, since edits to the copied code may modify
3514  // the constant pool.
3515  AllocationResult allocation = CopyConstantPoolArray(code->constant_pool());
3516  if (!allocation.To(&new_constant_pool)) return allocation;
3517  } else {
3518  new_constant_pool = empty_constant_pool_array();
3519  }
3520 
3521  int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
3522 
3523  int new_obj_size = Code::SizeFor(new_body_size);
3524 
3525  Address old_addr = code->address();
3526 
3527  size_t relocation_offset =
3528  static_cast<size_t>(code->instruction_end() - old_addr);
3529 
3530  HeapObject* result;
3531  AllocationResult allocation =
3532  AllocateRaw(new_obj_size, CODE_SPACE, CODE_SPACE);
3533  if (!allocation.To(&result)) return allocation;
3534 
3535  // Copy code object.
3536  Address new_addr = result->address();
3537 
3538  // Copy header and instructions.
3539  CopyBytes(new_addr, old_addr, relocation_offset);
3540 
3541  Code* new_code = Code::cast(result);
3542  new_code->set_relocation_info(reloc_info_array);
3543 
3544  // Update constant pool.
3545  new_code->set_constant_pool(new_constant_pool);
3546 
3547  // Copy patched rinfo.
3548  CopyBytes(new_code->relocation_start(), reloc_info.start(),
3549  static_cast<size_t>(reloc_info.length()));
3550 
3551  // Relocate the copy.
3553  isolate_->code_range()->contains(code->address()));
3554  new_code->Relocate(new_addr - old_addr);
3555 
3556 #ifdef VERIFY_HEAP
3557  if (FLAG_verify_heap) code->ObjectVerify();
3558 #endif
3559  return new_code;
3560 }
static int SizeFor(int body_size)
Definition: objects.h:5256
MUST_USE_RESULT AllocationResult AllocateByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
Definition: heap.cc:3203
const intptr_t kObjectAlignment
Definition: globals.h:226
static void RoundUp(Vector< char > buffer, int *length, int *decimal_point)
Definition: fixed-dtoa.cc:171
void CopyBytes(uint8_t *target, uint8_t *source)

References v8::internal::HeapObject::address(), AllocateByteArray(), AllocateRaw(), v8::internal::Isolate::code_range(), v8::internal::CODE_SPACE, v8::internal::Code::constant_pool(), v8::internal::CodeRange::contains(), v8::internal::CopyBytes(), CopyConstantPoolArray(), DCHECK, v8::internal::Code::instruction_end(), v8::internal::Code::instruction_size(), isolate_, v8::internal::kObjectAlignment, v8::internal::Vector< T >::length(), NULL, v8::internal::Code::Relocate(), v8::internal::Code::relocation_start(), v8::internal::RoundUp(), v8::internal::Code::set_constant_pool(), v8::internal::Code::SizeFor(), v8::internal::Vector< T >::start(), v8::internal::TENURED, v8::internal::AllocationResult::To(), and v8::internal::CodeRange::valid().

+ Here is the call graph for this function:

◆ CopyConstantPoolArray()

AllocationResult v8::internal::Heap::CopyConstantPoolArray ( ConstantPoolArray src)
inlineprivate

Definition at line 160 of file heap-inl.h.

160  {
161  if (src->length() == 0) return src;
162  return CopyConstantPoolArrayWithMap(src, src->map());
163 }
MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
Definition: heap.cc:4014

References CopyConstantPoolArrayWithMap(), v8::internal::ConstantPoolArray::length(), and v8::internal::HeapObject::map().

Referenced by CopyCode().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyConstantPoolArrayWithMap()

AllocationResult v8::internal::Heap::CopyConstantPoolArrayWithMap ( ConstantPoolArray src,
Map map 
)
private

Definition at line 4014 of file heap.cc.

4015  {
4016  HeapObject* obj;
4017  if (src->is_extended_layout()) {
4018  ConstantPoolArray::NumberOfEntries small(src,
4020  ConstantPoolArray::NumberOfEntries extended(
4022  AllocationResult allocation =
4023  AllocateExtendedConstantPoolArray(small, extended);
4024  if (!allocation.To(&obj)) return allocation;
4025  } else {
4026  ConstantPoolArray::NumberOfEntries small(src,
4028  AllocationResult allocation = AllocateConstantPoolArray(small);
4029  if (!allocation.To(&obj)) return allocation;
4030  }
4031  obj->set_map_no_write_barrier(map);
4033  src->address() + ConstantPoolArray::kFirstEntryOffset,
4034  src->size() - ConstantPoolArray::kFirstEntryOffset);
4035  return obj;
4036 }
static const int kFirstEntryOffset
Definition: objects.h:2827
MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(const ConstantPoolArray::NumberOfEntries &small)
Definition: heap.cc:4128
MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(const ConstantPoolArray::NumberOfEntries &small, const ConstantPoolArray::NumberOfEntries &extended)
Definition: heap.cc:4152

References v8::internal::HeapObject::address(), AllocateConstantPoolArray(), AllocateExtendedConstantPoolArray(), CopyBlock(), v8::internal::ConstantPoolArray::EXTENDED_SECTION, v8::internal::ConstantPoolArray::is_extended_layout(), v8::internal::ConstantPoolArray::kFirstEntryOffset, map, v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::ConstantPoolArray::size(), v8::internal::ConstantPoolArray::SMALL_SECTION, and v8::internal::AllocationResult::To().

Referenced by CopyConstantPoolArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyFixedArray()

AllocationResult v8::internal::Heap::CopyFixedArray ( FixedArray src)
inlineprivate

Definition at line 148 of file heap-inl.h.

148  {
149  if (src->length() == 0) return src;
150  return CopyFixedArrayWithMap(src, src->map());
151 }
MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:3973

References CopyFixedArrayWithMap(), v8::internal::FixedArrayBase::length(), and v8::internal::HeapObject::map().

Referenced by CopyJSObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyFixedArrayWithMap()

AllocationResult v8::internal::Heap::CopyFixedArrayWithMap ( FixedArray src,
Map map 
)
private

Definition at line 3973 of file heap.cc.

3973  {
3974  int len = src->length();
3975  HeapObject* obj;
3976  {
3977  AllocationResult allocation = AllocateRawFixedArray(len, NOT_TENURED);
3978  if (!allocation.To(&obj)) return allocation;
3979  }
3980  if (InNewSpace(obj)) {
3981  obj->set_map_no_write_barrier(map);
3982  CopyBlock(obj->address() + kPointerSize, src->address() + kPointerSize,
3984  return obj;
3985  }
3986  obj->set_map_no_write_barrier(map);
3987  FixedArray* result = FixedArray::cast(obj);
3988  result->set_length(len);
3989 
3990  // Copy the content
3991  DisallowHeapAllocation no_gc;
3992  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
3993  for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
3994  return result;
3995 }

References v8::internal::HeapObject::address(), AllocateRawFixedArray(), CopyBlock(), v8::internal::FixedArray::get(), v8::internal::HeapObject::GetWriteBarrierMode(), InNewSpace(), v8::internal::kPointerSize, v8::internal::FixedArrayBase::length(), map, mode(), v8::internal::NOT_TENURED, v8::internal::FixedArray::set(), v8::internal::FixedArrayBase::set_length(), v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::FixedArray::SizeFor(), and v8::internal::AllocationResult::To().

Referenced by CopyFixedArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyFixedDoubleArray()

AllocationResult v8::internal::Heap::CopyFixedDoubleArray ( FixedDoubleArray src)
inlineprivate

Definition at line 154 of file heap-inl.h.

154  {
155  if (src->length() == 0) return src;
156  return CopyFixedDoubleArrayWithMap(src, src->map());
157 }
MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:3998

References CopyFixedDoubleArrayWithMap(), v8::internal::FixedArrayBase::length(), and v8::internal::HeapObject::map().

Referenced by CopyJSObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyFixedDoubleArrayWithMap()

AllocationResult v8::internal::Heap::CopyFixedDoubleArrayWithMap ( FixedDoubleArray src,
Map map 
)
private

Definition at line 3998 of file heap.cc.

3999  {
4000  int len = src->length();
4001  HeapObject* obj;
4002  {
4003  AllocationResult allocation = AllocateRawFixedDoubleArray(len, NOT_TENURED);
4004  if (!allocation.To(&obj)) return allocation;
4005  }
4006  obj->set_map_no_write_barrier(map);
4007  CopyBlock(obj->address() + FixedDoubleArray::kLengthOffset,
4008  src->address() + FixedDoubleArray::kLengthOffset,
4010  return obj;
4011 }
static const int kLengthOffset
Definition: objects.h:2392

References v8::internal::HeapObject::address(), AllocateRawFixedDoubleArray(), CopyBlock(), v8::internal::FixedArrayBase::kLengthOffset, v8::internal::FixedArrayBase::length(), map, v8::internal::NOT_TENURED, v8::internal::HeapObject::set_map_no_write_barrier(), v8::internal::FixedDoubleArray::SizeFor(), and v8::internal::AllocationResult::To().

Referenced by CopyFixedDoubleArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CopyJSObject()

AllocationResult v8::internal::Heap::CopyJSObject ( JSObject source,
AllocationSite site = NULL 
)

Definition at line 3688 of file heap.cc.

3688  {
3689  // Never used to copy functions. If functions need to be copied we
3690  // have to be careful to clear the literals array.
3691  SLOW_DCHECK(!source->IsJSFunction());
3692 
3693  // Make the clone.
3694  Map* map = source->map();
3695  int object_size = map->instance_size();
3696  HeapObject* clone;
3697 
3698  DCHECK(site == NULL || AllocationSite::CanTrack(map->instance_type()));
3699 
3701 
3702  // If we're forced to always allocate, we use the general allocation
3703  // functions which may leave us with an object in old space.
3704  if (always_allocate()) {
3705  {
3706  AllocationResult allocation =
3707  AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
3708  if (!allocation.To(&clone)) return allocation;
3709  }
3710  Address clone_address = clone->address();
3711  CopyBlock(clone_address, source->address(), object_size);
3712  // Update write barrier for all fields that lie beyond the header.
3713  RecordWrites(clone_address, JSObject::kHeaderSize,
3714  (object_size - JSObject::kHeaderSize) / kPointerSize);
3715  } else {
3716  wb_mode = SKIP_WRITE_BARRIER;
3717 
3718  {
3719  int adjusted_object_size =
3720  site != NULL ? object_size + AllocationMemento::kSize : object_size;
3721  AllocationResult allocation =
3722  AllocateRaw(adjusted_object_size, NEW_SPACE, NEW_SPACE);
3723  if (!allocation.To(&clone)) return allocation;
3724  }
3725  SLOW_DCHECK(InNewSpace(clone));
3726  // Since we know the clone is allocated in new space, we can copy
3727  // the contents without worrying about updating the write barrier.
3728  CopyBlock(clone->address(), source->address(), object_size);
3729 
3730  if (site != NULL) {
3731  AllocationMemento* alloc_memento = reinterpret_cast<AllocationMemento*>(
3732  reinterpret_cast<Address>(clone) + object_size);
3733  InitializeAllocationMemento(alloc_memento, site);
3734  }
3735  }
3736 
3737  SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() ==
3738  source->GetElementsKind());
3739  FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
3740  FixedArray* properties = FixedArray::cast(source->properties());
3741  // Update elements if necessary.
3742  if (elements->length() > 0) {
3743  FixedArrayBase* elem;
3744  {
3745  AllocationResult allocation;
3746  if (elements->map() == fixed_cow_array_map()) {
3747  allocation = FixedArray::cast(elements);
3748  } else if (source->HasFastDoubleElements()) {
3749  allocation = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
3750  } else {
3751  allocation = CopyFixedArray(FixedArray::cast(elements));
3752  }
3753  if (!allocation.To(&elem)) return allocation;
3754  }
3755  JSObject::cast(clone)->set_elements(elem, wb_mode);
3756  }
3757  // Update properties if necessary.
3758  if (properties->length() > 0) {
3759  FixedArray* prop;
3760  {
3761  AllocationResult allocation = CopyFixedArray(properties);
3762  if (!allocation.To(&prop)) return allocation;
3763  }
3764  JSObject::cast(clone)->set_properties(prop, wb_mode);
3765  }
3766  // Return the new clone.
3767  return clone;
3768 }
#define SLOW_DCHECK(condition)
Definition: checks.h:30
static bool CanTrack(InstanceType type)
Definition: objects-inl.h:1614
MUST_USE_RESULT AllocationResult CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:148
MUST_USE_RESULT AllocationResult CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:154
static const int kHeaderSize
Definition: objects.h:2195
@ UPDATE_WRITE_BARRIER
Definition: objects.h:235

References v8::internal::HeapObject::address(), AllocateRaw(), always_allocate(), v8::internal::AllocationSite::CanTrack(), CopyBlock(), CopyFixedArray(), CopyFixedDoubleArray(), DCHECK, v8::internal::JSObject::GetElementsKind(), v8::internal::JSObject::HasFastDoubleElements(), InitializeAllocationMemento(), InNewSpace(), v8::internal::JSObject::kHeaderSize, v8::internal::kPointerSize, v8::internal::AllocationMemento::kSize, v8::internal::FixedArrayBase::length(), map, v8::internal::HeapObject::map(), v8::internal::NEW_SPACE, NULL, v8::internal::OLD_POINTER_SPACE, v8::internal::SKIP_WRITE_BARRIER, SLOW_DCHECK, v8::internal::AllocationResult::To(), and v8::internal::UPDATE_WRITE_BARRIER.

+ Here is the call graph for this function:

◆ CreateApiObjects()

void v8::internal::Heap::CreateApiObjects ( )

Definition at line 2680 of file heap.cc.

2680  {
2681  HandleScope scope(isolate());
2682  Factory* factory = isolate()->factory();
2683  Handle<Map> new_neander_map =
2684  factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
2685 
2686  // Don't use Smi-only elements optimizations for objects with the neander
2687  // map. There are too many cases where element values are set directly with a
 2688  // bottleneck to trap the Smi-only -> fast elements transition, and there
 2689  // appears to be no benefit in optimizing this case.
2690  new_neander_map->set_elements_kind(TERMINAL_FAST_ELEMENTS_KIND);
2691  set_neander_map(*new_neander_map);
2692 
2693  Handle<JSObject> listeners = factory->NewNeanderObject();
2694  Handle<FixedArray> elements = factory->NewFixedArray(2);
2695  elements->set(0, Smi::FromInt(0));
2696  listeners->set_elements(*elements);
2697  set_message_listeners(*listeners);
2698 }
friend class Factory
Definition: heap.h:2025
Factory * factory()
Definition: isolate.h:982
@ JS_OBJECT_TYPE
Definition: objects.h:731
@ TERMINAL_FAST_ELEMENTS_KIND
Definition: elements-kind.h:63

References Factory, v8::internal::Isolate::factory(), v8::internal::Smi::FromInt(), isolate(), v8::internal::JS_OBJECT_TYPE, v8::internal::JSObject::kHeaderSize, and v8::internal::TERMINAL_FAST_ELEMENTS_KIND.

Referenced by CreateHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateFillerObjectAt()

void v8::internal::Heap::CreateFillerObjectAt ( Address  addr,
int  size 
)

Definition at line 3221 of file heap.cc.

3221  {
3222  if (size == 0) return;
3223  HeapObject* filler = HeapObject::FromAddress(addr);
3224  if (size == kPointerSize) {
3225  filler->set_map_no_write_barrier(one_pointer_filler_map());
3226  } else if (size == 2 * kPointerSize) {
3227  filler->set_map_no_write_barrier(two_pointer_filler_map());
3228  } else {
3229  filler->set_map_no_write_barrier(free_space_map());
3230  FreeSpace::cast(filler)->set_size(size);
3231  }
3232 }
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1464

References v8::internal::HeapObject::FromAddress(), v8::internal::kPointerSize, v8::internal::HeapObject::set_map_no_write_barrier(), and size.

Referenced by v8::internal::NewSpace::AddFreshPage(), AllocateCode(), AllocateFillerObject(), v8::internal::EnsureDoubleAligned(), EnsureFillerObjectAtTop(), v8::internal::PagedSpace::EvictEvacuationCandidatesFromFreeLists(), LeftTrimFixedArray(), v8::internal::String::MakeExternal(), v8::internal::JSObject::MigrateFastToSlow(), RightTrimFixedArray(), v8::internal::StringReplaceGlobalRegExpWithEmptyString(), and v8::internal::SeqString::Truncate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateFixedStubs()

void v8::internal::Heap::CreateFixedStubs ( )
private

Definition at line 2713 of file heap.cc.

2713  {
2714  // Here we create roots for fixed stubs. They are needed at GC
2715  // for cooking and uncooking (check out frames.cc).
 2716  // This eliminates the need for doing dictionary lookup in the
 2717  // stub cache for these stubs.
2718  HandleScope scope(isolate());
2719 
2720  // Create stubs that should be there, so we don't unexpectedly have to
2721  // create them if we need them during the creation of another stub.
2722  // Stub creation mixes raw pointers and handles in an unsafe manner so
2723  // we cannot create stubs while we are creating stubs.
2724  CodeStub::GenerateStubsAheadOfTime(isolate());
2725 
2726  // MacroAssembler::Abort calls (usually enabled with --debug-code) depend on
2727  // CEntryStub, so we need to call GenerateStubsAheadOfTime before JSEntryStub
2728  // is created.
2729 
 2730  // gcc-4.4 has a problem generating correct code for the following snippet:
2731  // { JSEntryStub stub;
2732  // js_entry_code_ = *stub.GetCode();
2733  // }
2734  // { JSConstructEntryStub stub;
2735  // js_construct_entry_code_ = *stub.GetCode();
2736  // }
2737  // To workaround the problem, make separate functions without inlining.
2738  Heap::CreateJSEntryStub();
2739  Heap::CreateJSConstructEntryStub();
2740 }

References isolate().

Referenced by CreateInitialObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateHeapObjects()

bool v8::internal::Heap::CreateHeapObjects ( )

Definition at line 5156 of file heap.cc.

5156  {
5157  // Create initial maps.
5158  if (!CreateInitialMaps()) return false;
5159  CreateApiObjects();
5160 
5161  // Create initial objects
5163  CHECK_EQ(0, gc_count_);
5164 
5165  set_native_contexts_list(undefined_value());
5166  set_array_buffers_list(undefined_value());
5167  set_allocation_sites_list(undefined_value());
5168  weak_object_to_code_table_ = undefined_value();
5169  return true;
5170 }
void CreateApiObjects()
Definition: heap.cc:2680
void CreateInitialObjects()
Definition: heap.cc:2743
bool CreateInitialMaps()
Definition: heap.cc:2372
#define CHECK_EQ(expected, value)
Definition: logging.h:169

References CHECK_EQ, CreateApiObjects(), CreateInitialMaps(), CreateInitialObjects(), gc_count_, set_allocation_sites_list(), set_array_buffers_list(), set_native_contexts_list(), and weak_object_to_code_table_.

Referenced by v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateInitialMaps()

bool v8::internal::Heap::CreateInitialMaps ( )
private

Definition at line 2372 of file heap.cc.

2372  {
2373  HeapObject* obj;
2374  {
2375  AllocationResult allocation = AllocatePartialMap(MAP_TYPE, Map::kSize);
2376  if (!allocation.To(&obj)) return false;
2377  }
2378  // Map::cast cannot be used due to uninitialized map field.
2379  Map* new_meta_map = reinterpret_cast<Map*>(obj);
2380  set_meta_map(new_meta_map);
2381  new_meta_map->set_map(new_meta_map);
2382 
2383  { // Partial map allocation
2384 #define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name) \
2385  { \
2386  Map* map; \
2387  if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
2388  set_##field_name##_map(map); \
2389  }
2390 
2395  constant_pool_array);
2396 
2397 #undef ALLOCATE_PARTIAL_MAP
2398  }
2399 
2400  // Allocate the empty array.
2401  {
2402  AllocationResult allocation = AllocateEmptyFixedArray();
2403  if (!allocation.To(&obj)) return false;
2404  }
2405  set_empty_fixed_array(FixedArray::cast(obj));
2406 
2407  {
2408  AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE);
2409  if (!allocation.To(&obj)) return false;
2410  }
2411  set_null_value(Oddball::cast(obj));
2412  Oddball::cast(obj)->set_kind(Oddball::kNull);
2413 
2414  {
2415  AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE);
2416  if (!allocation.To(&obj)) return false;
2417  }
2418  set_undefined_value(Oddball::cast(obj));
2419  Oddball::cast(obj)->set_kind(Oddball::kUndefined);
2420  DCHECK(!InNewSpace(undefined_value()));
2421 
2422  // Set preliminary exception sentinel value before actually initializing it.
2423  set_exception(null_value());
2424 
2425  // Allocate the empty descriptor array.
2426  {
2427  AllocationResult allocation = AllocateEmptyFixedArray();
2428  if (!allocation.To(&obj)) return false;
2429  }
2430  set_empty_descriptor_array(DescriptorArray::cast(obj));
2431 
2432  // Allocate the constant pool array.
2433  {
2434  AllocationResult allocation = AllocateEmptyConstantPoolArray();
2435  if (!allocation.To(&obj)) return false;
2436  }
2437  set_empty_constant_pool_array(ConstantPoolArray::cast(obj));
2438 
2439  // Fix the instance_descriptors for the existing maps.
2440  meta_map()->set_code_cache(empty_fixed_array());
2441  meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
2442  meta_map()->init_back_pointer(undefined_value());
2443  meta_map()->set_instance_descriptors(empty_descriptor_array());
2444 
2445  fixed_array_map()->set_code_cache(empty_fixed_array());
2446  fixed_array_map()->set_dependent_code(
2447  DependentCode::cast(empty_fixed_array()));
2448  fixed_array_map()->init_back_pointer(undefined_value());
2449  fixed_array_map()->set_instance_descriptors(empty_descriptor_array());
2450 
2451  undefined_map()->set_code_cache(empty_fixed_array());
2452  undefined_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
2453  undefined_map()->init_back_pointer(undefined_value());
2454  undefined_map()->set_instance_descriptors(empty_descriptor_array());
2455 
2456  null_map()->set_code_cache(empty_fixed_array());
2457  null_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
2458  null_map()->init_back_pointer(undefined_value());
2459  null_map()->set_instance_descriptors(empty_descriptor_array());
2460 
2461  constant_pool_array_map()->set_code_cache(empty_fixed_array());
2462  constant_pool_array_map()->set_dependent_code(
2463  DependentCode::cast(empty_fixed_array()));
2464  constant_pool_array_map()->init_back_pointer(undefined_value());
2465  constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array());
2466 
2467  // Fix prototype object for existing maps.
2468  meta_map()->set_prototype(null_value());
2469  meta_map()->set_constructor(null_value());
2470 
2471  fixed_array_map()->set_prototype(null_value());
2472  fixed_array_map()->set_constructor(null_value());
2473 
2474  undefined_map()->set_prototype(null_value());
2475  undefined_map()->set_constructor(null_value());
2476 
2477  null_map()->set_prototype(null_value());
2478  null_map()->set_constructor(null_value());
2479 
2480  constant_pool_array_map()->set_prototype(null_value());
2481  constant_pool_array_map()->set_constructor(null_value());
2482 
2483  { // Map allocation
2484 #define ALLOCATE_MAP(instance_type, size, field_name) \
2485  { \
2486  Map* map; \
2487  if (!AllocateMap((instance_type), size).To(&map)) return false; \
2488  set_##field_name##_map(map); \
2489  }
2490 
2491 #define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
2492  ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)
2493 
2494  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, fixed_cow_array)
2495  DCHECK(fixed_array_map() != fixed_cow_array_map());
2496 
2500  mutable_heap_number)
2503 
2506  ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, uninitialized);
2507  ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, arguments_marker);
2508  ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, no_interceptor_result_sentinel);
2510  ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, termination_exception);
2511 
2512  for (unsigned i = 0; i < arraysize(string_type_table); i++) {
2513  const StringTypeTable& entry = string_type_table[i];
2514  {
2515  AllocationResult allocation = AllocateMap(entry.type, entry.size);
2516  if (!allocation.To(&obj)) return false;
2517  }
2518  // Mark cons string maps as unstable, because their objects can change
2519  // maps during GC.
2520  Map* map = Map::cast(obj);
2521  if (StringShape(entry.type).IsCons()) map->mark_unstable();
2522  roots_[entry.index] = map;
2523  }
2524 
2525  ALLOCATE_VARSIZE_MAP(STRING_TYPE, undetectable_string)
2526  undetectable_string_map()->set_is_undetectable();
2527 
2528  ALLOCATE_VARSIZE_MAP(ONE_BYTE_STRING_TYPE, undetectable_one_byte_string);
2529  undetectable_one_byte_string_map()->set_is_undetectable();
2530 
2531  ALLOCATE_VARSIZE_MAP(FIXED_DOUBLE_ARRAY_TYPE, fixed_double_array)
2534 
2535 #define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size) \
2536  ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \
2537  external_##type##_array)
2538 
2540 #undef ALLOCATE_EXTERNAL_ARRAY_MAP
2541 
2542 #define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \
2543  ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, fixed_##type##_array)
2544 
2546 #undef ALLOCATE_FIXED_TYPED_ARRAY_MAP
2547 
2548  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, sloppy_arguments_elements)
2549 
2551 
2553  ALLOCATE_MAP(PROPERTY_CELL_TYPE, PropertyCell::kSize, global_property_cell)
2554  ALLOCATE_MAP(FILLER_TYPE, kPointerSize, one_pointer_filler)
2555  ALLOCATE_MAP(FILLER_TYPE, 2 * kPointerSize, two_pointer_filler)
2556 
2557 
2558  for (unsigned i = 0; i < arraysize(struct_table); i++) {
2559  const StructTable& entry = struct_table[i];
2560  Map* map;
2561  if (!AllocateMap(entry.type, entry.size).To(&map)) return false;
2562  roots_[entry.index] = map;
2563  }
2564 
2566  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, ordered_hash_table)
2567 
2568  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, function_context)
2569  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, catch_context)
2570  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, with_context)
2571  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, block_context)
2572  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_context)
2573  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, global_context)
2574 
2575  ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, native_context)
2576  native_context_map()->set_dictionary_map(true);
2577  native_context_map()->set_visitor_id(
2578  StaticVisitorBase::kVisitNativeContext);
2579 
2581  shared_function_info)
2582 
2585  external_map()->set_is_extensible(false);
2586 #undef ALLOCATE_VARSIZE_MAP
2587 #undef ALLOCATE_MAP
2588  }
2589 
2590  { // Empty arrays
2591  {
2592  ByteArray* byte_array;
2593  if (!AllocateByteArray(0, TENURED).To(&byte_array)) return false;
2594  set_empty_byte_array(byte_array);
2595  }
2596 
2597 #define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size) \
2598  { \
2599  ExternalArray* obj; \
2600  if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj)) \
2601  return false; \
2602  set_empty_external_##type##_array(obj); \
2603  }
2604 
2606 #undef ALLOCATE_EMPTY_EXTERNAL_ARRAY
2607 
2608 #define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \
2609  { \
2610  FixedTypedArrayBase* obj; \
2611  if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \
2612  return false; \
2613  set_empty_fixed_##type##_array(obj); \
2614  }
2615 
2617 #undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY
2618  }
2619  DCHECK(!InNewSpace(empty_fixed_array()));
2620  return true;
2621 }
MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray()
Definition: heap.cc:4178
MUST_USE_RESULT AllocationResult AllocatePartialMap(InstanceType instance_type, int instance_size)
Definition: heap.cc:2271
static const StringTypeTable string_type_table[]
Definition: heap.h:1558
static const StructTable struct_table[]
Definition: heap.h:1560
MUST_USE_RESULT AllocationResult AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
Definition: heap.cc:2295
MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray()
Definition: heap.cc:3918
static const int kSize
Definition: objects.h:7688
static const byte kUndefined
Definition: objects.h:9406
static const byte kNull
Definition: objects.h:9404
static const int kSize
Definition: objects.h:9398
static const int kAlignedSize
Definition: objects.h:6979
#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)
#define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size)
#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size)
#define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size)
#define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size)
#define ALLOCATE_MAP(instance_type, size, field_name)
#define ALLOCATE_VARSIZE_MAP(instance_type, field_name)
#define arraysize(array)
Definition: macros.h:86
@ FIXED_DOUBLE_ARRAY_TYPE
Definition: objects.h:692
@ FIXED_ARRAY_TYPE
Definition: objects.h:717
@ PROPERTY_CELL_TYPE
Definition: objects.h:665
@ FREE_SPACE_TYPE
Definition: objects.h:673
@ BYTE_ARRAY_TYPE
Definition: objects.h:672
@ ODDBALL_TYPE
Definition: objects.h:663
@ MUTABLE_HEAP_NUMBER_TYPE
Definition: objects.h:670
@ ONE_BYTE_STRING_TYPE
Definition: objects.h:633
@ HEAP_NUMBER_TYPE
Definition: objects.h:669
@ JS_MESSAGE_OBJECT_TYPE
Definition: objects.h:729
@ SHARED_FUNCTION_INFO_TYPE
Definition: objects.h:719
@ CONSTANT_POOL_ARRAY_TYPE
Definition: objects.h:718
@ FOREIGN_TYPE
Definition: objects.h:671
const int kVariableSizeSentinel
Definition: objects.h:309
#define TYPED_ARRAYS(V)
Definition: objects.h:4433

References Allocate(), ALLOCATE_EMPTY_EXTERNAL_ARRAY, ALLOCATE_EMPTY_FIXED_TYPED_ARRAY, ALLOCATE_EXTERNAL_ARRAY_MAP, ALLOCATE_FIXED_TYPED_ARRAY_MAP, ALLOCATE_MAP, ALLOCATE_PARTIAL_MAP, ALLOCATE_VARSIZE_MAP, AllocateByteArray(), AllocateEmptyConstantPoolArray(), AllocateEmptyFixedArray(), AllocateMap(), AllocatePartialMap(), arraysize, v8::internal::BYTE_ARRAY_TYPE, v8::internal::CELL_TYPE, v8::internal::CODE_TYPE, v8::internal::CONSTANT_POOL_ARRAY_TYPE, DCHECK, v8::internal::FILLER_TYPE, v8::internal::FIXED_ARRAY_TYPE, v8::internal::FIXED_DOUBLE_ARRAY_TYPE, v8::internal::FOREIGN_TYPE, v8::internal::FREE_SPACE_TYPE, v8::internal::HEAP_NUMBER_TYPE, v8::internal::Heap::StringTypeTable::index, v8::internal::Heap::StructTable::index, InNewSpace(), v8::internal::JS_MESSAGE_OBJECT_TYPE, v8::internal::JS_OBJECT_TYPE, v8::internal::SharedFunctionInfo::kAlignedSize, v8::internal::JSObject::kHeaderSize, v8::internal::Oddball::kNull, v8::internal::kPointerSize, v8::internal::HeapNumber::kSize, v8::internal::Map::kSize, v8::internal::JSMessageObject::kSize, v8::internal::Symbol::kSize, v8::internal::Oddball::kSize, v8::internal::Cell::kSize, v8::internal::PropertyCell::kSize, v8::internal::Foreign::kSize, v8::internal::Oddball::kUndefined, v8::internal::kVariableSizeSentinel, map, v8::internal::MAP_TYPE, v8::internal::MUTABLE_HEAP_NUMBER_TYPE, v8::internal::ODDBALL_TYPE, v8::internal::OLD_POINTER_SPACE, v8::internal::ONE_BYTE_STRING_TYPE, v8::internal::PROPERTY_CELL_TYPE, roots_, v8::internal::HeapObject::set_map(), v8::internal::SHARED_FUNCTION_INFO_TYPE, v8::internal::Heap::StringTypeTable::size, v8::internal::Heap::StructTable::size, v8::internal::STRING_TYPE, string_type_table, struct_table, v8::internal::SYMBOL_TYPE, v8::internal::TENURED, v8::internal::AllocationResult::To(), v8::internal::Heap::StringTypeTable::type, v8::internal::Heap::StructTable::type, and TYPED_ARRAYS.

Referenced by CreateHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ CreateInitialObjects()

void v8::internal::Heap::CreateInitialObjects ( )
private

Definition at line 2743 of file heap.cc.

2743  {
2744  HandleScope scope(isolate());
2745  Factory* factory = isolate()->factory();
2746 
2747  // The -0 value must be set before NewNumber works.
2748  set_minus_zero_value(*factory->NewHeapNumber(-0.0, IMMUTABLE, TENURED));
2749  DCHECK(std::signbit(minus_zero_value()->Number()) != 0);
2750 
2751  set_nan_value(
2752  *factory->NewHeapNumber(base::OS::nan_value(), IMMUTABLE, TENURED));
2753  set_infinity_value(*factory->NewHeapNumber(V8_INFINITY, IMMUTABLE, TENURED));
2754 
2755  // The hole has not been created yet, but we want to put something
2756  // predictable in the gaps in the string table, so let's make that Smi zero.
2757  set_the_hole_value(reinterpret_cast<Oddball*>(Smi::FromInt(0)));
2758 
2759  // Allocate initial string table.
2760  set_string_table(*StringTable::New(isolate(), kInitialStringTableSize));
2761 
2762  // Finish initializing oddballs after creating the string table.
2763  Oddball::Initialize(isolate(), factory->undefined_value(), "undefined",
2764  factory->nan_value(), Oddball::kUndefined);
2765 
2766  // Initialize the null_value.
2767  Oddball::Initialize(isolate(), factory->null_value(), "null",
2769 
2770  set_true_value(*factory->NewOddball(factory->boolean_map(), "true",
2771  handle(Smi::FromInt(1), isolate()),
2772  Oddball::kTrue));
2773 
2774  set_false_value(*factory->NewOddball(factory->boolean_map(), "false",
2775  handle(Smi::FromInt(0), isolate()),
2776  Oddball::kFalse));
2777 
2778  set_the_hole_value(*factory->NewOddball(factory->the_hole_map(), "hole",
2779  handle(Smi::FromInt(-1), isolate()),
2781 
2782  set_uninitialized_value(*factory->NewOddball(
2783  factory->uninitialized_map(), "uninitialized",
2785 
2786  set_arguments_marker(*factory->NewOddball(
2787  factory->arguments_marker_map(), "arguments_marker",
2789 
2790  set_no_interceptor_result_sentinel(*factory->NewOddball(
2791  factory->no_interceptor_result_sentinel_map(),
2792  "no_interceptor_result_sentinel", handle(Smi::FromInt(-2), isolate()),
2793  Oddball::kOther));
2794 
2795  set_termination_exception(*factory->NewOddball(
2796  factory->termination_exception_map(), "termination_exception",
2798 
2799  set_exception(*factory->NewOddball(factory->exception_map(), "exception",
2800  handle(Smi::FromInt(-5), isolate()),
2802 
2803  for (unsigned i = 0; i < arraysize(constant_string_table); i++) {
2804  Handle<String> str =
2805  factory->InternalizeUtf8String(constant_string_table[i].contents);
2807  }
2808 
2809  // Allocate the hidden string which is used to identify the hidden properties
2810  // in JSObjects. The hash code has a special value so that it will not match
2811  // the empty string when searching for the property. It cannot be part of the
2812  // loop above because it needs to be allocated manually with the special
2813  // hash code in place. The hash code for the hidden_string is zero to ensure
2814  // that it will always be at the first entry in property descriptors.
2815  hidden_string_ = *factory->NewOneByteInternalizedString(
2817 
2818  // Create the code_stubs dictionary. The initial size is set to avoid
2819  // expanding the dictionary during bootstrapping.
2820  set_code_stubs(*UnseededNumberDictionary::New(isolate(), 128));
2821 
2822  // Create the non_monomorphic_cache used in stub-cache.cc. The initial size
2823  // is set to avoid expanding the dictionary during bootstrapping.
2824  set_non_monomorphic_cache(*UnseededNumberDictionary::New(isolate(), 64));
2825 
2826  set_polymorphic_code_cache(PolymorphicCodeCache::cast(
2827  *factory->NewStruct(POLYMORPHIC_CODE_CACHE_TYPE)));
2828 
2829  set_instanceof_cache_function(Smi::FromInt(0));
2830  set_instanceof_cache_map(Smi::FromInt(0));
2831  set_instanceof_cache_answer(Smi::FromInt(0));
2832 
2833  CreateFixedStubs();
2834 
2835  // Allocate the dictionary of intrinsic function names.
2836  Handle<NameDictionary> intrinsic_names =
2837  NameDictionary::New(isolate(), Runtime::kNumFunctions, TENURED);
2839  set_intrinsic_function_names(*intrinsic_names);
2840 
2841  set_number_string_cache(
2842  *factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED));
2843 
2844  // Allocate cache for single character one byte strings.
2845  set_single_character_string_cache(
2846  *factory->NewFixedArray(String::kMaxOneByteCharCode + 1, TENURED));
2847 
2848  // Allocate cache for string split and regexp-multiple.
2849  set_string_split_cache(*factory->NewFixedArray(
2851  set_regexp_multiple_cache(*factory->NewFixedArray(
2853 
2854  // Allocate cache for external strings pointing to native source code.
2855  set_natives_source_cache(
2856  *factory->NewFixedArray(Natives::GetBuiltinsCount()));
2857 
2858  set_undefined_cell(*factory->NewCell(factory->undefined_value()));
2859 
2860  // The symbol registry is initialized lazily.
2861  set_symbol_registry(undefined_value());
2862 
2863  // Allocate object to hold object observation state.
2864  set_observation_state(*factory->NewJSObjectFromMap(
2865  factory->NewMap(JS_OBJECT_TYPE, JSObject::kHeaderSize)));
2866 
2867  // Microtask queue uses the empty fixed array as a sentinel for "empty".
2868  // Number of queued microtasks stored in Isolate::pending_microtask_count().
2869  set_microtask_queue(empty_fixed_array());
2870 
2871  set_detailed_stack_trace_symbol(*factory->NewPrivateOwnSymbol());
2872  set_elements_transition_symbol(*factory->NewPrivateOwnSymbol());
2873  set_frozen_symbol(*factory->NewPrivateOwnSymbol());
2874  set_megamorphic_symbol(*factory->NewPrivateOwnSymbol());
2875  set_premonomorphic_symbol(*factory->NewPrivateOwnSymbol());
2876  set_generic_symbol(*factory->NewPrivateOwnSymbol());
2877  set_nonexistent_symbol(*factory->NewPrivateOwnSymbol());
2878  set_normal_ic_symbol(*factory->NewPrivateOwnSymbol());
2879  set_observed_symbol(*factory->NewPrivateOwnSymbol());
2880  set_stack_trace_symbol(*factory->NewPrivateOwnSymbol());
2881  set_uninitialized_symbol(*factory->NewPrivateOwnSymbol());
2882  set_home_object_symbol(*factory->NewPrivateOwnSymbol());
2883 
2884  Handle<SeededNumberDictionary> slow_element_dictionary =
2886  slow_element_dictionary->set_requires_slow_elements();
2887  set_empty_slow_element_dictionary(*slow_element_dictionary);
2888 
2889  set_materialized_objects(*factory->NewFixedArray(0, TENURED));
2890 
2891  // Handling of script id generation is in Factory::NewScript.
2892  set_last_script_id(Smi::FromInt(v8::UnboundScript::kNoScriptId));
2893 
2894  set_allocation_sites_scratchpad(
2895  *factory->NewFixedArray(kAllocationSiteScratchpadSize, TENURED));
2897 
2898  // Initialize keyed lookup cache.
2900 
2901  // Initialize context slot cache.
2903 
2904  // Initialize descriptor cache.
2906 
2907  // Initialize compilation cache.
2909 }
static const int kNoScriptId
Definition: v8.h:977
static double nan_value()
static MUST_USE_RESULT Handle< UnseededNumberDictionary > New(Isolate *isolate, int at_least_space_for, PretenureFlag pretenure=NOT_TENURED)
Definition: objects.cc:14899
static MUST_USE_RESULT Handle< StringTable > New(Isolate *isolate, int at_least_space_for, MinimumCapacity capacity_option=USE_DEFAULT_MINIMUM_CAPACITY, PretenureFlag pretenure=NOT_TENURED)
Definition: objects.cc:13756
void CreateFixedStubs()
Definition: heap.cc:2713
static const ConstantStringTable constant_string_table[]
Definition: heap.h:1559
static const int kInitialStringTableSize
Definition: heap.h:1948
static const int kInitialNumberStringCacheSize
Definition: heap.h:1950
void InitializeAllocationSitesScratchpad()
Definition: heap.cc:3068
KeyedLookupCache * keyed_lookup_cache()
Definition: isolate.h:887
DescriptorLookupCache * descriptor_lookup_cache()
Definition: isolate.h:895
ContextSlotCache * context_slot_cache()
Definition: isolate.h:891
static void Initialize(Isolate *isolate, Handle< Oddball > oddball, const char *to_string, Handle< Object > to_number, byte kind)
Definition: objects.cc:9596
static const byte kFalse
Definition: objects.h:9400
static const byte kArgumentMarker
Definition: objects.h:9405
static const byte kException
Definition: objects.h:9409
static const byte kTheHole
Definition: objects.h:9403
static const byte kTrue
Definition: objects.h:9401
static const byte kUninitialized
Definition: objects.h:9407
static const byte kOther
Definition: objects.h:9408
static const int kRegExpResultsCacheSize
Definition: heap.h:2380
static void InitializeIntrinsicFunctionNames(Isolate *isolate, Handle< NameDictionary > dict)
Definition: runtime.cc:9273
static const int kEmptyStringHash
Definition: objects.h:8817
static const int32_t kMaxOneByteCharCode
Definition: objects.h:8811
#define V8_INFINITY
Definition: globals.h:25
@ POLYMORPHIC_CODE_CACHE_TYPE
Definition: objects.h:711
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:146
Vector< const uint8_t > OneByteVector(const char *data, int length)
Definition: vector.h:162

References arraysize, v8::internal::CompilationCache::Clear(), v8::internal::KeyedLookupCache::Clear(), v8::internal::DescriptorLookupCache::Clear(), v8::internal::ContextSlotCache::Clear(), v8::internal::Isolate::compilation_cache(), constant_string_table, v8::internal::Isolate::context_slot_cache(), CreateFixedStubs(), DCHECK, v8::internal::Isolate::descriptor_lookup_cache(), Factory, v8::internal::Isolate::factory(), v8::internal::Smi::FromInt(), v8::internal::NativesCollection< type >::GetBuiltinsCount(), v8::internal::handle(), hidden_string_, v8::internal::IMMUTABLE, v8::internal::Heap::ConstantStringTable::index, v8::internal::Oddball::Initialize(), InitializeAllocationSitesScratchpad(), v8::internal::Runtime::InitializeIntrinsicFunctionNames(), isolate(), isolate_, v8::internal::JS_OBJECT_TYPE, kAllocationSiteScratchpadSize, v8::internal::Oddball::kArgumentMarker, v8::internal::String::kEmptyStringHash, v8::internal::Oddball::kException, v8::internal::Isolate::keyed_lookup_cache(), v8::internal::Oddball::kFalse, v8::internal::JSObject::kHeaderSize, kInitialNumberStringCacheSize, kInitialStringTableSize, v8::internal::String::kMaxOneByteCharCode, v8::UnboundScript::kNoScriptId, v8::internal::Oddball::kNull, v8::internal::Oddball::kOther, v8::internal::RegExpResultsCache::kRegExpResultsCacheSize, v8::internal::Oddball::kTheHole, v8::internal::Oddball::kTrue, v8::internal::Oddball::kUndefined, v8::internal::Oddball::kUninitialized, v8::base::OS::nan_value(), v8::internal::HashTable< StringTable, StringTableShape, HashTableKey * >::New(), v8::internal::Dictionary< NameDictionary, NameDictionaryShape, Handle< Name > >::New(), v8::internal::Dictionary< SeededNumberDictionary, SeededNumberDictionaryShape, uint32_t >::New(), v8::internal::Dictionary< UnseededNumberDictionary, UnseededNumberDictionaryShape, uint32_t >::New(), v8::internal::OneByteVector(), v8::internal::POLYMORPHIC_CODE_CACHE_TYPE, roots_, v8::internal::TENURED, and V8_INFINITY.

Referenced by CreateHeapObjects().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ decrement_scan_on_scavenge_pages()

void v8::internal::Heap::decrement_scan_on_scavenge_pages ( )
inline

Definition at line 746 of file heap.h.

746  {
747  scan_on_scavenge_pages_--;
748  if (FLAG_gc_verbose) {
749  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
750  }
751  }

References v8::internal::PrintF(), and scan_on_scavenge_pages_.

Referenced by v8::internal::PagedSpace::ReleasePage(), and v8::internal::MemoryChunk::set_scan_on_scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DeoptMarkedAllocationSites()

void v8::internal::Heap::DeoptMarkedAllocationSites ( )

Definition at line 569 of file heap.cc.

569  {
570  // TODO(hpayer): If iterating over the allocation sites list becomes a
571  // performance issue, use a cache heap data structure instead (similar to the
572  // allocation sites scratchpad).
573  Object* list_element = allocation_sites_list();
574  while (list_element->IsAllocationSite()) {
575  AllocationSite* site = AllocationSite::cast(list_element);
576  if (site->deopt_dependent_code()) {
577  site->dependent_code()->MarkCodeForDeoptimization(
578  isolate_, DependentCode::kAllocationSiteTenuringChangedGroup);
579  site->set_deopt_dependent_code(false);
580  }
581  list_element = site->weak_next();
582  }
583  Deoptimizer::DeoptimizeMarkedCode(isolate_);
584 }
static void DeoptimizeMarkedCode(Isolate *isolate)
Definition: deoptimizer.cc:454
Object * allocation_sites_list()
Definition: heap.h:801

References allocation_sites_list(), v8::internal::AllocationSite::deopt_dependent_code(), v8::internal::Deoptimizer::DeoptimizeMarkedCode(), isolate_, v8::internal::DependentCode::kAllocationSiteTenuringChangedGroup, and v8::internal::AllocationSite::set_deopt_dependent_code().

+ Here is the call graph for this function:

◆ DeoptMaybeTenuredAllocationSites()

bool v8::internal::Heap::DeoptMaybeTenuredAllocationSites ( )
inline

Definition at line 1284 of file heap.h.

1284  {
1285  return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
1286  }
bool IsAtMaximumCapacity()
Definition: spaces.h:2409

References v8::internal::NewSpace::IsAtMaximumCapacity(), maximum_size_scavenges_, and new_space_.

Referenced by ProcessPretenuringFeedback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DisableInlineAllocation()

void v8::internal::Heap::DisableInlineAllocation ( )

Definition at line 5034 of file heap.cc.

5034  {
5035  if (inline_allocation_disabled_) return;
5036  inline_allocation_disabled_ = true;
5037 
5038  // Update inline allocation limit for new space.
5039  new_space()->UpdateInlineAllocationLimit(0);
5040 
5041  // Update inline allocation limit for old spaces.
5042  PagedSpaces spaces(this);
5043  for (PagedSpace* space = spaces.next(); space != NULL;
5044  space = spaces.next()) {
5045  space->EmptyAllocationInfo();
5046  }
5047 }
NewSpace * new_space()
Definition: heap.h:593
void UpdateInlineAllocationLimit(int size_in_bytes)
Definition: spaces.cc:1323

References inline_allocation_disabled_, new_space(), NULL, space(), and v8::internal::NewSpace::UpdateInlineAllocationLimit().

Referenced by v8::internal::HeapProfiler::StartHeapObjectsTracking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DISALLOW_COPY_AND_ASSIGN()

v8::internal::Heap::DISALLOW_COPY_AND_ASSIGN ( Heap  )
private

◆ DoScavenge()

Address v8::internal::Heap::DoScavenge ( ObjectVisitor scavenge_visitor,
Address  new_space_front 
)
private

Definition at line 1743 of file heap.cc.

1744  {
1745  do {
1746  SemiSpace::AssertValidRange(new_space_front, new_space_.top());
1747  // The addresses new_space_front and new_space_.top() define a
1748  // queue of unprocessed copied objects. Process them until the
1749  // queue is empty.
1750  while (new_space_front != new_space_.top()) {
1751  if (!NewSpacePage::IsAtEnd(new_space_front)) {
1752  HeapObject* object = HeapObject::FromAddress(new_space_front);
1753  new_space_front +=
1754  NewSpaceScavenger::IterateBody(object->map(), object);
1755  } else {
1756  new_space_front =
1757  NewSpacePage::FromLimit(new_space_front)->next_page()->area_start();
1758  }
1759  }
1760 
1761  // Promote and process all the to-be-promoted objects.
1762  {
1763  StoreBufferRebuildScope scope(this, store_buffer(),
1764  &ScavengeStoreBufferCallback);
1765  while (!promotion_queue()->is_empty()) {
1766  HeapObject* target;
1767  int size;
1768  promotion_queue()->remove(&target, &size);
1769 
1770  // Promoted object might be already partially visited
1771  // during old space pointer iteration. Thus we search specifically
1772  // for pointers to from semispace instead of looking for pointers
1773  // to new space.
1774  DCHECK(!target->IsMap());
1775  IterateAndMarkPointersToFromSpace(
1776  target->address(), target->address() + size, &ScavengeObject);
1777  }
1778  }
1779 
1780  // Take another spin if there are now unswept objects in new space
1781  // (there are currently no more unswept promoted objects).
1782  } while (new_space_front != new_space_.top());
1783 
1784  return new_space_front;
1785 }
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:554
PromotionQueue * promotion_queue()
Definition: heap.h:753
static void ScavengeStoreBufferCallback(Heap *heap, MemoryChunk *page, StoreBufferEvent event)
Definition: heap.cc:1309
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
Definition: heap.cc:4540
static bool IsAtEnd(Address addr)
Definition: spaces.h:2014
static NewSpacePage * FromLimit(Address address_limit)
Definition: spaces.h:2031
NewSpacePage * next_page() const
Definition: spaces.h:1993
void remove(HeapObject **target, int *size)
Definition: heap.h:421
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:2165

References v8::internal::HeapObject::address(), v8::internal::MemoryChunk::area_start(), v8::internal::SemiSpace::AssertValidRange(), DCHECK, v8::internal::HeapObject::FromAddress(), v8::internal::NewSpacePage::FromLimit(), v8::internal::NewSpacePage::IsAtEnd(), IterateAndMarkPointersToFromSpace(), v8::internal::HeapObject::map(), new_space_, v8::internal::NewSpacePage::next_page(), promotion_queue(), v8::internal::PromotionQueue::remove(), ScavengeObject(), ScavengeStoreBufferCallback(), size, store_buffer(), and v8::internal::NewSpace::top().

Referenced by Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ DoScavengeObject()

void v8::internal::Heap::DoScavengeObject ( Map map,
HeapObject **  slot,
HeapObject obj 
)
inline

Definition at line 1221 of file heap.h.

1221  {
1222  scavenging_visitors_table_.GetVisitor(map)(map, slot, obj);
1223  }
VisitorDispatchTable< ScavengingCallback > scavenging_visitors_table_
Definition: heap.h:2015

References v8::internal::VisitorDispatchTable< Callback >::GetVisitor(), map, and scavenging_visitors_table_.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateShortcutCandidate().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EmptyExternalArrayForMap()

ExternalArray * v8::internal::Heap::EmptyExternalArrayForMap ( Map map)

Definition at line 3178 of file heap.cc.

3178  {
3179  return ExternalArray::cast(
3180  roots_[RootIndexForEmptyExternalArray(map->elements_kind())]);
3181 }
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind)
Definition: heap.cc:3145

References map, RootIndexForEmptyExternalArray(), and roots_.

Referenced by v8::internal::Map::GetInitialElements().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EmptyFixedTypedArrayForMap()

FixedTypedArrayBase * v8::internal::Heap::EmptyFixedTypedArrayForMap ( Map map)

Definition at line 3184 of file heap.cc.

3184  {
3185  return FixedTypedArrayBase::cast(
3186  roots_[RootIndexForEmptyFixedTypedArray(map->elements_kind())]);
3187 }
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind)
Definition: heap.cc:3162

References map, RootIndexForEmptyFixedTypedArray(), and roots_.

Referenced by v8::internal::Map::GetInitialElements().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnableInlineAllocation()

void v8::internal::Heap::EnableInlineAllocation ( )

Definition at line 5025 of file heap.cc.

5025  {
5026  if (!inline_allocation_disabled_) return;
5028 
5029  // Update inline allocation limit for new space.
5031 }

References inline_allocation_disabled_, new_space(), and v8::internal::NewSpace::UpdateInlineAllocationLimit().

Referenced by v8::internal::HeapProfiler::StopHeapObjectsTracking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ encountered_weak_collections()

Object* v8::internal::Heap::encountered_weak_collections ( ) const
inline

Definition at line 811 of file heap.h.

811  {
813  }
Object * encountered_weak_collections_
Definition: heap.h:1537

References encountered_weak_collections_.

Referenced by v8::internal::MarkCompactCollector::AbortWeakCollections(), v8::internal::MarkCompactCollector::ClearWeakCollections(), v8::internal::StaticMarkingVisitor< IncrementalMarkingMarkingVisitor >::MarkInlinedFunctionsCode(), and v8::internal::MarkCompactCollector::ProcessWeakCollections().

+ Here is the caller graph for this function:

◆ EnsureFillerObjectAtTop()

void v8::internal::Heap::EnsureFillerObjectAtTop ( )
private

Definition at line 770 of file heap.cc.

770  {
771  // There may be an allocation memento behind every object in new space.
 772  // If we evacuate a new space that is not full, or if we are on the last page of
773  // the new space, then there may be uninitialized memory behind the top
774  // pointer of the new space page. We store a filler object there to
775  // identify the unused space.
776  Address from_top = new_space_.top();
777  Address from_limit = new_space_.limit();
778  if (from_top < from_limit) {
779  int remaining_in_page = static_cast<int>(from_limit - from_top);
780  CreateFillerObjectAt(from_top, remaining_in_page);
781  }
782 }

References CreateFillerObjectAt(), v8::internal::NewSpace::limit(), new_space_, and v8::internal::NewSpace::top().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureFromSpaceIsCommitted()

void v8::internal::Heap::EnsureFromSpaceIsCommitted ( )
private

Definition at line 971 of file heap.cc.

971  {
972  if (new_space_.CommitFromSpaceIfNeeded()) return;
973 
974  // Committing memory to from space failed.
975  // Memory is exhausted and we will die.
976  V8::FatalProcessOutOfMemory("Committing semi space failed.");
977 }
bool CommitFromSpaceIfNeeded()
Definition: spaces.h:2534
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)

References v8::internal::NewSpace::CommitFromSpaceIfNeeded(), v8::internal::V8::FatalProcessOutOfMemory(), and new_space_.

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureWeakObjectToCodeTable()

void v8::internal::Heap::EnsureWeakObjectToCodeTable ( )

Definition at line 5367 of file heap.cc.

5367  {
5368  if (!weak_object_to_code_table()->IsHashTable()) {
5371  TENURED));
5372  }
5373 }
Object * weak_object_to_code_table()
Definition: heap.h:806
@ USE_DEFAULT_MINIMUM_CAPACITY
Definition: globals.h:385

References isolate(), v8::internal::HashTable< WeakHashTable, WeakHashTableShape< 2 >, Handle< Object > >::New(), set_weak_object_to_code_table(), v8::internal::TENURED, v8::internal::USE_DEFAULT_MINIMUM_CAPACITY, and weak_object_to_code_table().

Referenced by v8::internal::AddWeakObjectToCodeDependency().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EvaluateOldSpaceLocalPretenuring()

void v8::internal::Heap::EvaluateOldSpaceLocalPretenuring ( uint64_t  size_of_objects_before_gc)
private

Definition at line 1684 of file heap.cc.

1685  {
1686  uint64_t size_of_objects_after_gc = SizeOfObjects();
1687  double old_generation_survival_rate =
1688  (static_cast<double>(size_of_objects_after_gc) * 100) /
1689  static_cast<double>(size_of_objects_before_gc);
1690 
1691  if (old_generation_survival_rate < kOldSurvivalRateLowThreshold) {
1692  // Too many objects died in the old generation, pretenuring of wrong
1693  // allocation sites may be the cause for that. We have to deopt all
1694  // dependent code registered in the allocation sites to re-evaluate
1695  // our pretenuring decisions.
1697  if (FLAG_trace_pretenuring) {
1698  PrintF(
1699  "Deopt all allocation sites dependent code due to low survival "
1700  "rate in the old generation %f\n",
1701  old_generation_survival_rate);
1702  }
1703  }
1704 }
void ResetAllAllocationSitesDependentCode(PretenureFlag flag)
Definition: heap.cc:1667
static const int kOldSurvivalRateLowThreshold
Definition: heap.h:1906
intptr_t SizeOfObjects()
Definition: heap.cc:460

References kOldSurvivalRateLowThreshold, v8::internal::PrintF(), ResetAllAllocationSitesDependentCode(), SizeOfObjects(), and v8::internal::TENURED.

Referenced by MarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ external_string_table()

ExternalStringTable* v8::internal::Heap::external_string_table ( )
inline

Definition at line 1207 of file heap.h.

1207  {
1208  return &external_string_table_;
1209  }

References external_string_table_.

Referenced by v8::internal::ExternalizeStringExtension::Externalize(), v8::String::MakeExternal(), and v8::String::NewExternal().

+ Here is the caller graph for this function:

◆ FatalProcessOutOfMemory()

void v8::internal::Heap::FatalProcessOutOfMemory ( const char *  location,
bool  take_snapshot = false 
)
static

Definition at line 5376 of file heap.cc.

5376  {
5377  v8::internal::V8::FatalProcessOutOfMemory(location, take_snapshot);
5378 }

References v8::internal::V8::FatalProcessOutOfMemory().

Referenced by v8::internal::OrderedHashTable< Derived, Iterator, entrysize >::Allocate(), AllocateByteArray(), AllocateRawFixedArray(), AllocateRawFixedDoubleArray(), and v8::internal::HashTable< Derived, Shape, Key >::New().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FinalizeExternalString()

void v8::internal::Heap::FinalizeExternalString ( String string)
inline

Definition at line 307 of file heap-inl.h.

307  {
308  DCHECK(string->IsExternalString());
310  reinterpret_cast<v8::String::ExternalStringResourceBase**>(
311  reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
313 
314  // Dispose of the C++ object if it has not already been disposed.
315  if (*resource_addr != NULL) {
316  (*resource_addr)->Dispose();
317  *resource_addr = NULL;
318  }
319 }
virtual void Dispose()
Internally V8 will call this Dispose method when the external string resource is no longer needed.
Definition: v8.h:1868
static const int kResourceOffset
Definition: objects.h:9136
const int kHeapObjectTag
Definition: v8.h:5737

References DCHECK, v8::String::ExternalStringResourceBase::Dispose(), v8::internal::kHeapObjectTag, v8::internal::ExternalString::kResourceOffset, and NULL.

Referenced by v8::internal::ExternalStringTable::TearDown(), UpdateNewSpaceReferenceInExternalStringTableEntry(), and v8::internal::StringTableCleaner< finalize_external_strings >::VisitPointers().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FindAllocationMemento()

AllocationMemento * v8::internal::Heap::FindAllocationMemento ( HeapObject object)
inline

Definition at line 497 of file heap-inl.h.

497  {
498  // Check if there is potentially a memento behind the object. If
499  // the last word of the memento is on another page we return
500  // immediately.
501  Address object_address = object->address();
502  Address memento_address = object_address + object->Size();
503  Address last_memento_word_address = memento_address + kPointerSize;
504  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
505  return NULL;
506  }
507 
508  HeapObject* candidate = HeapObject::FromAddress(memento_address);
509  Map* candidate_map = candidate->map();
510  // This fast check may peek at an uninitialized word. However, the slow check
511  // below (memento_address == top) ensures that this is safe. Mark the word as
512  // initialized to silence MemorySanitizer warnings.
513  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
514  if (candidate_map != allocation_memento_map()) return NULL;
515 
516  // Either the object is the last object in the new space, or there is another
 517  // object of at least word size (the header map word) following it, so it
 518  // suffices to compare ptr and top here. Note that technically we do not have
519  // to compare with the current top pointer of the from space page during GC,
520  // since we always install filler objects above the top pointer of a from
521  // space page when performing a garbage collection. However, always performing
522  // the test makes it possible to have a single, unified version of
523  // FindAllocationMemento that is used both by the GC and the mutator.
524  Address top = NewSpaceTop();
525  DCHECK(memento_address == top ||
526  memento_address + HeapObject::kHeaderSize <= top ||
527  !NewSpacePage::OnSamePage(memento_address, top));
528  if (memento_address == top) return NULL;
529 
530  AllocationMemento* memento = AllocationMemento::cast(candidate);
531  if (!memento->IsValid()) return NULL;
532  return memento;
533 }
static const int kHeaderSize
Definition: objects.h:1428
Address NewSpaceTop()
Definition: heap.h:591
static bool OnSamePage(Address address1, Address address2)
Definition: spaces.h:2036
#define MSAN_MEMORY_IS_INITIALIZED(p, s)
Definition: msan.h:30

References v8::internal::HeapObject::address(), DCHECK, v8::internal::HeapObject::FromAddress(), v8::internal::AllocationMemento::IsValid(), v8::internal::HeapObject::kHeaderSize, v8::internal::kPointerSize, v8::internal::HeapObject::map(), MSAN_MEMORY_IS_INITIALIZED, NewSpaceTop(), NULL, and v8::internal::NewSpacePage::OnSamePage().

Referenced by v8::internal::JSObject::UpdateAllocationSite(), and UpdateAllocationSiteFeedback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ flush_monomorphic_ics()

bool v8::internal::Heap::flush_monomorphic_ics ( )
inline

Definition at line 1274 of file heap.h.

1274 { return flush_monomorphic_ics_; }

References flush_monomorphic_ics_.

◆ FlushAllocationSitesScratchpad()

void v8::internal::Heap::FlushAllocationSitesScratchpad ( )
private

Definition at line 3060 of file heap.cc.

3060  {
3061  for (int i = 0; i < allocation_sites_scratchpad_length_; i++) {
3062  allocation_sites_scratchpad()->set_undefined(i);
3063  }
3065 }

References allocation_sites_scratchpad_length_.

Referenced by ProcessPretenuringFeedback().

+ Here is the caller graph for this function:

◆ FlushNumberStringCache()

void v8::internal::Heap::FlushNumberStringCache ( )
private

Definition at line 3051 of file heap.cc.

3051  {
3052  // Flush the number to string cache.
3053  int len = number_string_cache()->length();
3054  for (int i = 0; i < len; i++) {
3055  number_string_cache()->set_undefined(i);
3056  }
3057 }

Referenced by MarkCompactPrologue().

+ Here is the caller graph for this function:

◆ FreeQueuedChunks()

void v8::internal::Heap::FreeQueuedChunks ( )

Definition at line 6025 of file heap.cc.

6025  {
6026  if (chunks_queued_for_free_ == NULL) return;
6027  MemoryChunk* next;
6028  MemoryChunk* chunk;
6029  for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
6030  next = chunk->next_chunk();
6031  chunk->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
6032 
6033  if (chunk->owner()->identity() == LO_SPACE) {
6034  // StoreBuffer::Filter relies on MemoryChunk::FromAnyPointerAddress.
6035  // If FromAnyPointerAddress encounters a slot that belongs to a large
6036  // chunk queued for deletion it will fail to find the chunk because
6037  // it tries to perform a search in the list of pages owned by the large
6038  // object space, and queued chunks were detached from that list.
6039  // To work around this we split the large chunk into normal kPageSize-aligned
6040  // pieces and initialize the size, owner and flags fields of every piece.
6041  // If FromAnyPointerAddress encounters a slot that belongs to one of
6042  // these smaller pieces it will treat it as a slot on a normal Page.
6043  Address chunk_end = chunk->address() + chunk->size();
6044  MemoryChunk* inner =
6045  MemoryChunk::FromAddress(chunk->address() + Page::kPageSize);
6046  MemoryChunk* inner_last = MemoryChunk::FromAddress(chunk_end - 1);
6047  while (inner <= inner_last) {
6048  // The size of a large chunk is always a multiple of
6049  // OS::AllocateAlignment(), so there is always
6050  // enough space for a fake MemoryChunk header.
6051  Address area_end = Min(inner->address() + Page::kPageSize, chunk_end);
6052  // Guard against overflow.
6053  if (area_end < inner->address()) area_end = chunk_end;
6054  inner->SetArea(inner->address(), area_end);
6055  inner->set_size(Page::kPageSize);
6056  inner->set_owner(lo_space());
6057  inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
6058  inner = MemoryChunk::FromAddress(inner->address() + Page::kPageSize);
6059  }
6060  }
6061  }
6062  isolate_->heap()->store_buffer()->Compact();
6064  for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
6065  next = chunk->next_chunk();
6066  isolate_->memory_allocator()->Free(chunk);
6067  }
6069 }
void Free(MemoryChunk *chunk)
Definition: spaces.cc:700

References v8::internal::MemoryChunk::ABOUT_TO_BE_FREED, v8::internal::MemoryChunk::address(), chunks_queued_for_free_, v8::internal::StoreBuffer::Compact(), v8::internal::StoreBuffer::Filter(), v8::internal::MemoryAllocator::Free(), v8::internal::MemoryChunk::FromAddress(), v8::internal::Isolate::heap(), v8::internal::Space::identity(), isolate_, v8::internal::Page::kPageSize, v8::internal::LO_SPACE, lo_space(), v8::internal::Isolate::memory_allocator(), v8::internal::Min(), v8::internal::MemoryChunk::next_chunk(), NULL, v8::internal::MemoryChunk::owner(), v8::internal::MemoryChunk::set_owner(), v8::internal::MemoryChunk::set_size(), v8::internal::MemoryChunk::SetArea(), v8::internal::MemoryChunk::SetFlag(), v8::internal::MemoryChunk::size(), and store_buffer().

Referenced by v8::internal::LargeObjectSpace::FreeUnmarkedObjects(), v8::internal::MarkCompactCollector::ReleaseEvacuationCandidates(), and v8::internal::MarkCompactCollector::SweepSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FullSizeNumberStringCacheLength()

int v8::internal::Heap::FullSizeNumberStringCacheLength ( )
private

Definition at line 3038 of file heap.cc.

3038  {
3039  // Compute the size of the number string cache based on the max newspace size.
3040  // The number string cache has a minimum size based on twice the initial cache
3041  // size to ensure that it is bigger after being made 'full size'.
3042  int number_string_cache_size = max_semi_space_size_ / 512;
3043  number_string_cache_size = Max(kInitialNumberStringCacheSize * 2,
3044  Min(0x4000, number_string_cache_size));
3045  // There is a string and a number per entry so the length is twice the number
3046  // of entries.
3047  return number_string_cache_size * 2;
3048 }

References kInitialNumberStringCacheSize, v8::internal::Max(), max_semi_space_size_, and v8::internal::Min().

+ Here is the call graph for this function:

◆ GarbageCollectionEpilogue()

void v8::internal::Heap::GarbageCollectionEpilogue ( )
private

Definition at line 587 of file heap.cc.

587  {
589 
590  // In release mode, we only zap the from space under heap verification.
591  if (Heap::ShouldZapGarbage()) {
592  ZapFromSpace();
593  }
594 
595  // Process pretenuring feedback and update allocation sites.
597 
598 #ifdef VERIFY_HEAP
599  if (FLAG_verify_heap) {
600  Verify();
601  }
602 #endif
603 
604  AllowHeapAllocation for_the_rest_of_the_epilogue;
605 
606 #ifdef DEBUG
607  if (FLAG_print_global_handles) isolate_->global_handles()->Print();
608  if (FLAG_print_handles) PrintHandles();
609  if (FLAG_gc_verbose) Print();
610  if (FLAG_code_stats) ReportCodeStatistics("After GC");
611 #endif
612  if (FLAG_deopt_every_n_garbage_collections > 0) {
613  // TODO(jkummerow/ulan/jarin): This is not safe! We can't assume that
614  // the topmost optimized frame can be deoptimized safely, because it
615  // might not have a lazy bailout point right after its current PC.
616  if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) {
619  }
620  }
621 
623 
624  isolate_->counters()->alive_after_last_gc()->Set(
625  static_cast<int>(SizeOfObjects()));
626 
627  isolate_->counters()->string_table_capacity()->Set(
628  string_table()->Capacity());
629  isolate_->counters()->number_of_symbols()->Set(
630  string_table()->NumberOfElements());
631 
633  isolate_->counters()->codegen_fraction_crankshaft()->AddSample(
634  static_cast<int>((crankshaft_codegen_bytes_generated_ * 100.0) /
637  }
638 
639  if (CommittedMemory() > 0) {
640  isolate_->counters()->external_fragmentation_total()->AddSample(
641  static_cast<int>(100 - (SizeOfObjects() * 100.0) / CommittedMemory()));
642 
643  isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
644  (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
645  isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
646  static_cast<int>((old_pointer_space()->CommittedMemory() * 100.0) /
647  CommittedMemory()));
648  isolate_->counters()->heap_fraction_old_data_space()->AddSample(
649  static_cast<int>((old_data_space()->CommittedMemory() * 100.0) /
650  CommittedMemory()));
651  isolate_->counters()->heap_fraction_code_space()->AddSample(
652  static_cast<int>((code_space()->CommittedMemory() * 100.0) /
653  CommittedMemory()));
654  isolate_->counters()->heap_fraction_map_space()->AddSample(static_cast<int>(
655  (map_space()->CommittedMemory() * 100.0) / CommittedMemory()));
656  isolate_->counters()->heap_fraction_cell_space()->AddSample(
657  static_cast<int>((cell_space()->CommittedMemory() * 100.0) /
658  CommittedMemory()));
659  isolate_->counters()->heap_fraction_property_cell_space()->AddSample(
660  static_cast<int>((property_cell_space()->CommittedMemory() * 100.0) /
661  CommittedMemory()));
662  isolate_->counters()->heap_fraction_lo_space()->AddSample(static_cast<int>(
663  (lo_space()->CommittedMemory() * 100.0) / CommittedMemory()));
664 
665  isolate_->counters()->heap_sample_total_committed()->AddSample(
666  static_cast<int>(CommittedMemory() / KB));
667  isolate_->counters()->heap_sample_total_used()->AddSample(
668  static_cast<int>(SizeOfObjects() / KB));
669  isolate_->counters()->heap_sample_map_space_committed()->AddSample(
670  static_cast<int>(map_space()->CommittedMemory() / KB));
671  isolate_->counters()->heap_sample_cell_space_committed()->AddSample(
672  static_cast<int>(cell_space()->CommittedMemory() / KB));
673  isolate_->counters()
674  ->heap_sample_property_cell_space_committed()
675  ->AddSample(
676  static_cast<int>(property_cell_space()->CommittedMemory() / KB));
677  isolate_->counters()->heap_sample_code_space_committed()->AddSample(
678  static_cast<int>(code_space()->CommittedMemory() / KB));
679 
680  isolate_->counters()->heap_sample_maximum_committed()->AddSample(
681  static_cast<int>(MaximumCommittedMemory() / KB));
682  }
683 
684 #define UPDATE_COUNTERS_FOR_SPACE(space) \
685  isolate_->counters()->space##_bytes_available()->Set( \
686  static_cast<int>(space()->Available())); \
687  isolate_->counters()->space##_bytes_committed()->Set( \
688  static_cast<int>(space()->CommittedMemory())); \
689  isolate_->counters()->space##_bytes_used()->Set( \
690  static_cast<int>(space()->SizeOfObjects()));
691 #define UPDATE_FRAGMENTATION_FOR_SPACE(space) \
692  if (space()->CommittedMemory() > 0) { \
693  isolate_->counters()->external_fragmentation_##space()->AddSample( \
694  static_cast<int>(100 - \
695  (space()->SizeOfObjects() * 100.0) / \
696  space()->CommittedMemory())); \
697  }
698 #define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
699  UPDATE_COUNTERS_FOR_SPACE(space) \
700  UPDATE_FRAGMENTATION_FOR_SPACE(space)
701 
710 #undef UPDATE_COUNTERS_FOR_SPACE
711 #undef UPDATE_FRAGMENTATION_FOR_SPACE
712 #undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
713 
714 #ifdef DEBUG
716 #endif // DEBUG
717 
718  // Remember the last top pointer so that we can later find out
719  // whether we allocated in new space since the last GC.
721 }
static void DeoptimizeAll(Isolate *isolate)
Definition: deoptimizer.cc:437
OldSpace * old_pointer_space()
Definition: heap.h:594
void ZapFromSpace()
Definition: heap.cc:4527
PropertyCellSpace * property_cell_space()
Definition: heap.h:599
void ProcessPretenuringFeedback()
Definition: heap.cc:493
intptr_t MaximumCommittedMemory()
Definition: heap.h:572
void ReportStatisticsAfterGC()
Definition: heap.cc:394
CellSpace * cell_space()
Definition: heap.h:598
OldSpace * old_data_space()
Definition: heap.h:595
Address new_space_top_after_last_gc_
Definition: heap.h:1464
void UpdateMaximumCommitted()
Definition: heap.cc:201
intptr_t CommittedMemory()
Definition: heap.cc:170
intptr_t Capacity()
Definition: heap.cc:160
MapSpace * map_space()
Definition: heap.h:597
GlobalHandles * global_handles()
Definition: isolate.h:917
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)
#define UPDATE_COUNTERS_FOR_SPACE(space)
const int KB
Definition: globals.h:106
PerThreadAssertScopeDebugOnly< HEAP_ALLOCATION_ASSERT, true > AllowHeapAllocation
Definition: assert-scope.h:114

References Capacity(), cell_space(), code_space(), CommittedMemory(), v8::internal::Isolate::counters(), crankshaft_codegen_bytes_generated_, v8::internal::Deoptimizer::DeoptimizeAll(), full_codegen_bytes_generated_, v8::internal::StoreBuffer::GCEpilogue(), gcs_since_last_deopt_, v8::internal::Isolate::global_handles(), isolate(), isolate_, v8::internal::KB, lo_space(), map_space(), MaximumCommittedMemory(), new_space(), new_space_top_after_last_gc_, old_data_space(), old_pointer_space(), ProcessPretenuringFeedback(), property_cell_space(), ReportStatisticsAfterGC(), ShouldZapGarbage(), SizeOfObjects(), store_buffer(), v8::internal::NewSpace::top(), UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE, UPDATE_COUNTERS_FOR_SPACE, UpdateMaximumCommitted(), and ZapFromSpace().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GarbageCollectionPrologue()

void v8::internal::Heap::GarbageCollectionPrologue ( )
private

Definition at line 410 of file heap.cc.

410  {
411  {
412  AllowHeapAllocation for_the_first_part_of_prologue;
414  gc_count_++;
416 
417  if (FLAG_flush_code && FLAG_flush_code_incrementally) {
419  }
420 
421 #ifdef VERIFY_HEAP
422  if (FLAG_verify_heap) {
423  Verify();
424  }
425 #endif
426  }
427 
428  // Reset GC statistics.
433  nodes_promoted_ = 0;
434 
436 
437 #ifdef DEBUG
438  DCHECK(!AllowHeapAllocation::IsAllowed() && gc_state_ == NOT_IN_GC);
439 
440  if (FLAG_gc_verbose) Print();
441 
443 #endif // DEBUG
444 
446 
447  if (isolate()->concurrent_osr_enabled()) {
449  }
450 
453  } else {
455  }
457 }
void CheckNewSpaceExpansionCriteria()
Definition: heap.cc:1291
void ClearJSFunctionResultCaches()
Definition: heap.cc:980
void ReportStatisticsBeforeGC()
Definition: heap.cc:291

References v8::internal::OptimizingCompilerThread::AgeBufferedOsrJobs(), CheckNewSpaceExpansionCriteria(), ClearJSFunctionResultCaches(), DCHECK, v8::internal::MarkCompactCollector::EnableCodeFlushing(), gc_count_, gc_state_, v8::internal::StoreBuffer::GCPrologue(), v8::internal::NewSpace::IsAtMaximumCapacity(), isolate(), mark_compact_collector(), maximum_size_scavenges_, new_space_, nodes_copied_in_new_space_, nodes_died_in_new_space_, nodes_promoted_, NOT_IN_GC, v8::internal::Isolate::optimizing_compiler_thread(), promoted_objects_size_, ReportStatisticsBeforeGC(), semi_space_copied_object_size_, store_buffer(), unflattened_strings_length_, and UpdateMaximumCommitted().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ gc_count()

int v8::internal::Heap::gc_count ( ) const
inline

Definition at line 1228 of file heap.h.

1228 { return gc_count_; }

References gc_count_.

◆ gc_state()

HeapState v8::internal::Heap::gc_state ( )
inline

Definition at line 955 of file heap.h.

955 { return gc_state_; }

References gc_state_.

Referenced by v8::internal::MustRecordSlots(), v8::internal::GlobalHandles::PostGarbageCollectionProcessing(), v8::internal::IC::SetTargetAtAddress(), v8::internal::IncrementalMarking::Start(), v8::internal::IncrementalMarking::Step(), and v8::internal::IncrementalMarking::WorthActivating().

+ Here is the caller graph for this function:

◆ GcSafeSizeOfOldObject()

int v8::internal::Heap::GcSafeSizeOfOldObject ( HeapObject object)
staticprivate

Definition at line 228 of file heap.cc.

228  {
229  if (IntrusiveMarking::IsMarked(object)) {
231  }
232  return object->SizeFromMap(object->map());
233 }
static int SizeOfMarkedObject(HeapObject *object)
Definition: heap.h:2432
static bool IsMarked(HeapObject *object)
Definition: heap.h:2411

References v8::internal::IntrusiveMarking::IsMarked(), v8::internal::HeapObject::map(), and v8::internal::IntrusiveMarking::SizeOfMarkedObject().

Referenced by MarkMapPointersAsEncoded().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GcSafeSizeOfOldObjectFunction()

HeapObjectCallback v8::internal::Heap::GcSafeSizeOfOldObjectFunction ( )
inline

Definition at line 874 of file heap.h.

874  {
876  }

References gc_safe_size_of_old_object_.

◆ get_max_alive_after_gc()

intptr_t v8::internal::Heap::get_max_alive_after_gc ( )
inline

Definition at line 1192 of file heap.h.

1192 { return max_alive_after_gc_; }

References max_alive_after_gc_.

Referenced by TearDown().

+ Here is the caller graph for this function:

◆ get_max_gc_pause()

double v8::internal::Heap::get_max_gc_pause ( )
inline

Definition at line 1189 of file heap.h.

1189 { return max_gc_pause_; }

References max_gc_pause_.

Referenced by TearDown().

+ Here is the caller graph for this function:

◆ get_min_in_mutator()

double v8::internal::Heap::get_min_in_mutator ( )
inline

Definition at line 1195 of file heap.h.

1195 { return min_in_mutator_; }

References min_in_mutator_.

Referenced by TearDown().

+ Here is the caller graph for this function:

◆ global_ic_age()

int v8::internal::Heap::global_ic_age ( )
inline

Definition at line 1268 of file heap.h.

1268 { return global_ic_age_; }

References global_ic_age_.

Referenced by v8::internal::Compiler::CompileScript(), and v8::internal::Compiler::GetFunctionFromEval().

+ Here is the caller graph for this function:

◆ HasBeenSetUp()

bool v8::internal::Heap::HasBeenSetUp ( )

Definition at line 221 of file heap.cc.

221  {
222  return old_pointer_space_ != NULL && old_data_space_ != NULL &&
223  code_space_ != NULL && map_space_ != NULL && cell_space_ != NULL &&
225 }

References cell_space_, code_space_, lo_space_, map_space_, NULL, old_data_space_, old_pointer_space_, and property_cell_space_.

Referenced by Available(), Capacity(), CommittedMemory(), CommittedMemoryExecutable(), CommittedPhysicalMemory(), ConfigureHeap(), Contains(), v8::internal::Isolate::Init(), InSpace(), v8::internal::CpuProfiler::StartProcessorIfNotStarted(), and UpdateMaximumCommitted().

+ Here is the caller graph for this function:

◆ HashSeed()

uint32_t v8::internal::Heap::HashSeed ( )
inline

Definition at line 1237 of file heap.h.

1237  {
1238  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
1239  DCHECK(FLAG_randomize_hashes || seed == 0);
1240  return seed;
1241  }

References DCHECK.

Referenced by v8::internal::BackgroundParsingTask::BackgroundParsingTask(), v8::internal::NativeObjectsExplorer::FindOrAddGroupInfo(), v8::internal::HeapObjectsMap::GenerateId(), v8::internal::StringTable::LookupTwoCharsStringIfExists(), and v8::internal::Parser::Parse().

+ Here is the caller graph for this function:

◆ hidden_string()

String* v8::internal::Heap::hidden_string ( )
inline

Definition at line 788 of file heap.h.

788 { return hidden_string_; }

References hidden_string_.

Referenced by v8::internal::JSObject::DefineAccessor(), and v8::internal::V8HeapExplorer::ExtractPropertyReferences().

+ Here is the caller graph for this function:

◆ IdleNotification()

bool v8::internal::Heap::IdleNotification ( int  idle_time_in_ms)

Definition at line 4296 of file heap.cc.

4296  {
4297  // If incremental marking is off, we do not perform idle notification.
4298  if (!FLAG_incremental_marking) return true;
4299  base::ElapsedTimer timer;
4300  timer.Start();
4301  isolate()->counters()->gc_idle_time_allotted_in_ms()->AddSample(
4302  idle_time_in_ms);
4303  HistogramTimerScope idle_notification_scope(
4304  isolate_->counters()->gc_idle_notification());
4305 
4306  GCIdleTimeHandler::HeapState heap_state;
4307  heap_state.contexts_disposed = contexts_disposed_;
4308  heap_state.size_of_objects = static_cast<size_t>(SizeOfObjects());
4309  heap_state.incremental_marking_stopped = incremental_marking()->IsStopped();
4310  // TODO(ulan): Start incremental marking only for large heaps.
4311  heap_state.can_start_incremental_marking =
4313  heap_state.sweeping_in_progress =
4315  heap_state.mark_compact_speed_in_bytes_per_ms =
4316  static_cast<size_t>(tracer()->MarkCompactSpeedInBytesPerMillisecond());
4317  heap_state.incremental_marking_speed_in_bytes_per_ms = static_cast<size_t>(
4319  heap_state.scavenge_speed_in_bytes_per_ms =
4320  static_cast<size_t>(tracer()->ScavengeSpeedInBytesPerMillisecond());
4321  heap_state.available_new_space_memory = new_space_.Available();
4322  heap_state.new_space_capacity = new_space_.Capacity();
4323  heap_state.new_space_allocation_throughput_in_bytes_per_ms =
4324  static_cast<size_t>(
4326 
4327  GCIdleTimeAction action =
4328  gc_idle_time_handler_.Compute(idle_time_in_ms, heap_state);
4329 
4330  bool result = false;
4331  switch (action.type) {
4332  case DONE:
4333  result = true;
4334  break;
4336  if (incremental_marking()->IsStopped()) {
4338  }
4339  AdvanceIdleIncrementalMarking(action.parameter);
4340  break;
4341  case DO_FULL_GC: {
4342  HistogramTimerScope scope(isolate_->counters()->gc_context());
4343  const char* message = contexts_disposed_
4344  ? "idle notification: contexts disposed"
4345  : "idle notification: finalize idle round";
4348  break;
4349  }
4350  case DO_SCAVENGE:
4351  CollectGarbage(NEW_SPACE, "idle notification: scavenge");
4352  break;
4353  case DO_FINALIZE_SWEEPING:
4355  break;
4356  case DO_NOTHING:
4357  break;
4358  }
4359 
4360  int actual_time_ms = static_cast<int>(timer.Elapsed().InMilliseconds());
4361  if (actual_time_ms <= idle_time_in_ms) {
4362  if (action.type != DONE && action.type != DO_NOTHING) {
4363  isolate()->counters()->gc_idle_time_limit_undershot()->AddSample(
4364  idle_time_in_ms - actual_time_ms);
4365  }
4366  } else {
4367  isolate()->counters()->gc_idle_time_limit_overshot()->AddSample(
4368  actual_time_ms - idle_time_in_ms);
4369  }
4370 
4371  if (FLAG_trace_idle_notification) {
4372  PrintF("Idle notification: requested idle time %d ms, actual time %d ms [",
4373  idle_time_in_ms, actual_time_ms);
4374  action.Print();
4375  PrintF("]\n");
4376  }
4377 
4378  contexts_disposed_ = 0;
4379  return result;
4380 }
GCIdleTimeAction Compute(size_t idle_time_in_ms, HeapState heap_state)
intptr_t MarkCompactSpeedInBytesPerMillisecond() const
Definition: gc-tracer.cc:448
intptr_t IncrementalMarkingSpeedInBytesPerMillisecond() const
Definition: gc-tracer.cc:407
intptr_t ScavengeSpeedInBytesPerMillisecond() const
Definition: gc-tracer.cc:432
intptr_t NewSpaceAllocationThroughputInBytesPerMillisecond() const
Definition: gc-tracer.cc:465
void AdvanceIdleIncrementalMarking(intptr_t step_size)
Definition: heap.cc:4267

References AdvanceIdleIncrementalMarking(), v8::internal::NewSpace::Available(), v8::internal::GCIdleTimeHandler::HeapState::available_new_space_memory, v8::internal::GCIdleTimeHandler::HeapState::can_start_incremental_marking, v8::internal::NewSpace::Capacity(), CollectAllGarbage(), CollectGarbage(), v8::internal::GCIdleTimeHandler::Compute(), v8::internal::GCIdleTimeHandler::HeapState::contexts_disposed, contexts_disposed_, v8::internal::Isolate::counters(), v8::internal::DO_FINALIZE_SWEEPING, v8::internal::DO_FULL_GC, v8::internal::DO_INCREMENTAL_MARKING, v8::internal::DO_NOTHING, v8::internal::DO_SCAVENGE, v8::internal::DONE, v8::internal::MarkCompactCollector::EnsureSweepingCompleted(), gc_idle_time_handler_, incremental_marking(), v8::internal::GCIdleTimeHandler::HeapState::incremental_marking_speed_in_bytes_per_ms, v8::internal::GCIdleTimeHandler::HeapState::incremental_marking_stopped, v8::internal::GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond(), isolate(), isolate_, v8::internal::IncrementalMarking::IsStopped(), kReduceMemoryFootprintMask, mark_compact_collector(), v8::internal::GCIdleTimeHandler::HeapState::mark_compact_speed_in_bytes_per_ms, v8::internal::GCTracer::MarkCompactSpeedInBytesPerMillisecond(), v8::internal::NEW_SPACE, new_space_, v8::internal::GCIdleTimeHandler::HeapState::new_space_allocation_throughput_in_bytes_per_ms, v8::internal::GCIdleTimeHandler::HeapState::new_space_capacity, v8::internal::GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(), v8::internal::GCIdleTimeHandler::NotifyIdleMarkCompact(), v8::internal::GCIdleTimeAction::parameter, v8::internal::GCIdleTimeAction::Print(), v8::internal::PrintF(), v8::internal::GCIdleTimeHandler::HeapState::scavenge_speed_in_bytes_per_ms, v8::internal::GCTracer::ScavengeSpeedInBytesPerMillisecond(), v8::internal::IncrementalMarking::ShouldActivate(), v8::internal::GCIdleTimeHandler::HeapState::size_of_objects, SizeOfObjects(), v8::internal::IncrementalMarking::Start(), 
v8::internal::GCIdleTimeHandler::HeapState::sweeping_in_progress, v8::internal::MarkCompactCollector::sweeping_in_progress(), tracer(), and v8::internal::GCIdleTimeAction::type.

Referenced by v8::Isolate::IdleNotification().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IncreaseTotalRegexpCodeGenerated()

void v8::internal::Heap::IncreaseTotalRegexpCodeGenerated ( int  size)
inline

Definition at line 1172 of file heap.h.

1172  {
1174  }

References size, and total_regexp_code_generated_.

Referenced by v8::internal::RegExpCompiler::Assemble().

+ Here is the caller graph for this function:

◆ increment_scan_on_scavenge_pages()

void v8::internal::Heap::increment_scan_on_scavenge_pages ( )
inline

Definition at line 739 of file heap.h.

739  {
741  if (FLAG_gc_verbose) {
742  PrintF("Scan-on-scavenge pages: %d\n", scan_on_scavenge_pages_);
743  }
744  }

References v8::internal::PrintF(), and scan_on_scavenge_pages_.

Referenced by v8::internal::MemoryChunk::set_scan_on_scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ incremental_marking()

IncrementalMarking* v8::internal::Heap::incremental_marking ( )
inline

Definition at line 1205 of file heap.h.

1205 { return &incremental_marking_; }

References incremental_marking_.

Referenced by v8::internal::RecordWriteStub::Activate(), AdjustLiveBytes(), AdvanceIdleIncrementalMarking(), v8::internal::FreeList::Allocate(), v8::internal::LargeObjectSpace::AllocateRaw(), ClearNormalizedMapCaches(), CollectAllAvailableGarbage(), v8::internal::MarkCompactCollector::CollectGarbage(), CollectGarbage(), v8::internal::CopyDictionaryToObjectElements(), v8::internal::CopyObjectToObjectElements(), v8::internal::Map::DeprecateTarget(), v8::internal::Map::EnsureDescriptorSlack(), v8::internal::CodeFlusher::EvictCandidate(), v8::internal::CodeFlusher::EvictOptimizedCodeMap(), v8::internal::HeapObject::GetWriteBarrierMode(), IdleNotification(), v8::internal::NewSpacePage::Initialize(), v8::internal::LargePage::Initialize(), v8::internal::Page::Initialize(), v8::internal::IncrementalMarkingMarkingVisitor::INLINE(), v8::internal::MarkCompactCollector::InvalidateCode(), IterateAndMarkPointersToFromSpace(), v8::internal::MarkCompactCollector::MarkLiveObjects(), MoveElements(), OldGenerationAllocationLimitReached(), PerformGarbageCollection(), v8::internal::MarkCompactCollector::Prepare(), v8::internal::IncrementalMarking::RecordWriteFromCode(), Scavenge(), SelectScavengingVisitorsTable(), v8::internal::JSFunction::set_code(), v8::internal::HeapObject::set_map(), v8::internal::MemoryChunk::set_scan_on_scavenge(), v8::internal::IC::SetTargetAtAddress(), v8::internal::NewSpace::SlowAllocateRaw(), v8::internal::HeapObject::synchronized_set_map(), TearDown(), v8::internal::Marking::TransferMark(), v8::internal::IncrementalMarkingMarkingVisitor::VisitFixedArrayIncremental(), and WorthActivatingIncrementalMarking().

+ Here is the caller graph for this function:

◆ IncrementCodeGeneratedBytes()

void v8::internal::Heap::IncrementCodeGeneratedBytes ( bool  is_crankshafted,
int  size 
)
inline

Definition at line 1176 of file heap.h.

1176  {
1177  if (is_crankshafted) {
1179  } else {
1181  }
1182  }

References crankshaft_codegen_bytes_generated_, full_codegen_bytes_generated_, and size.

Referenced by v8::internal::CodeGenerator::MakeCodeEpilogue().

+ Here is the caller graph for this function:

◆ IncrementNodesCopiedInNewSpace()

void v8::internal::Heap::IncrementNodesCopiedInNewSpace ( )
inline

Definition at line 1126 of file heap.h.

References nodes_copied_in_new_space_.

Referenced by v8::internal::GlobalHandles::PostGarbageCollectionProcessing().

+ Here is the caller graph for this function:

◆ IncrementNodesDiedInNewSpace()

void v8::internal::Heap::IncrementNodesDiedInNewSpace ( )
inline

Definition at line 1124 of file heap.h.

References nodes_died_in_new_space_.

Referenced by v8::internal::GlobalHandles::PostGarbageCollectionProcessing().

+ Here is the caller graph for this function:

◆ IncrementNodesPromoted()

void v8::internal::Heap::IncrementNodesPromoted ( )
inline

Definition at line 1128 of file heap.h.

1128 { nodes_promoted_++; }

References nodes_promoted_.

Referenced by v8::internal::GlobalHandles::PostGarbageCollectionProcessing().

+ Here is the caller graph for this function:

◆ IncrementPromotedObjectsSize()

void v8::internal::Heap::IncrementPromotedObjectsSize ( int  object_size)
inline

Definition at line 1114 of file heap.h.

1114  {
1115  DCHECK(object_size > 0);
1116  promoted_objects_size_ += object_size;
1117  }

References DCHECK, and promoted_objects_size_.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::PromoteObject(), and v8::internal::MarkCompactCollector::TryPromoteObject().

+ Here is the caller graph for this function:

◆ IncrementSemiSpaceCopiedObjectSize()

void v8::internal::Heap::IncrementSemiSpaceCopiedObjectSize ( int  object_size)
inline

Definition at line 1119 of file heap.h.

1119  {
1120  DCHECK(object_size > 0);
1121  semi_space_copied_object_size_ += object_size;
1122  }

References DCHECK, and semi_space_copied_object_size_.

Referenced by v8::internal::MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(), and v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::SemiSpaceCopyObject().

+ Here is the caller graph for this function:

◆ IncrementYoungSurvivorsCounter()

void v8::internal::Heap::IncrementYoungSurvivorsCounter ( int  survived)
inline

Definition at line 1130 of file heap.h.

1130  {
1131  DCHECK(survived >= 0);
1132  survived_since_last_expansion_ += survived;
1133  }

References DCHECK, and survived_since_last_expansion_.

Referenced by v8::internal::MarkCompactCollector::EvacuateNewSpace(), and Scavenge().

+ Here is the caller graph for this function:

◆ InFromSpace()

bool v8::internal::Heap::InFromSpace ( Object *  object)
inline

Definition at line 334 of file heap-inl.h.

334  {
335  return new_space_.FromSpaceContains(object);
336 }
bool FromSpaceContains(Address address)
Definition: spaces.h:2494

References v8::internal::NewSpace::FromSpaceContains(), and new_space_.

Referenced by IterateAndMarkPointersToFromSpace(), v8::internal::StoreBuffer::IteratePointersInStoreBuffer(), v8::internal::CodeFlusher::IteratePointersToFromSpace(), v8::internal::ScavengeWeakObjectRetainer::RetainAs(), ScavengeObject(), ScavengeObjectSlow(), UpdateAllocationSiteFeedback(), UpdateNewSpaceReferencesInExternalStringTable(), and v8::internal::PointersUpdatingVisitor::UpdateSlot().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitializeAllocationMemento()

void v8::internal::Heap::InitializeAllocationMemento ( AllocationMemento *  memento,
AllocationSite *  allocation_site 
)
private

Definition at line 3563 of file heap.cc.

3564  {
3565  memento->set_map_no_write_barrier(allocation_memento_map());
3566  DCHECK(allocation_site->map() == allocation_site_map());
3567  memento->set_allocation_site(allocation_site, SKIP_WRITE_BARRIER);
3568  if (FLAG_allocation_site_pretenuring) {
3569  allocation_site->IncrementMementoCreateCount();
3570  }
3571 }

References DCHECK, v8::internal::AllocationSite::IncrementMementoCreateCount(), v8::internal::HeapObject::map(), v8::internal::HeapObject::set_map_no_write_barrier(), and v8::internal::SKIP_WRITE_BARRIER.

Referenced by Allocate(), and CopyJSObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitializeAllocationSitesScratchpad()

void v8::internal::Heap::InitializeAllocationSitesScratchpad ( )
private

Definition at line 3068 of file heap.cc.

3068  {
3069  DCHECK(allocation_sites_scratchpad()->length() ==
3071  for (int i = 0; i < kAllocationSiteScratchpadSize; i++) {
3072  allocation_sites_scratchpad()->set_undefined(i);
3073  }
3074 }

References DCHECK, and kAllocationSiteScratchpadSize.

Referenced by CreateInitialObjects().

+ Here is the caller graph for this function:

◆ InitializeJSObjectFromMap()

void v8::internal::Heap::InitializeJSObjectFromMap ( JSObject *  obj,
FixedArray *  properties,
Map *  map 
)
private

Definition at line 3600 of file heap.cc.

3601  {
3602  obj->set_properties(properties);
3603  obj->initialize_elements();
3604  // TODO(1240798): Initialize the object's body using valid initial values
3605  // according to the object's initial map. For example, if the map's
3606  // instance type is JS_ARRAY_TYPE, the length field should be initialized
3607  // to a number (e.g. Smi::FromInt(0)) and the elements initialized to a
3608  // fixed array (e.g. Heap::empty_fixed_array()). Currently, the object
3609  // verification code has to cope with (temporarily) invalid objects. See
3610  // for example, JSArray::JSArrayVerify).
3611  Object* filler;
3612  // We cannot always fill with one_pointer_filler_map because objects
3613  // created from API functions expect their internal fields to be initialized
3614  // with undefined_value.
3615  // Pre-allocated fields need to be initialized with undefined_value as well
3616  // so that object accesses before the constructor completes (e.g. in the
3617  // debugger) will not cause a crash.
3618  if (map->constructor()->IsJSFunction() &&
3619  JSFunction::cast(map->constructor())
3620  ->IsInobjectSlackTrackingInProgress()) {
3621  // We might want to shrink the object later.
3622  DCHECK(obj->GetInternalFieldCount() == 0);
3623  filler = Heap::one_pointer_filler_map();
3624  } else {
3625  filler = Heap::undefined_value();
3626  }
3627  obj->InitializeBody(map, Heap::undefined_value(), filler);
3628 }

References DCHECK, v8::internal::JSObject::GetInternalFieldCount(), v8::internal::JSObject::initialize_elements(), v8::internal::JSObject::InitializeBody(), and map.

Referenced by AllocateJSObjectFromMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitializeWeakObjectToCodeTable()

void v8::internal::Heap::InitializeWeakObjectToCodeTable ( )
inline

Definition at line 1348 of file heap.h.

1348  {
1349  set_weak_object_to_code_table(undefined_value());
1350  }

References set_weak_object_to_code_table().

Referenced by v8::internal::Deserializer::Deserialize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InitialSemiSpaceSize()

int v8::internal::Heap::InitialSemiSpaceSize ( )
inline

Definition at line 554 of file heap.h.

554 { return initial_semispace_size_; }

References initial_semispace_size_.

Referenced by v8::internal::NewSpace::SetUp().

+ Here is the caller graph for this function:

◆ INLINE() [1/2]

v8::internal::Heap::INLINE ( void   RecordWrite(Address address, int offset) )

◆ INLINE() [2/2]

v8::internal::Heap::INLINE ( void   RecordWrites(Address address, int start, int len) )

◆ inline_allocation_disabled()

bool v8::internal::Heap::inline_allocation_disabled ( )
inline

Definition at line 1044 of file heap.h.

1044 { return inline_allocation_disabled_; }

References inline_allocation_disabled_.

Referenced by v8::internal::FreeList::Allocate().

+ Here is the caller graph for this function:

◆ InNewSpace() [1/2]

bool v8::internal::Heap::InNewSpace ( Address  address)
inline

Definition at line 331 of file heap-inl.h.

331 { return new_space_.Contains(address); }
bool Contains(Address a)
Definition: spaces.h:2349

References v8::internal::NewSpace::Contains(), and new_space_.

+ Here is the call graph for this function:

◆ InNewSpace() [2/2]

bool v8::internal::Heap::InNewSpace ( Object *  object)
inline

Definition at line 322 of file heap-inl.h.

322  {
323  bool result = new_space_.Contains(object);
324  DCHECK(!result || // Either not in new space
325  gc_state_ != NOT_IN_GC || // ... or in the middle of GC
326  InToSpace(object)); // ... or in to-space (where we allocate).
327  return result;
328 }
bool InToSpace(Object *object)
Definition: heap-inl.h:339

References v8::internal::NewSpace::Contains(), DCHECK, gc_state_, InToSpace(), new_space_, and NOT_IN_GC.

Referenced by v8::internal::ExternalStringTable::AddOldString(), v8::internal::ExternalStringTable::AddString(), AddWeakObjectToCodeDependency(), AllocateFixedArrayWithFiller(), v8::internal::HOptimizedGraphBuilder::BuildFastLiteral(), v8::internal::ExternalStringTable::CleanUp(), v8::internal::Map::ClearCodeCache(), CopyAndTenureFixedCOWArray(), v8::internal::CopyDictionaryToObjectElements(), CopyFixedArrayWithMap(), CopyJSObject(), v8::internal::CopyObjectToObjectElements(), v8::internal::EternalHandles::Create(), v8::internal::GlobalHandles::Create(), v8::internal::Runtime::CreateArrayLiteralBoilerplate(), CreateInitialMaps(), v8::internal::CreateObjectLiteralBoilerplate(), v8::internal::FastElementsAccessor< FastElementsAccessorSubclass, KindTraits >::DeleteCommon(), v8::internal::HashTable< Derived, Shape, Key >::EnsureCapacity(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateShortcutCandidate(), v8::internal::StoreBuffer::FindPointersToNewSpaceInRegion(), v8::internal::HeapObject::GetWriteBarrierMode(), v8::internal::IsUnscavengedHeapObject(), IterateAndMarkPointersToFromSpace(), v8::internal::StoreBuffer::IteratePointersInStoreBuffer(), MoveElements(), v8::internal::FINAL< kOperandKind, kNumCachedOperands >::New(), v8::internal::FixedArray::NoIncrementalWriteBarrierSet(), v8::internal::FixedArray::NoWriteBarrierSet(), v8::internal::GlobalHandles::PostGarbageCollectionProcessing(), v8::internal::EternalHandles::PostGarbageCollectionProcessing(), v8::internal::JSObject::PrepareElementsForSort(), v8::internal::Deserializer::ReadChunk(), v8::internal::MarkCompactCollector::RecordMigratedSlot(), v8::internal::ReplaceCodeObject(), RootCanBeTreatedAsConstant(), v8::internal::Serializer::RootIndex(), v8::internal::ScavengeVisitor::ScavengePointer(), set_weak_object_to_code_table(), v8::internal::ShortCircuitConsString(), v8::internal::HashTable< Derived, Shape, Key >::Shrink(), v8::internal::String::SlowFlatten(), 
v8::internal::StoreBuffer::Uniq(), v8::internal::JSObject::UpdateAllocationSite(), v8::internal::IncrementalMarking::UpdateMarkingDequeAfterScavenge(), UpdateNewSpaceReferencesInExternalStringTable(), v8::internal::ExternalStringTable::Verify(), and v8::internal::NewSpaceScavenger::VisitPointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InNewSpacePage()

bool v8::internal::Heap::InNewSpacePage ( Address  address)
inline

◆ InOldDataSpace() [1/2]

bool v8::internal::Heap::InOldDataSpace ( Address  address)
inline

Definition at line 354 of file heap-inl.h.

354  {
355  return old_data_space_->Contains(address);
356 }

References v8::internal::PagedSpace::Contains(), and old_data_space_.

Referenced by CanMoveObjectStart(), and InOldDataSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InOldDataSpace() [2/2]

bool v8::internal::Heap::InOldDataSpace ( Object *  object)
inline

Definition at line 359 of file heap-inl.h.

359  {
360  return InOldDataSpace(reinterpret_cast<Address>(object));
361 }

References InOldDataSpace().

+ Here is the call graph for this function:

◆ InOldPointerSpace() [1/2]

bool v8::internal::Heap::InOldPointerSpace ( Address  address)
inline

Definition at line 344 of file heap-inl.h.

344  {
345  return old_pointer_space_->Contains(address);
346 }

References v8::internal::PagedSpace::Contains(), and old_pointer_space_.

Referenced by CanMoveObjectStart(), and InOldPointerSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InOldPointerSpace() [2/2]

bool v8::internal::Heap::InOldPointerSpace ( Object *  object)
inline

Definition at line 349 of file heap-inl.h.

349  {
350  return InOldPointerSpace(reinterpret_cast<Address>(object));
351 }

References InOldPointerSpace().

+ Here is the call graph for this function:

◆ InSpace() [1/2]

bool v8::internal::Heap::InSpace ( Address  addr,
AllocationSpace  space 
)

Definition at line 4464 of file heap.cc.

4464  {
4465  if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
4466  if (!HasBeenSetUp()) return false;
4467 
4468  switch (space) {
4469  case NEW_SPACE:
4470  return new_space_.ToSpaceContains(addr);
4471  case OLD_POINTER_SPACE:
4472  return old_pointer_space_->Contains(addr);
4473  case OLD_DATA_SPACE:
4474  return old_data_space_->Contains(addr);
4475  case CODE_SPACE:
4476  return code_space_->Contains(addr);
4477  case MAP_SPACE:
4478  return map_space_->Contains(addr);
4479  case CELL_SPACE:
4480  return cell_space_->Contains(addr);
4481  case PROPERTY_CELL_SPACE:
4482  return property_cell_space_->Contains(addr);
4483  case LO_SPACE:
4484  return lo_space_->SlowContains(addr);
4485  case INVALID_SPACE:
4486  break;
4487  }
4488  UNREACHABLE();
4489  return false;
4490 }

References v8::internal::CELL_SPACE, cell_space_, v8::internal::CODE_SPACE, code_space_, v8::internal::PagedSpace::Contains(), HasBeenSetUp(), v8::internal::INVALID_SPACE, isolate_, v8::internal::MemoryAllocator::IsOutsideAllocatedSpace(), v8::internal::LO_SPACE, lo_space_, v8::internal::MAP_SPACE, map_space_, v8::internal::Isolate::memory_allocator(), v8::internal::NEW_SPACE, new_space_, v8::internal::OLD_DATA_SPACE, old_data_space_, v8::internal::OLD_POINTER_SPACE, old_pointer_space_, v8::internal::PROPERTY_CELL_SPACE, property_cell_space_, v8::internal::LargeObjectSpace::SlowContains(), space(), v8::internal::NewSpace::ToSpaceContains(), and UNREACHABLE.

Referenced by InSpace(), and v8::internal::Serializer::SpaceOfObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ InSpace() [2/2]

bool v8::internal::Heap::InSpace ( HeapObject *  value,
AllocationSpace  space 
)

Definition at line 4459 of file heap.cc.

4459  {
4460  return InSpace(value->address(), space);
4461 }
bool InSpace(Address addr, AllocationSpace space)
Definition: heap.cc:4464

References v8::internal::HeapObject::address(), InSpace(), and space().

+ Here is the call graph for this function:

◆ InternalizeString()

MUST_USE_RESULT AllocationResult v8::internal::Heap::InternalizeString ( String *  str)
private

◆ InternalizeStringWithKey()

MUST_USE_RESULT AllocationResult v8::internal::Heap::InternalizeStringWithKey ( HashTableKey *  key)
private

◆ InToSpace()

bool v8::internal::Heap::InToSpace ( Object *  object)
inline

Definition at line 339 of file heap-inl.h.

339  {
340  return new_space_.ToSpaceContains(object);
341 }

References new_space_, and v8::internal::NewSpace::ToSpaceContains().

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::INLINE(), InNewSpace(), and IterateAndMarkPointersToFromSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsHeapIterable()

bool v8::internal::Heap::IsHeapIterable ( )

Definition at line 4248 of file heap.cc.

4248  {
4249  // TODO(hpayer): This function is not correct. Allocation folding in old
4250  // space breaks the iterability.
4252 }

References new_space(), new_space_top_after_last_gc_, and v8::internal::NewSpace::top().

Referenced by MakeHeapIterable().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsHighSurvivalRate()

bool v8::internal::Heap::IsHighSurvivalRate ( )
inlineprivate

Definition at line 1925 of file heap.h.

1925 { return high_survival_rate_period_length_ > 0; }

References high_survival_rate_period_length_.

Referenced by PerformGarbageCollection().

+ Here is the caller graph for this function:

◆ IsInGCPostProcessing()

bool v8::internal::Heap::IsInGCPostProcessing ( )
inline

Definition at line 957 of file heap.h.

957 { return gc_post_processing_depth_ > 0; }

References gc_post_processing_depth_.

Referenced by v8::String::MakeExternal().

+ Here is the caller graph for this function:

◆ isolate()

Isolate * v8::internal::Heap::isolate ( )
inline

Definition at line 589 of file heap-inl.h.

589  {
590  return reinterpret_cast<Isolate*>(
591  reinterpret_cast<intptr_t>(this) -
592  reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
593 }
friend class Isolate
Definition: heap.h:2029

References v8::internal::Isolate::heap().

Referenced by v8::internal::IncrementalMarking::Abort(), AddWeakObjectToCodeDependency(), v8::internal::MarkCompactCollector::AfterMarking(), AllocateConstantPoolArray(), AllocateExtendedConstantPoolArray(), v8::internal::LargeObjectSpace::AllocateRaw(), AllocateSymbol(), v8::internal::AllocationTracker::AllocationEvent(), v8::internal::ArrayPrototypeHasNoElements(), v8::internal::RegExpCompiler::Assemble(), CallGCEpilogueCallbacks(), CallGCPrologueCallbacks(), CheckpointObjectStats(), CollectAllAvailableGarbage(), v8::internal::MemoryChunk::CommitArea(), CommittedMemoryExecutable(), v8::internal::StoreBuffer::Compact(), CreateApiObjects(), v8::internal::PagedSpace::CreateEmergencyMemory(), CreateFixedStubs(), CreateInitialObjects(), v8::internal::Deoptimizer::DeoptimizeGlobalObject(), v8::internal::Deoptimizer::DeoptimizeMarkedCodeForContext(), EnsureWeakObjectToCodeTable(), v8::internal::EnumerateCompiledFunctions(), v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates(), v8::internal::PagedSpace::Expand(), v8::internal::V8HeapExplorer::ExtractJSArrayBufferReferences(), v8::internal::V8HeapExplorer::ExtractJSObjectReferences(), v8::internal::V8HeapExplorer::ExtractSharedFunctionInfoReferences(), v8::internal::RootsReferencesExtractor::FillReferences(), v8::internal::IncrementalMarking::Finalize(), v8::internal::FindHidden(), v8::internal::PagedSpace::FreeEmergencyMemory(), v8::internal::LargeObjectSpace::FreeUnmarkedObjects(), GarbageCollectionEpilogue(), GarbageCollectionPrologue(), v8::internal::HeapObject::GetIsolate(), IdleNotification(), v8::internal::MarkCompactMarkingVisitor::INLINE(), v8::internal::IsJSArrayFastElementMovingAllowed(), v8::internal::MarkCompactCollector::isolate(), IterateSmiRoots(), v8::internal::IncrementalMarking::MarkingComplete(), v8::internal::MarkCompactCollector::MarkLiveObjects(), NotifyContextDisposed(), v8::internal::PagedSpace::PagedSpace(), v8::internal::MarkCompactCollector::PrepareForCodeFlushing(), 
v8::internal::GCTracer::Print(), v8::internal::GCTracer::PrintNVP(), v8::internal::IncrementalMarking::RecordCodeTargetPatch(), RecordStats(), v8::internal::JSObject::ReferencesObject(), v8::internal::PagedSpace::ReleasePage(), v8::internal::NewSpace::ReportStatistics(), RightTrimFixedArray(), Scavenge(), SelectScavengingVisitorsTable(), v8::internal::V8HeapExplorer::SetGcSubrootReference(), v8::internal::DependentCode::SetMarkedForDeoptimization(), v8::internal::HeapProfiler::SetRetainedObjectInfo(), SetStackLimits(), SetUp(), v8::internal::NewSpace::SetUp(), v8::internal::SemiSpace::ShrinkTo(), v8::internal::PagedSpace::SizeOfFirstPage(), v8::internal::IncrementalMarking::Start(), v8::internal::GCTracer::Start(), v8::internal::IncrementalMarking::StartMarking(), v8::internal::IncrementalMarking::Step(), v8::internal::GCTracer::Stop(), v8::internal::V8HeapExplorer::TagGlobalObjects(), v8::internal::PagedSpace::TearDown(), v8::internal::NewSpace::TearDown(), v8::internal::LargeObjectSpace::TearDown(), TearDownArrayBuffers(), v8::internal::String::ToCString(), v8::internal::ReplacementStringBuilder::ToString(), v8::internal::String::ToWideCString(), v8::internal::SeqString::Truncate(), v8::internal::SlotsBuffer::UpdateSlots(), v8::internal::SlotsBuffer::UpdateSlotsWithFilter(), v8::internal::RelocInfo::Visit(), v8::internal::WeakListVisitor< JSArrayBuffer >::VisitPhantomObject(), and v8::internal::IncrementalMarking::WorthActivating().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IsOneByte() [1/3]

template<>
bool v8::internal::Heap::IsOneByte ( String *  str,
int  chars 
)
inline

Definition at line 61 of file heap-inl.h.

61  {
62  return str->IsOneByteRepresentation();
63 }

References v8::internal::String::IsOneByteRepresentation().

+ Here is the call graph for this function:

◆ IsOneByte() [2/3]

template<typename T >
static bool v8::internal::Heap::IsOneByte ( T  t,
int  chars 
)
inlinestatic

Referenced by AllocateInternalizedStringFromUtf8(), and AllocateInternalizedStringImpl().

+ Here is the caller graph for this function:

◆ IsOneByte() [3/3]

template<>
bool v8::internal::Heap::IsOneByte ( Vector< const char >  str,
int  chars 
)
inline

Definition at line 54 of file heap-inl.h.

54  {
55  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
56  return chars == str.length();
57 }

References v8::internal::Vector< T >::length().

+ Here is the call graph for this function:

◆ IterateAndMarkPointersToFromSpace()

void v8::internal::Heap::IterateAndMarkPointersToFromSpace ( Address  start,
Address  end,
ObjectSlotCallback  callback 
)

Definition at line 4540 of file heap.cc.

4541  {
4542  Address slot_address = start;
4543 
4544  // We are not collecting slots on new space objects during mutation
4545  // thus we have to scan for pointers to evacuation candidates when we
4546  // promote objects. But we should not record any slots in non-black
4547  // objects. Grey object's slots would be rescanned.
4548  // White object might not survive until the end of collection
4549  // it would be a violation of the invariant to record it's slots.
4550  bool record_slots = false;
4551  if (incremental_marking()->IsCompacting()) {
4552  MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start));
4553  record_slots = Marking::IsBlack(mark_bit);
4554  }
4555 
4556  while (slot_address < end) {
4557  Object** slot = reinterpret_cast<Object**>(slot_address);
4558  Object* object = *slot;
4559  // If the store buffer becomes overfull we mark pages as being exempt from
4560  // the store buffer. These pages are scanned to find pointers that point
4561  // to the new space. In that case we may hit newly promoted objects and
4562  // fix the pointers before the promotion queue gets to them. Thus the 'if'.
4563  if (object->IsHeapObject()) {
4564  if (Heap::InFromSpace(object)) {
4565  callback(reinterpret_cast<HeapObject**>(slot),
4566  HeapObject::cast(object));
4567  Object* new_object = *slot;
4568  if (InNewSpace(new_object)) {
4569  SLOW_DCHECK(Heap::InToSpace(new_object));
4570  SLOW_DCHECK(new_object->IsHeapObject());
4572  reinterpret_cast<Address>(slot));
4573  }
4574  SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
4575  } else if (record_slots &&
4576  MarkCompactCollector::IsOnEvacuationCandidate(object)) {
4577  mark_compact_collector()->RecordSlot(slot, slot, object);
4578  }
4579  }
4580  slot_address += kPointerSize;
4581  }
4582 }
bool InFromSpace(Object *object)
Definition: heap-inl.h:334
void EnterDirectlyIntoStoreBuffer(Address addr)

References v8::internal::StoreBuffer::EnterDirectlyIntoStoreBuffer(), v8::internal::HeapObject::FromAddress(), incremental_marking(), InFromSpace(), InNewSpace(), InToSpace(), v8::internal::kPointerSize, mark_compact_collector(), SLOW_DCHECK, and store_buffer_.

Referenced by DoScavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateRoots()

void v8::internal::Heap::IterateRoots ( ObjectVisitor *  v,
VisitMode  mode 
)

Definition at line 4722 of file heap.cc.

4722  {
4724  IterateWeakRoots(v, mode);
4725 }
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4728
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4747

References IterateStrongRoots(), IterateWeakRoots(), and mode().

Referenced by v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates(), v8::internal::V8HeapExplorer::IterateAndExtractReferences(), v8::internal::ReplaceCodeObject(), and Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateSmiRoots()

void v8::internal::Heap::IterateSmiRoots ( ObjectVisitor *  v)

Definition at line 4739 of file heap.cc.

4739  {
4740  // Acquire execution access since we are going to read stack limit values.
4741  ExecutionAccess access(isolate());
4742  v->VisitPointers(&roots_[kSmiRootsStart], &roots_[kRootListLength]);
4743  v->Synchronize(VisitorSynchronization::kSmiRootList);
4744 }

References isolate(), kRootListLength, kSmiRootsStart, and roots_.

Referenced by v8::internal::Deserializer::Deserialize(), and v8::internal::StartupSerializer::SerializeStrongReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateStrongRoots()

void v8::internal::Heap::IterateStrongRoots ( ObjectVisitor *  v,
VisitMode  mode 
)

Definition at line 4747 of file heap.cc.

4747  {
4748  v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
4749  v->Synchronize(VisitorSynchronization::kStrongRootList);
4750 
4751  v->VisitPointer(bit_cast<Object**>(&hidden_string_));
4752  v->Synchronize(VisitorSynchronization::kInternalizedString);
4753 
4754  isolate_->bootstrapper()->Iterate(v);
4755  v->Synchronize(VisitorSynchronization::kBootstrapper);
4756  isolate_->Iterate(v);
4757  v->Synchronize(VisitorSynchronization::kTop);
4758  Relocatable::Iterate(isolate_, v);
4759  v->Synchronize(VisitorSynchronization::kRelocatable);
4760 
4761  if (isolate_->deoptimizer_data() != NULL) {
4763  }
4764  v->Synchronize(VisitorSynchronization::kDebug);
4766  v->Synchronize(VisitorSynchronization::kCompilationCache);
4767 
4768  // Iterate over local handles in handle scopes.
4771  v->Synchronize(VisitorSynchronization::kHandleScope);
4772 
4773  // Iterate over the builtin code objects and code stubs in the
4774  // heap. Note that it is not necessary to iterate over code objects
4775  // on scavenge collections.
4776  if (mode != VISIT_ALL_IN_SCAVENGE) {
4778  }
4779  v->Synchronize(VisitorSynchronization::kBuiltins);
4780 
4781  // Iterate over global handles.
4782  switch (mode) {
4783  case VISIT_ONLY_STRONG:
4785  break;
4786  case VISIT_ALL_IN_SCAVENGE:
4788  break;
4790  case VISIT_ALL:
4792  break;
4793  }
4794  v->Synchronize(VisitorSynchronization::kGlobalHandles);
4795 
4796  // Iterate over eternal handles.
4797  if (mode == VISIT_ALL_IN_SCAVENGE) {
4799  } else {
4801  }
4802  v->Synchronize(VisitorSynchronization::kEternalHandles);
4803 
4804  // Iterate over pointers being held by inactive threads.
4806  v->Synchronize(VisitorSynchronization::kThreadManager);
4807 
4808  // Iterate over the pointers the Serialization/Deserialization code is
4809  // holding.
4810  // During garbage collection this keeps the partial snapshot cache alive.
4811  // During deserialization of the startup snapshot this creates the partial
4812  // snapshot cache and deserializes the objects it refers to. During
4813  // serialization this does nothing, since the partial snapshot cache is
4814  // empty. However the next thing we do is create the partial snapshot,
4815  // filling up the partial snapshot cache with objects it needs as we go.
4817  // We don't do a v->Synchronize call here, because in debug mode that will
4818  // output a flag to the snapshot. However at this point the serializer and
4819  // deserializer are deliberately a little unsynchronized (see above) so the
4820  // checking of the sync flag in the snapshot would fail.
4821 }
void IterateBuiltins(ObjectVisitor *v)
Definition: builtins.cc:1599
void Iterate(ObjectVisitor *v)
void Iterate(ObjectVisitor *v)
Definition: deoptimizer.cc:53
void IterateAllRoots(ObjectVisitor *visitor)
void IterateNewSpaceRoots(ObjectVisitor *visitor)
void IterateAllRoots(ObjectVisitor *v)
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
void IterateStrongRoots(ObjectVisitor *v)
void Iterate(v8::internal::ObjectVisitor *v)
Definition: api.cc:7590
HandleScopeImplementer * handle_scope_implementer()
Definition: isolate.h:901
DeoptimizerData * deoptimizer_data()
Definition: isolate.h:877
Builtins * builtins()
Definition: isolate.h:947
void Iterate(ObjectVisitor *v)
Definition: isolate.cc:206
void IterateDeferredHandles(ObjectVisitor *visitor)
Definition: isolate.cc:212
ThreadManager * thread_manager()
Definition: isolate.h:921
EternalHandles * eternal_handles()
Definition: isolate.h:919
static void Iterate(Isolate *isolate, ObjectVisitor *visitor)
Definition: serialize.cc:1293
void Iterate(ObjectVisitor *v)
Definition: v8threads.cc:329
@ VISIT_ONLY_STRONG
Definition: globals.h:397
@ VISIT_ALL_IN_SWEEP_NEWSPACE
Definition: globals.h:396
@ VISIT_ALL_IN_SCAVENGE
Definition: globals.h:395

References v8::internal::Isolate::bootstrapper(), v8::internal::Isolate::builtins(), v8::internal::Isolate::compilation_cache(), v8::internal::Isolate::deoptimizer_data(), v8::internal::Isolate::eternal_handles(), v8::internal::Isolate::global_handles(), v8::internal::Isolate::handle_scope_implementer(), hidden_string_, isolate_, v8::internal::SerializerDeserializer::Iterate(), v8::internal::CompilationCache::Iterate(), v8::internal::DeoptimizerData::Iterate(), v8::internal::Isolate::Iterate(), v8::internal::ThreadManager::Iterate(), v8::internal::HandleScopeImplementer::Iterate(), v8::internal::GlobalHandles::IterateAllRoots(), v8::internal::EternalHandles::IterateAllRoots(), v8::internal::Builtins::IterateBuiltins(), v8::internal::Isolate::IterateDeferredHandles(), v8::internal::EternalHandles::IterateNewSpaceRoots(), v8::internal::GlobalHandles::IterateNewSpaceStrongAndDependentRoots(), v8::internal::GlobalHandles::IterateStrongRoots(), kStrongRootListLength, mode(), NULL, roots_, v8::internal::Isolate::thread_manager(), v8::internal::VISIT_ALL, v8::internal::VISIT_ALL_IN_SCAVENGE, v8::internal::VISIT_ALL_IN_SWEEP_NEWSPACE, and v8::internal::VISIT_ONLY_STRONG.

Referenced by v8::internal::Deserializer::Deserialize(), IterateRoots(), v8::internal::MarkCompactCollector::MarkRoots(), v8::internal::StartupSerializer::SerializeStrongReferences(), and v8::internal::IncrementalMarking::StartMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IterateWeakRoots()

void v8::internal::Heap::IterateWeakRoots ( ObjectVisitor *  v,
VisitMode  mode 
)

Definition at line 4728 of file heap.cc.

4728  {
4729  v->VisitPointer(reinterpret_cast<Object**>(&roots_[kStringTableRootIndex]));
4730  v->Synchronize(VisitorSynchronization::kStringTable);
4732  // Scavenge collections have special processing for this.
4734  }
4735  v->Synchronize(VisitorSynchronization::kExternalStringsTable);
4736 }
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:654

References external_string_table_, v8::internal::ExternalStringTable::Iterate(), kStringTableRootIndex, mode(), roots_, v8::internal::VISIT_ALL_IN_SCAVENGE, and v8::internal::VISIT_ALL_IN_SWEEP_NEWSPACE.

Referenced by v8::internal::Deserializer::Deserialize(), IterateRoots(), and v8::internal::StartupSerializer::SerializeWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ LeftTrimFixedArray()

FixedArrayBase * v8::internal::Heap::LeftTrimFixedArray ( FixedArrayBase *  obj,
int  elements_to_trim 
)

Definition at line 3266 of file heap.cc.

3267  {
3268  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
3269  const int bytes_to_trim = elements_to_trim * element_size;
3270  Map* map = object->map();
3271 
3272  // For now this trick is only applied to objects in new and paged space.
3273  // In large object space the object's start must coincide with chunk
3274  // and thus the trick is just not applicable.
3275  DCHECK(!lo_space()->Contains(object));
3276  DCHECK(object->map() != fixed_cow_array_map());
3277 
3281 
3282  const int len = object->length();
3283  DCHECK(elements_to_trim <= len);
3284 
3285  // Calculate location of new array start.
3286  Address new_start = object->address() + bytes_to_trim;
3287 
3288  // Technically in new space this write might be omitted (except for
3289  // debug mode which iterates through the heap), but to play safer
3290  // we still do it.
3291  CreateFillerObjectAt(object->address(), bytes_to_trim);
3292 
3293  // Initialize header of the trimmed array. Since left trimming is only
3294  // performed on pages which are not concurrently swept creating a filler
3295  // object does not require synchronization.
3296  DCHECK(CanMoveObjectStart(object));
3297  Object** former_start = HeapObject::RawField(object, 0);
3298  int new_start_index = elements_to_trim * (element_size / kPointerSize);
3299  former_start[new_start_index] = map;
3300  former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);
3301  FixedArrayBase* new_object =
3302  FixedArrayBase::cast(HeapObject::FromAddress(new_start));
3303 
3304  // Maintain consistency of live bytes during incremental marking
3305  marking()->TransferMark(object->address(), new_start);
3306  AdjustLiveBytes(new_start, -bytes_to_trim, Heap::FROM_MUTATOR);
3307 
3308  // Notify the heap profiler of change in object layout.
3309  OnMoveEvent(new_object, object, new_object->Size());
3310  return new_object;
3311 }
static const int kHeaderSize
Definition: objects.h:2393
static const int kMapOffset
Definition: objects.h:1427
static Object ** RawField(HeapObject *obj, int offset)
Definition: objects-inl.h:1311
Marking * marking()
Definition: heap.h:1203
bool CanMoveObjectStart(HeapObject *object)
Definition: heap.cc:3235
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
Definition: heap.cc:3254
void OnMoveEvent(HeapObject *target, HeapObject *source, int size_in_bytes)
Definition: heap-inl.h:245
void TransferMark(Address old_start, Address new_start)
const int kDoubleSize
Definition: globals.h:127

References v8::internal::HeapObject::address(), AdjustLiveBytes(), CanMoveObjectStart(), Contains(), CreateFillerObjectAt(), DCHECK, FROM_MUTATOR, v8::internal::HeapObject::FromAddress(), v8::internal::Smi::FromInt(), v8::internal::kDoubleSize, v8::internal::FixedArrayBase::kHeaderSize, v8::internal::FixedArrayBase::kLengthOffset, v8::internal::HeapObject::kMapOffset, v8::internal::kPointerSize, lo_space(), map, v8::internal::HeapObject::map(), marking(), OnMoveEvent(), v8::internal::HeapObject::RawField(), v8::internal::HeapObject::Size(), STATIC_ASSERT(), and v8::internal::Marking::TransferMark().

Referenced by v8::internal::BUILTIN().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ lo_space()

◆ LookupSingleCharacterStringFromCode()

MUST_USE_RESULT AllocationResult v8::internal::Heap::LookupSingleCharacterStringFromCode ( uint16_t  code)
private

◆ LookupWeakObjectToCodeDependency()

DependentCode * v8::internal::Heap::LookupWeakObjectToCodeDependency ( Handle< Object >  obj)

Definition at line 5360 of file heap.cc.

5360  {
5361  Object* dep = WeakHashTable::cast(weak_object_to_code_table_)->Lookup(obj);
5362  if (dep->IsDependentCode()) return DependentCode::cast(dep);
5363  return DependentCode::cast(empty_fixed_array());
5364 }

References weak_object_to_code_table_.

Referenced by v8::internal::AddWeakObjectToCodeDependency().

+ Here is the caller graph for this function:

◆ MakeHeapIterable()

void v8::internal::Heap::MakeHeapIterable ( )
private

Definition at line 4255 of file heap.cc.

4255  {
4256  DCHECK(AllowHeapAllocation::IsAllowed());
4257  if (!IsHeapIterable()) {
4258  CollectAllGarbage(kMakeHeapIterableMask, "Heap::MakeHeapIterable");
4259  }
4260  if (mark_compact_collector()->sweeping_in_progress()) {
4262  }
4264 }
bool IsHeapIterable()
Definition: heap.cc:4248

References CollectAllGarbage(), DCHECK, v8::internal::MarkCompactCollector::EnsureSweepingCompleted(), IsHeapIterable(), kMakeHeapIterableMask, and mark_compact_collector().

Referenced by v8::internal::BASE_EMBEDDED< Visitor >::MakeHeapIterableHelper::MakeHeapIterableHelper().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ map_space()

◆ MapForExternalArrayType()

Map * v8::internal::Heap::MapForExternalArrayType ( ExternalArrayType  array_type)

Definition at line 3101 of file heap.cc.

3101  {
3102  return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
3103 }
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
Definition: heap.cc:3106

References RootIndexForExternalArrayType(), and roots_.

Referenced by AllocateExternalArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MapForFixedTypedArray()

Map * v8::internal::Heap::MapForFixedTypedArray ( ExternalArrayType  array_type)

Definition at line 3123 of file heap.cc.

3123  {
3124  return Map::cast(roots_[RootIndexForFixedTypedArray(array_type)]);
3125 }
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type)
Definition: heap.cc:3128

References RootIndexForFixedTypedArray(), and roots_.

Referenced by AllocateFixedTypedArray().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ mark_compact_collector()

MarkCompactCollector* v8::internal::Heap::mark_compact_collector ( )
inline

Definition at line 1197 of file heap.h.

1197  {
1198  return &mark_compact_collector_;
1199  }

References mark_compact_collector_.

Referenced by v8::internal::AbortIncrementalMarkingAndCollectGarbage(), AddAllocationSiteToScratchpad(), v8::internal::Context::AddOptimizedFunction(), v8::internal::SharedFunctionInfo::ClearOptimizedCodeMap(), CollectAllAvailableGarbage(), CollectGarbage(), v8::internal::Isolate::Deinit(), v8::internal::Deoptimizer::DeoptimizeMarkedCodeForContext(), v8::internal::WeakListVisitor< Context >::DoWeakList(), v8::internal::LargeObjectSpace::FreeUnmarkedObjects(), GarbageCollectionPrologue(), IdleNotification(), v8::internal::IncrementalMarkingMarkingVisitor::INLINE(), v8::internal::MarkCompactMarkingVisitor::INLINE(), IterateAndMarkPointersToFromSpace(), v8::internal::StoreBuffer::IteratePointersToNewSpace(), MakeHeapIterable(), v8::internal::StaticMarkingVisitor< StaticVisitor >::MarkMapContents(), v8::internal::StaticMarkingVisitor< StaticVisitor >::MarkTransitionArray(), v8::internal::MustRecordSlots(), OldGenerationAllocationLimit(), v8::internal::CodeFlusher::ProcessJSFunctionCandidates(), v8::internal::CodeFlusher::ProcessOptimizedCodeMaps(), v8::internal::CodeFlusher::ProcessSharedFunctionInfoCandidates(), v8::internal::IncrementalMarking::RecordWriteIntoCodeSlow(), v8::internal::IncrementalMarking::RecordWriteOfCodeEntrySlow(), v8::internal::IncrementalMarking::RecordWriteSlow(), v8::internal::SharedFunctionInfo::ReplaceCode(), v8::internal::MarkCompactWeakObjectRetainer::RetainAs(), v8::internal::MarkCompactCollector::SweeperTask::Run(), Scavenge(), v8::internal::IC::SetTargetAtAddress(), SetUp(), v8::internal::PagedSpace::SlowAllocateRaw(), v8::internal::IncrementalMarking::Start(), v8::internal::IncrementalMarking::StartMarking(), v8::internal::IncrementalMarking::Step(), TearDown(), v8::internal::MarkCompactMarkingVisitor::UpdateRegExpCodeAgeAndFlush(), v8::internal::WeakListVisitor< JSArrayBuffer >::VisitLiveObject(), v8::internal::MarkCompactMarkingVisitor::VisitRegExpAndFlushCode(), v8::internal::VisitWeakList(), and 
v8::internal::PagedSpace::WaitForSweeperThreadsAndRetryAllocation().

+ Here is the caller graph for this function:

◆ MarkCompact()

void v8::internal::Heap::MarkCompact ( )
private

Definition at line 1181 of file heap.cc.

1181  {
1183  LOG(isolate_, ResourceEvent("markcompact", "begin"));
1184 
1185  uint64_t size_of_objects_before_gc = SizeOfObjects();
1186 
1188 
1189  ms_count_++;
1190 
1192 
1194 
1195  LOG(isolate_, ResourceEvent("markcompact", "end"));
1196 
1197  gc_state_ = NOT_IN_GC;
1198 
1199  isolate_->counters()->objs_since_last_full()->Set(0);
1200 
1201  flush_monomorphic_ics_ = false;
1202 
1203  if (FLAG_allocation_site_pretenuring) {
1204  EvaluateOldSpaceLocalPretenuring(size_of_objects_before_gc);
1205  }
1206 }
void MarkCompactPrologue()
Definition: heap.cc:1209
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc)
Definition: heap.cc:1684
#define LOG(isolate, Call)
Definition: log.h:69

References v8::internal::MarkCompactCollector::CollectGarbage(), v8::internal::Isolate::counters(), EvaluateOldSpaceLocalPretenuring(), flush_monomorphic_ics_, gc_state_, isolate_, LOG, MARK_COMPACT, mark_compact_collector_, MarkCompactPrologue(), ms_count_, NOT_IN_GC, v8::internal::MarkCompactCollector::Prepare(), and SizeOfObjects().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MarkCompactPrologue()

void v8::internal::Heap::MarkCompactPrologue ( )
private

Definition at line 1209 of file heap.cc.

1209  {
1210  // At any old GC clear the keyed lookup cache to enable collection of unused
1211  // maps.
1215  RegExpResultsCache::Clear(string_split_cache());
1216  RegExpResultsCache::Clear(regexp_multiple_cache());
1217 
1219 
1221 
1223  if (FLAG_cleanup_code_caches_at_gc) {
1224  polymorphic_code_cache()->set_cache(undefined_value());
1225  }
1226 
1228 }
void FlushNumberStringCache()
Definition: heap.cc:3051
void ClearNormalizedMapCaches()
Definition: heap.cc:1003
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:711
static void Clear(FixedArray *cache)
Definition: heap.cc:3031

References v8::internal::KeyedLookupCache::Clear(), v8::internal::DescriptorLookupCache::Clear(), v8::internal::ContextSlotCache::Clear(), v8::internal::RegExpResultsCache::Clear(), ClearNormalizedMapCaches(), v8::internal::Isolate::compilation_cache(), CompletelyClearInstanceofCache(), v8::internal::Isolate::context_slot_cache(), v8::internal::Isolate::descriptor_lookup_cache(), FlushNumberStringCache(), isolate_, v8::internal::Isolate::keyed_lookup_cache(), and v8::internal::CompilationCache::MarkCompactPrologue().

Referenced by MarkCompact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ marking()

Marking* v8::internal::Heap::marking ( )
inline

Definition at line 1203 of file heap.h.

1203 { return &marking_; }

References marking_.

Referenced by LeftTrimFixedArray().

+ Here is the caller graph for this function:

◆ MarkMapPointersAsEncoded()

void v8::internal::Heap::MarkMapPointersAsEncoded ( bool  encoded)
inlineprivate

Definition at line 1601 of file heap.h.

1601  {
1602  DCHECK(!encoded);
1604  }
static int GcSafeSizeOfOldObject(HeapObject *object)
Definition: heap.cc:228

References DCHECK, gc_safe_size_of_old_object_, and GcSafeSizeOfOldObject().

Referenced by SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MaxExecutableSize()

intptr_t v8::internal::Heap::MaxExecutableSize ( )
inline

Definition at line 556 of file heap.h.

556 { return max_executable_size_; }

References max_executable_size_.

Referenced by SetUp().

+ Here is the caller graph for this function:

◆ MaximumCommittedMemory()

intptr_t v8::internal::Heap::MaximumCommittedMemory ( )
inline

Definition at line 572 of file heap.h.

572 { return maximum_committed_; }

References maximum_committed_.

Referenced by GarbageCollectionEpilogue(), and TearDown().

+ Here is the caller graph for this function:

◆ MaximumSizeScavenge()

bool v8::internal::Heap::MaximumSizeScavenge ( )
inline

Definition at line 1282 of file heap.h.

1282 { return maximum_size_scavenges_ > 0; }

References maximum_size_scavenges_.

Referenced by ProcessPretenuringFeedback().

+ Here is the caller graph for this function:

◆ MaxOldGenerationSize()

intptr_t v8::internal::Heap::MaxOldGenerationSize ( )
inline

Definition at line 555 of file heap.h.

555 { return max_old_generation_size_; }

References max_old_generation_size_.

Referenced by v8::internal::IncrementalMarking::SpaceLeftInOldSpace().

+ Here is the caller graph for this function:

◆ MaxReserved()

intptr_t v8::internal::Heap::MaxReserved ( )
inline

Definition at line 549 of file heap.h.

549  {
551  }

References max_old_generation_size_, and reserved_semispace_size_.

Referenced by v8::Isolate::GetHeapStatistics(), and SetUp().

+ Here is the caller graph for this function:

◆ MaxSemiSpaceSize()

int v8::internal::Heap::MaxSemiSpaceSize ( )
inline

Definition at line 552 of file heap.h.

552 { return max_semi_space_size_; }

References max_semi_space_size_.

Referenced by v8::internal::IncrementalMarking::SpeedUp().

+ Here is the caller graph for this function:

◆ MoveBlock()

void v8::internal::Heap::MoveBlock ( Address  dst,
Address  src,
int  byte_size 
)
inlinestatic

Definition at line 475 of file heap-inl.h.

475  {
476  DCHECK(IsAligned(byte_size, kPointerSize));
477 
478  int size_in_words = byte_size / kPointerSize;
479 
480  if ((dst < src) || (dst >= (src + byte_size))) {
481  Object** src_slot = reinterpret_cast<Object**>(src);
482  Object** dst_slot = reinterpret_cast<Object**>(dst);
483  Object** end_slot = src_slot + size_in_words;
484 
485  while (src_slot != end_slot) {
486  *dst_slot++ = *src_slot++;
487  }
488  } else {
489  MemMove(dst, src, static_cast<size_t>(byte_size));
490  }
491 }
void MemMove(void *dest, const void *src, size_t size)
Definition: utils.h:353

References DCHECK, v8::internal::IsAligned(), v8::internal::kPointerSize, and v8::internal::MemMove().

Referenced by v8::internal::MarkCompactCollector::MigrateObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ MoveElements()

void v8::internal::Heap::MoveElements ( FixedArray *  array,
int  dst_index,
int  src_index,
int  len 
)

Definition at line 868 of file heap.cc.

869  {
870  if (len == 0) return;
871 
872  DCHECK(array->map() != fixed_cow_array_map());
873  Object** dst_objects = array->data_start() + dst_index;
874  MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize);
875  if (!InNewSpace(array)) {
876  for (int i = 0; i < len; i++) {
877  // TODO(hpayer): check store buffer for entries
878  if (InNewSpace(dst_objects[i])) {
879  RecordWrite(array->address(), array->OffsetOfElementAt(dst_index + i));
880  }
881  }
882  }
884 }

References v8::internal::HeapObject::address(), v8::internal::FixedArray::data_start(), DCHECK, incremental_marking(), InNewSpace(), v8::internal::kPointerSize, v8::internal::HeapObject::map(), v8::internal::MemMove(), v8::internal::FixedArray::OffsetOfElementAt(), and v8::internal::IncrementalMarking::RecordWrites().

Referenced by v8::internal::BUILTIN().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ms_count()

unsigned int v8::internal::Heap::ms_count ( )
inline

Definition at line 816 of file heap.h.

816 { return ms_count_; }

References ms_count_.

◆ native_contexts_list()

◆ new_space()

NewSpace* v8::internal::Heap::new_space ( )
inline

Definition at line 593 of file heap.h.

593 { return &new_space_; }

References new_space_.

Referenced by v8::internal::IncrementalMarking::Abort(), v8::internal::IncrementalMarking::ActivateIncrementalWriteBarrier(), v8::internal::MarkCompactCollector::ClearMarkbits(), v8::internal::SpaceIterator::CreateIterator(), v8::internal::IncrementalMarking::DeactivateIncrementalWriteBarrier(), DisableInlineAllocation(), v8::internal::DiscoverGreyObjectsInNewSpace(), EnableInlineAllocation(), v8::internal::StoreBuffer::EnterDirectlyIntoStoreBuffer(), v8::internal::MarkCompactCollector::EvacuateNewSpace(), v8::internal::IncrementalMarking::Finalize(), GarbageCollectionEpilogue(), v8::internal::StatisticsExtension::GetCounters(), v8::internal::PromotionQueue::Initialize(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::INLINE(), v8::internal::PromotionQueue::insert(), IsHeapIterable(), v8::internal::MarkCompactCollector::MarkLiveObjects(), PerformGarbageCollection(), v8::internal::IncrementalMarking::PrepareForScavenge(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::RecordCopiedObject(), ReserveSpace(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::SemiSpaceCopyObject(), SetUp(), v8::internal::IncrementalMarking::Start(), v8::internal::GCTracer::Start(), v8::internal::GCTracer::Stop(), and v8::internal::SeqString::Truncate().

+ Here is the caller graph for this function:

◆ NewSpaceAllocationLimitAddress()

Address* v8::internal::Heap::NewSpaceAllocationLimitAddress ( )
inline

Definition at line 630 of file heap.h.

630  {
632  }
Address * allocation_limit_address()
Definition: spaces.h:2463

References v8::internal::NewSpace::allocation_limit_address(), and new_space_.

+ Here is the call graph for this function:

◆ NewSpaceAllocationTopAddress()

Address* v8::internal::Heap::NewSpaceAllocationTopAddress ( )
inline

Definition at line 627 of file heap.h.

627  {
629  }
Address * allocation_top_address()
Definition: spaces.h:2460

References v8::internal::NewSpace::allocation_top_address(), and new_space_.

+ Here is the call graph for this function:

◆ NewSpaceMask()

uintptr_t v8::internal::Heap::NewSpaceMask ( )
inline

Definition at line 590 of file heap.h.

590 { return new_space_.mask(); }
uintptr_t mask()
Definition: spaces.h:2446

References v8::internal::NewSpace::mask(), and new_space_.

+ Here is the call graph for this function:

◆ NewSpaceStart()

Address v8::internal::Heap::NewSpaceStart ( )
inline

Definition at line 589 of file heap.h.

589 { return new_space_.start(); }

References new_space_, and v8::internal::NewSpace::start().

+ Here is the call graph for this function:

◆ NewSpaceTop()

Address v8::internal::Heap::NewSpaceTop ( )
inline

Definition at line 591 of file heap.h.

591 { return new_space_.top(); }

References new_space_, and v8::internal::NewSpace::top().

Referenced by FindAllocationMemento(), v8::internal::StringTracker::IsFreshUnusedString(), and v8::internal::StringTracker::RecordWrite().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NextGCIsLikelyToBeFull()

bool v8::internal::Heap::NextGCIsLikelyToBeFull ( )
inline

Definition at line 1135 of file heap.h.

1135  {
1136  if (FLAG_gc_global) return true;
1137 
1138  if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
1139 
1140  intptr_t adjusted_allocation_limit =
1142 
1143  if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
1144 
1145  return false;
1146  }
intptr_t PromotedTotalSize()
Definition: heap.h:996

References v8::internal::NewSpace::Capacity(), gc_count_, new_space_, old_generation_allocation_limit_, and PromotedTotalSize().

Referenced by v8::internal::IncrementalMarking::ShouldActivate(), and WorthActivatingIncrementalMarking().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ NO_INLINE() [1/2]

v8::internal::Heap::NO_INLINE ( void   CreateJSConstructEntryStub())
private

◆ NO_INLINE() [2/2]

v8::internal::Heap::NO_INLINE ( void   CreateJSEntryStub())
private

◆ NotifyContextDisposed()

int v8::internal::Heap::NotifyContextDisposed ( )

Definition at line 857 of file heap.cc.

857  {
858  if (isolate()->concurrent_recompilation_enabled()) {
859  // Flush the queued recompilation tasks.
861  }
862  flush_monomorphic_ics_ = true;
863  AgeInlineCaches();
864  return ++contexts_disposed_;
865 }
void AgeInlineCaches()
Definition: heap.h:1270

References AgeInlineCaches(), contexts_disposed_, v8::internal::OptimizingCompilerThread::Flush(), flush_monomorphic_ics_, isolate(), and v8::internal::Isolate::optimizing_compiler_thread().

Referenced by v8::Isolate::ContextDisposedNotification().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ old_data_space()

◆ old_pointer_space()

◆ OldDataSpaceAllocationLimitAddress()

Address* v8::internal::Heap::OldDataSpaceAllocationLimitAddress ( )
inline

Definition at line 644 of file heap.h.

644  {
646  }
Address * allocation_limit_address()
Definition: spaces.h:1758

References v8::internal::PagedSpace::allocation_limit_address(), and old_data_space_.

+ Here is the call graph for this function:

◆ OldDataSpaceAllocationTopAddress()

Address* v8::internal::Heap::OldDataSpaceAllocationTopAddress ( )
inline

Definition at line 641 of file heap.h.

641  {
643  }
Address * allocation_top_address()
Definition: spaces.h:1755

References v8::internal::PagedSpace::allocation_top_address(), and old_data_space_.

+ Here is the call graph for this function:

◆ OldGenerationAllocationLimit()

intptr_t v8::internal::Heap::OldGenerationAllocationLimit ( intptr_t  old_gen_size,
int  freed_global_handles 
)

Definition at line 4984 of file heap.cc.

4985  {
4986  const int kMaxHandles = 1000;
4987  const int kMinHandles = 100;
4988  double min_factor = 1.1;
4989  double max_factor = 4;
4990  // We set the old generation growing factor to 2 to grow the heap slower on
4991  // memory-constrained devices.
4993  max_factor = 2;
4994  }
4995  // If there are many freed global handles, then the next full GC will
4996  // likely collect a lot of garbage. Choose the heap growing factor
4997  // depending on freed global handles.
4998  // TODO(ulan, hpayer): Take into account mutator utilization.
4999  double factor;
5000  if (freed_global_handles <= kMinHandles) {
5001  factor = max_factor;
5002  } else if (freed_global_handles >= kMaxHandles) {
5003  factor = min_factor;
5004  } else {
5005  // Compute factor using linear interpolation between points
5006  // (kMinHandles, max_factor) and (kMaxHandles, min_factor).
5007  factor = max_factor -
5008  (freed_global_handles - kMinHandles) * (max_factor - min_factor) /
5009  (kMaxHandles - kMinHandles);
5010  }
5011 
5012  if (FLAG_stress_compaction ||
5014  factor = min_factor;
5015  }
5016 
5017  intptr_t limit = static_cast<intptr_t>(old_gen_size * factor);
5018  limit = Max(limit, kMinimumOldGenerationAllocationLimit);
5019  limit += new_space_.Capacity();
5020  intptr_t halfway_to_the_max = (old_gen_size + max_old_generation_size_) / 2;
5021  return Min(limit, halfway_to_the_max);
5022 }
static const int kMaxOldSpaceSizeMediumMemoryDevice
Definition: heap.h:1025

References v8::internal::NewSpace::Capacity(), kMaxOldSpaceSizeMediumMemoryDevice, kMinimumOldGenerationAllocationLimit, mark_compact_collector(), v8::internal::Max(), max_old_generation_size_, v8::internal::Min(), new_space_, and v8::internal::MarkCompactCollector::reduce_memory_footprint_.

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationAllocationLimitReached()

bool v8::internal::Heap::OldGenerationAllocationLimitReached ( )
inline

Definition at line 364 of file heap-inl.h.

364  {
365  if (!incremental_marking()->IsStopped()) return false;
366  return OldGenerationSpaceAvailable() < 0;
367 }
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1003

References incremental_marking(), and OldGenerationSpaceAvailable().

Referenced by SelectGarbageCollector().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldGenerationCapacityAvailable()

intptr_t v8::internal::Heap::OldGenerationCapacityAvailable ( )
inline

Definition at line 1007 of file heap.h.

1007  {
1009  }

References max_old_generation_size_, and PromotedTotalSize().

+ Here is the call graph for this function:

◆ OldGenerationSpaceAvailable()

intptr_t v8::internal::Heap::OldGenerationSpaceAvailable ( )
inline

Definition at line 1003 of file heap.h.

1003  {
1005  }

References old_generation_allocation_limit_, and PromotedTotalSize().

Referenced by OldGenerationAllocationLimitReached().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OldPointerSpaceAllocationLimitAddress()

Address* v8::internal::Heap::OldPointerSpaceAllocationLimitAddress ( )
inline

Definition at line 637 of file heap.h.

637  {
639  }

References v8::internal::PagedSpace::allocation_limit_address(), and old_pointer_space_.

+ Here is the call graph for this function:

◆ OldPointerSpaceAllocationTopAddress()

Address* v8::internal::Heap::OldPointerSpaceAllocationTopAddress ( )
inline

Definition at line 634 of file heap.h.

634  {
636  }

References v8::internal::PagedSpace::allocation_top_address(), and old_pointer_space_.

+ Here is the call graph for this function:

◆ OnAllocationEvent()

void v8::internal::Heap::OnAllocationEvent ( HeapObject object,
int  size_in_bytes 
)
inline

Definition at line 224 of file heap-inl.h.

224  {
225  HeapProfiler* profiler = isolate_->heap_profiler();
226  if (profiler->is_tracking_allocations()) {
227  profiler->AllocationEvent(object->address(), size_in_bytes);
228  }
229 
230  if (FLAG_verify_predictable) {
232 
233  UpdateAllocationsHash(object);
234  UpdateAllocationsHash(size_in_bytes);
235 
236  if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
238  dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
240  }
241  }
242 }
void UpdateAllocationsHash(HeapObject *object)
Definition: heap-inl.h:277
void PrintAlloctionsHash()
Definition: heap-inl.h:301
HeapProfiler * heap_profiler() const
Definition: isolate.h:972
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes A file to write the raw context snapshot bytes Write V8 startup blob Print the time it takes to lazily compile hydrogen code stubs dump only objects containing this substring stress the GC compactor to flush out pretty print source code for builtins print C code to recreate TurboFan graphs report heap spill statistics along with enable possessive quantifier syntax for testing Minimal Log code events to the log file without profiling log positions Log statistical profiling Used with turns on browser compatible mode for profiling Enable perf linux profiler(experimental annotate support).") DEFINE_STRING(gc_fake_mmap

References v8::internal::HeapObject::address(), allocations_count_, dump_allocations_hash_countdown_, v8::internal::Isolate::heap_profiler(), isolate_, PrintAlloctionsHash(), profiler(), and UpdateAllocationsHash().

Referenced by AllocateCode(), AllocateRaw(), and v8::internal::Deserializer::ReadObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ OnMoveEvent()

void v8::internal::Heap::OnMoveEvent ( HeapObject target,
HeapObject source,
int  size_in_bytes 
)
inline

Definition at line 245 of file heap-inl.h.

246  {
247  HeapProfiler* heap_profiler = isolate_->heap_profiler();
248  if (heap_profiler->is_tracking_object_moves()) {
249  heap_profiler->ObjectMoveEvent(source->address(), target->address(),
250  size_in_bytes);
251  }
252 
254  isolate_->cpu_profiler()->is_profiling()) {
255  if (target->IsSharedFunctionInfo()) {
256  PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
257  target->address()));
258  }
259  }
260 
261  if (FLAG_verify_predictable) {
263 
264  UpdateAllocationsHash(source);
265  UpdateAllocationsHash(target);
266  UpdateAllocationsHash(size_in_bytes);
267 
268  if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
270  dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
272  }
273  }
274 }
void ObjectMoveEvent(Address from, Address to, int size)
CpuProfiler * cpu_profiler() const
Definition: isolate.h:971
Logger * logger()
Definition: isolate.h:866
bool is_logging_code_events()
Definition: log.h:315
#define PROFILE(IsolateGetter, Call)
Definition: cpu-profiler.h:181

References v8::internal::HeapObject::address(), allocations_count_, v8::internal::Isolate::cpu_profiler(), dump_allocations_hash_countdown_, v8::internal::Isolate::heap_profiler(), v8::internal::Logger::is_logging_code_events(), v8::internal::HeapProfiler::is_tracking_object_moves(), isolate_, v8::internal::Isolate::logger(), v8::internal::HeapProfiler::ObjectMoveEvent(), PrintAlloctionsHash(), PROFILE, and UpdateAllocationsHash().

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::INLINE(), LeftTrimFixedArray(), and v8::internal::MarkCompactCollector::MigrateObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ paged_space()

PagedSpace* v8::internal::Heap::paged_space ( int  idx)
inline

Definition at line 601 of file heap.h.

601  {
602  switch (idx) {
603  case OLD_POINTER_SPACE:
604  return old_pointer_space();
605  case OLD_DATA_SPACE:
606  return old_data_space();
607  case MAP_SPACE:
608  return map_space();
609  case CELL_SPACE:
610  return cell_space();
611  case PROPERTY_CELL_SPACE:
612  return property_cell_space();
613  case CODE_SPACE:
614  return code_space();
615  case NEW_SPACE:
616  case LO_SPACE:
617  UNREACHABLE();
618  }
619  return NULL;
620  }

References v8::internal::CELL_SPACE, cell_space(), v8::internal::CODE_SPACE, code_space(), v8::internal::LO_SPACE, v8::internal::MAP_SPACE, map_space(), v8::internal::NEW_SPACE, NULL, v8::internal::OLD_DATA_SPACE, old_data_space(), v8::internal::OLD_POINTER_SPACE, old_pointer_space(), v8::internal::PROPERTY_CELL_SPACE, property_cell_space(), and UNREACHABLE.

Referenced by v8::internal::MarkCompactCollector::EnsureSweepingCompleted(), and ReserveSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PerformGarbageCollection()

bool v8::internal::Heap::PerformGarbageCollection ( GarbageCollector  collector,
const GCCallbackFlags  gc_callback_flags = kNoGCCallbackFlags 
)
private

Definition at line 1042 of file heap.cc.

1043  {
1044  int freed_global_handles = 0;
1045 
1046  if (collector != SCAVENGER) {
1047  PROFILE(isolate_, CodeMovingGCEvent());
1048  }
1049 
1050 #ifdef VERIFY_HEAP
1051  if (FLAG_verify_heap) {
1052  VerifyStringTable(this);
1053  }
1054 #endif
1055 
1056  GCType gc_type =
1058 
1059  {
1060  GCCallbacksScope scope(this);
1061  if (scope.CheckReenter()) {
1062  AllowHeapAllocation allow_allocation;
1063  GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
1064  VMState<EXTERNAL> state(isolate_);
1065  HandleScope handle_scope(isolate_);
1067  }
1068  }
1069 
1071 
1072  int start_new_space_size = Heap::new_space()->SizeAsInt();
1073 
1074  if (IsHighSurvivalRate()) {
1075  // We speed up the incremental marker if it is running so that it
1076  // does not fall behind the rate of promotion, which would cause a
1077  // constantly growing old space.
1079  }
1080 
1081  if (collector == MARK_COMPACTOR) {
1082  // Perform mark-sweep with optional compaction.
1083  MarkCompact();
1085  // Temporarily set the limit for case when PostGarbageCollectionProcessing
1086  // allocates and triggers GC. The real limit is set after
1087  // PostGarbageCollectionProcessing.
1090  old_gen_exhausted_ = false;
1091  } else {
1092  Scavenge();
1093  }
1094 
1095  UpdateSurvivalStatistics(start_new_space_size);
1096 
1097  isolate_->counters()->objs_since_last_young()->Set(0);
1098 
1099  // Callbacks that fire after this point might trigger nested GCs and
1100  // restart incremental marking, the assertion can't be moved down.
1101  DCHECK(collector == SCAVENGER || incremental_marking()->IsStopped());
1102 
1104  {
1105  AllowHeapAllocation allow_allocation;
1106  GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
1107  freed_global_handles =
1109  }
1111 
1113 
1114  // Update relocatables.
1115  Relocatable::PostGarbageCollectionProcessing(isolate_);
1116 
1117  if (collector == MARK_COMPACTOR) {
1118  // Register the amount of external allocated memory.
1122  PromotedSpaceSizeOfObjects(), freed_global_handles);
1123  }
1124 
1125  {
1126  GCCallbacksScope scope(this);
1127  if (scope.CheckReenter()) {
1128  AllowHeapAllocation allow_allocation;
1129  GCTracer::Scope scope(tracer(), GCTracer::Scope::EXTERNAL);
1130  VMState<EXTERNAL> state(isolate_);
1131  HandleScope handle_scope(isolate_);
1132  CallGCEpilogueCallbacks(gc_type, gc_callback_flags);
1133  }
1134  }
1135 
1136 #ifdef VERIFY_HEAP
1137  if (FLAG_verify_heap) {
1138  VerifyStringTable(this);
1139  }
1140 #endif
1141 
1142  return freed_global_handles > 0;
1143 }
void PostGarbageCollectionProcessing(Heap *heap)
int PostGarbageCollectionProcessing(GarbageCollector collector)
void Scavenge()
Definition: heap.cc:1420
void MarkCompact()
Definition: heap.cc:1181
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1146
bool IsHighSurvivalRate()
Definition: heap.h:1925
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
Definition: heap.cc:1163
friend class GCCallbacksScope
Definition: heap.h:2026
void EnsureFromSpaceIsCommitted()
Definition: heap.cc:971
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size, int freed_global_handles)
Definition: heap.cc:4984
intptr_t PromotedSpaceSizeOfObjects()
Definition: heap.cc:4967
void UpdateSurvivalStatistics(int start_new_space_size)
Definition: heap.cc:1023
@ kNoGCCallbackFlags
Definition: v8.h:4210
GCType
Applications can register callback functions which will be called before and after a garbage collection.
Definition: v8.h:4203
@ kGCTypeScavenge
Definition: v8.h:4204
@ kGCTypeMarkSweepCompact
Definition: v8.h:4205

References amount_of_external_allocated_memory_, amount_of_external_allocated_memory_at_last_global_gc_, CallGCEpilogueCallbacks(), CallGCPrologueCallbacks(), v8::internal::GCCallbacksScope::CheckReenter(), v8::internal::Isolate::counters(), DCHECK, EnsureFromSpaceIsCommitted(), v8::internal::Isolate::eternal_handles(), v8::internal::GCTracer::Scope::EXTERNAL, gc_post_processing_depth_, v8::internal::Isolate::global_handles(), incremental_marking(), IsHighSurvivalRate(), isolate_, v8::kGCTypeMarkSweepCompact, v8::kGCTypeScavenge, v8::kNoGCCallbackFlags, v8::internal::MARK_COMPACTOR, MarkCompact(), new_space(), v8::internal::IncrementalMarking::NotifyOfHighPromotionRate(), old_gen_exhausted_, old_generation_allocation_limit_, OldGenerationAllocationLimit(), v8::internal::GlobalHandles::PostGarbageCollectionProcessing(), v8::internal::EternalHandles::PostGarbageCollectionProcessing(), PROFILE, PromotedSpaceSizeOfObjects(), Scavenge(), v8::internal::SCAVENGER, v8::internal::NewSpace::SizeAsInt(), sweep_generation_, tracer(), and UpdateSurvivalStatistics().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrintAlloctionsHash()

void v8::internal::Heap::PrintAlloctionsHash ( )
inlineprivate

Definition at line 301 of file heap-inl.h.

301  {
302  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
303  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
304 }

References allocations_count_, v8::internal::PrintF(), and raw_allocations_hash_.

Referenced by OnAllocationEvent(), OnMoveEvent(), and TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PrintShortHeapStatistics()

void v8::internal::Heap::PrintShortHeapStatistics ( )

Definition at line 313 of file heap.cc.

313  {
314  if (!FLAG_trace_gc_verbose) return;
315  PrintPID("Memory allocator, used: %6" V8_PTR_PREFIX
316  "d KB"
317  ", available: %6" V8_PTR_PREFIX "d KB\n",
320  PrintPID("New space, used: %6" V8_PTR_PREFIX
321  "d KB"
322  ", available: %6" V8_PTR_PREFIX
323  "d KB"
324  ", committed: %6" V8_PTR_PREFIX "d KB\n",
327  PrintPID("Old pointers, used: %6" V8_PTR_PREFIX
328  "d KB"
329  ", available: %6" V8_PTR_PREFIX
330  "d KB"
331  ", committed: %6" V8_PTR_PREFIX "d KB\n",
335  PrintPID("Old data space, used: %6" V8_PTR_PREFIX
336  "d KB"
337  ", available: %6" V8_PTR_PREFIX
338  "d KB"
339  ", committed: %6" V8_PTR_PREFIX "d KB\n",
343  PrintPID("Code space, used: %6" V8_PTR_PREFIX
344  "d KB"
345  ", available: %6" V8_PTR_PREFIX
346  "d KB"
347  ", committed: %6" V8_PTR_PREFIX "d KB\n",
350  PrintPID("Map space, used: %6" V8_PTR_PREFIX
351  "d KB"
352  ", available: %6" V8_PTR_PREFIX
353  "d KB"
354  ", committed: %6" V8_PTR_PREFIX "d KB\n",
357  PrintPID("Cell space, used: %6" V8_PTR_PREFIX
358  "d KB"
359  ", available: %6" V8_PTR_PREFIX
360  "d KB"
361  ", committed: %6" V8_PTR_PREFIX "d KB\n",
364  PrintPID("PropertyCell space, used: %6" V8_PTR_PREFIX
365  "d KB"
366  ", available: %6" V8_PTR_PREFIX
367  "d KB"
368  ", committed: %6" V8_PTR_PREFIX "d KB\n",
372  PrintPID("Large object space, used: %6" V8_PTR_PREFIX
373  "d KB"
374  ", available: %6" V8_PTR_PREFIX
375  "d KB"
376  ", committed: %6" V8_PTR_PREFIX "d KB\n",
379  PrintPID("All spaces, used: %6" V8_PTR_PREFIX
380  "d KB"
381  ", available: %6" V8_PTR_PREFIX
382  "d KB"
383  ", committed: %6" V8_PTR_PREFIX "d KB\n",
384  this->SizeOfObjects() / KB, this->Available() / KB,
385  this->CommittedMemory() / KB);
386  PrintPID("External memory reported: %6" V8_PTR_PREFIX "d KB\n",
387  static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB));
388  PrintPID("Total time spent in GC : %.1f ms\n", total_gc_time_ms_);
389 }
intptr_t Available()
Definition: heap.cc:211
virtual intptr_t SizeOfObjects()
Definition: spaces.h:2742
virtual intptr_t Size()
Definition: spaces.h:2360
virtual intptr_t SizeOfObjects()
Definition: spaces.cc:2502
#define V8_PTR_PREFIX
Definition: macros.h:360

References amount_of_external_allocated_memory_, Available(), v8::internal::MemoryAllocator::Available(), v8::internal::PagedSpace::Available(), v8::internal::NewSpace::Available(), v8::internal::LargeObjectSpace::Available(), cell_space_, code_space_, CommittedMemory(), v8::internal::PagedSpace::CommittedMemory(), v8::internal::NewSpace::CommittedMemory(), v8::internal::LargeObjectSpace::CommittedMemory(), isolate_, v8::internal::KB, lo_space_, map_space_, v8::internal::Isolate::memory_allocator(), new_space_, old_data_space_, old_pointer_space_, v8::internal::PrintPID(), property_cell_space_, v8::internal::MemoryAllocator::Size(), v8::internal::NewSpace::Size(), SizeOfObjects(), v8::internal::PagedSpace::SizeOfObjects(), v8::internal::LargeObjectSpace::SizeOfObjects(), total_gc_time_ms_, and V8_PTR_PREFIX.

Referenced by v8::internal::GCTracer::Stop().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessAllocationSites()

void v8::internal::Heap::ProcessAllocationSites ( WeakObjectRetainer retainer)
private

Definition at line 1660 of file heap.cc.

1660  {
1661  Object* allocation_site_obj =
1663  set_allocation_sites_list(allocation_site_obj);
1664 }
template Object * VisitWeakList< AllocationSite >(Heap *heap, Object *list, WeakObjectRetainer *retainer)

References allocation_sites_list(), set_allocation_sites_list(), and v8::internal::VisitWeakList< AllocationSite >().

Referenced by ProcessWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessArrayBuffers()

void v8::internal::Heap::ProcessArrayBuffers ( WeakObjectRetainer retainer)
private

Definition at line 1642 of file heap.cc.

1642  {
1643  Object* array_buffer_obj =
1645  set_array_buffers_list(array_buffer_obj);
1646 }
Object * array_buffers_list() const
Definition: heap.h:796
template Object * VisitWeakList< JSArrayBuffer >(Heap *heap, Object *list, WeakObjectRetainer *retainer)

References array_buffers_list(), set_array_buffers_list(), and v8::internal::VisitWeakList< JSArrayBuffer >().

Referenced by ProcessWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessNativeContexts()

void v8::internal::Heap::ProcessNativeContexts ( WeakObjectRetainer retainer)
private

Definition at line 1635 of file heap.cc.

1635  {
1636  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
1637  // Update the head of the list of contexts.
1639 }
template Object * VisitWeakList< Context >(Heap *heap, Object *list, WeakObjectRetainer *retainer)

References native_contexts_list(), set_native_contexts_list(), and v8::internal::VisitWeakList< Context >().

Referenced by ProcessWeakReferences().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessPretenuringFeedback()

void v8::internal::Heap::ProcessPretenuringFeedback ( )
private

Definition at line 493 of file heap.cc.

493  {
494  if (FLAG_allocation_site_pretenuring) {
495  int tenure_decisions = 0;
496  int dont_tenure_decisions = 0;
497  int allocation_mementos_found = 0;
498  int allocation_sites = 0;
499  int active_allocation_sites = 0;
500 
501  // If the scratchpad overflowed, we have to iterate over the allocation
502  // sites list.
503  // TODO(hpayer): We iterate over the whole list of allocation sites when
504  // we grew to the maximum semi-space size to deopt maybe tenured
505  // allocation sites. We could hold the maybe tenured allocation sites
 506  // in a separate data structure if this is a performance problem.
507  bool deopt_maybe_tenured = DeoptMaybeTenuredAllocationSites();
508  bool use_scratchpad =
510  !deopt_maybe_tenured;
511 
512  int i = 0;
513  Object* list_element = allocation_sites_list();
514  bool trigger_deoptimization = false;
515  bool maximum_size_scavenge = MaximumSizeScavenge();
516  while (use_scratchpad ? i < allocation_sites_scratchpad_length_
517  : list_element->IsAllocationSite()) {
518  AllocationSite* site =
519  use_scratchpad
520  ? AllocationSite::cast(allocation_sites_scratchpad()->get(i))
521  : AllocationSite::cast(list_element);
522  allocation_mementos_found += site->memento_found_count();
523  if (site->memento_found_count() > 0) {
524  active_allocation_sites++;
525  if (site->DigestPretenuringFeedback(maximum_size_scavenge)) {
526  trigger_deoptimization = true;
527  }
528  if (site->GetPretenureMode() == TENURED) {
529  tenure_decisions++;
530  } else {
531  dont_tenure_decisions++;
532  }
533  allocation_sites++;
534  }
535 
536  if (deopt_maybe_tenured && site->IsMaybeTenure()) {
537  site->set_deopt_dependent_code(true);
538  trigger_deoptimization = true;
539  }
540 
541  if (use_scratchpad) {
542  i++;
543  } else {
544  list_element = site->weak_next();
545  }
546  }
547 
548  if (trigger_deoptimization) {
549  isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
550  }
551 
553 
554  if (FLAG_trace_pretenuring_statistics &&
555  (allocation_mementos_found > 0 || tenure_decisions > 0 ||
556  dont_tenure_decisions > 0)) {
557  PrintF(
558  "GC: (mode, #visited allocation sites, #active allocation sites, "
559  "#mementos, #tenure decisions, #donttenure decisions) "
560  "(%s, %d, %d, %d, %d, %d)\n",
561  use_scratchpad ? "use scratchpad" : "use list", allocation_sites,
562  active_allocation_sites, allocation_mementos_found, tenure_decisions,
563  dont_tenure_decisions);
564  }
565  }
566 }
void FlushAllocationSitesScratchpad()
Definition: heap.cc:3060
bool DeoptMaybeTenuredAllocationSites()
Definition: heap.h:1284
bool MaximumSizeScavenge()
Definition: heap.h:1282
StackGuard * stack_guard()
Definition: isolate.h:872

References allocation_sites_list(), allocation_sites_scratchpad_length_, DeoptMaybeTenuredAllocationSites(), v8::internal::AllocationSite::DigestPretenuringFeedback(), FlushAllocationSitesScratchpad(), v8::internal::AllocationSite::GetPretenureMode(), v8::internal::AllocationSite::IsMaybeTenure(), isolate_, kAllocationSiteScratchpadSize, MaximumSizeScavenge(), v8::internal::AllocationSite::memento_found_count(), v8::internal::PrintF(), v8::internal::AllocationSite::set_deopt_dependent_code(), v8::internal::Isolate::stack_guard(), and v8::internal::TENURED.

Referenced by GarbageCollectionEpilogue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ProcessWeakReferences()

void v8::internal::Heap::ProcessWeakReferences ( WeakObjectRetainer retainer)

Definition at line 1626 of file heap.cc.

1626  {
1627  ProcessArrayBuffers(retainer);
1628  ProcessNativeContexts(retainer);
1629  // TODO(mvstanton): AllocationSites only need to be processed during
1630  // MARK_COMPACT, as they live in old space. Verify and address.
1631  ProcessAllocationSites(retainer);
1632 }
void ProcessNativeContexts(WeakObjectRetainer *retainer)
Definition: heap.cc:1635
void ProcessAllocationSites(WeakObjectRetainer *retainer)
Definition: heap.cc:1660
void ProcessArrayBuffers(WeakObjectRetainer *retainer)
Definition: heap.cc:1642

References ProcessAllocationSites(), ProcessArrayBuffers(), and ProcessNativeContexts().

Referenced by v8::internal::MarkCompactCollector::AfterMarking(), v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates(), and Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PromotedExternalMemorySize()

int64_t v8::internal::Heap::PromotedExternalMemorySize ( )
private

Definition at line 4975 of file heap.cc.

References amount_of_external_allocated_memory_, and amount_of_external_allocated_memory_at_last_global_gc_.

Referenced by PromotedTotalSize().

+ Here is the caller graph for this function:

◆ PromotedSpaceSizeOfObjects()

intptr_t v8::internal::Heap::PromotedSpaceSizeOfObjects ( )

Definition at line 4967 of file heap.cc.

References cell_space_, code_space_, lo_space_, map_space_, old_data_space_, old_pointer_space_, property_cell_space_, v8::internal::PagedSpace::SizeOfObjects(), and v8::internal::LargeObjectSpace::SizeOfObjects().

Referenced by v8::internal::IncrementalMarking::BlackToGreyAndUnshift(), PerformGarbageCollection(), PromotedTotalSize(), Scavenge(), v8::internal::IncrementalMarking::SpaceLeftInOldSpace(), and v8::internal::IncrementalMarking::WorthActivating().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ PromotedTotalSize()

intptr_t v8::internal::Heap::PromotedTotalSize ( )
inline

Definition at line 996 of file heap.h.

996  {
998  if (total > kMaxInt) return static_cast<intptr_t>(kMaxInt);
999  if (total < 0) return 0;
1000  return static_cast<intptr_t>(total);
1001  }
int64_t PromotedExternalMemorySize()
Definition: heap.cc:4975

References v8::internal::kMaxInt, PromotedExternalMemorySize(), and PromotedSpaceSizeOfObjects().

Referenced by NextGCIsLikelyToBeFull(), OldGenerationCapacityAvailable(), OldGenerationSpaceAvailable(), v8::internal::IncrementalMarking::ResetStepCounters(), and v8::internal::IncrementalMarking::SpeedUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ promotion_queue()

PromotionQueue* v8::internal::Heap::promotion_queue ( )
inline

◆ property_cell_space()

◆ public_set_code_stubs()

void v8::internal::Heap::public_set_code_stubs ( UnseededNumberDictionary value)
inline

Definition at line 867 of file heap.h.

867  {
868  roots_[kCodeStubsRootIndex] = value;
869  }

References roots_.

◆ public_set_empty_script()

void v8::internal::Heap::public_set_empty_script ( Script script)
inline

Definition at line 883 of file heap.h.

883  {
884  roots_[kEmptyScriptRootIndex] = script;
885  }

References roots_.

◆ public_set_materialized_objects()

void v8::internal::Heap::public_set_materialized_objects ( FixedArray objects)
inline

Definition at line 891 of file heap.h.

891  {
892  roots_[kMaterializedObjectsRootIndex] = objects;
893  }

References roots_.

Referenced by v8::internal::MaterializedObjectStore::EnsureStackEntries().

+ Here is the caller graph for this function:

◆ public_set_non_monomorphic_cache()

void v8::internal::Heap::public_set_non_monomorphic_cache ( UnseededNumberDictionary value)
inline

Definition at line 879 of file heap.h.

879  {
880  roots_[kNonMonomorphicCacheRootIndex] = value;
881  }

References roots_.

Referenced by v8::internal::FillCache().

+ Here is the caller graph for this function:

◆ public_set_store_buffer_top()

void v8::internal::Heap::public_set_store_buffer_top ( Address top)
inline

Definition at line 887 of file heap.h.

887  {
888  roots_[kStoreBufferTopRootIndex] = reinterpret_cast<Smi*>(top);
889  }

References roots_.

Referenced by v8::internal::StoreBuffer::Compact(), v8::internal::StoreBuffer::Mark(), v8::internal::StoreBuffer::SetUp(), and v8::internal::StoreBuffer::TearDown().

+ Here is the caller graph for this function:

◆ QueueMemoryChunkForFree()

void v8::internal::Heap::QueueMemoryChunkForFree ( MemoryChunk chunk)

Definition at line 6019 of file heap.cc.

6019  {
6020  chunk->set_next_chunk(chunks_queued_for_free_);
6021  chunks_queued_for_free_ = chunk;
6022 }

References chunks_queued_for_free_, and v8::internal::MemoryChunk::set_next_chunk().

Referenced by v8::internal::LargeObjectSpace::FreeUnmarkedObjects(), and v8::internal::PagedSpace::ReleasePage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RecordCodeSubTypeStats()

void v8::internal::Heap::RecordCodeSubTypeStats ( int  code_sub_type,
int  code_age,
size_t  size 
)
inline

Definition at line 1306 of file heap.h.

1306  {
1307  int code_sub_type_index = FIRST_CODE_KIND_SUB_TYPE + code_sub_type;
1308  int code_age_index =
1310  DCHECK(code_sub_type_index >= FIRST_CODE_KIND_SUB_TYPE &&
1311  code_sub_type_index < FIRST_CODE_AGE_SUB_TYPE);
1312  DCHECK(code_age_index >= FIRST_CODE_AGE_SUB_TYPE &&
1313  code_age_index < OBJECT_STATS_COUNT);
1314  object_counts_[code_sub_type_index]++;
1315  object_sizes_[code_sub_type_index] += size;
1316  object_counts_[code_age_index]++;
1317  object_sizes_[code_age_index] += size;
1318  }

References DCHECK, FIRST_CODE_AGE_SUB_TYPE, FIRST_CODE_KIND_SUB_TYPE, v8::internal::Code::kFirstCodeAge, object_counts_, object_sizes_, OBJECT_STATS_COUNT, and size.

Referenced by v8::internal::MarkCompactMarkingVisitor::ObjectStatsTracker< MarkCompactMarkingVisitor::kVisitCode >::Visit().

+ Here is the caller graph for this function:

◆ RecordFixedArraySubTypeStats()

void v8::internal::Heap::RecordFixedArraySubTypeStats ( int  array_sub_type,
size_t  size 
)
inline

◆ RecordObjectStats()

void v8::internal::Heap::RecordObjectStats ( InstanceType  type,
size_t  size 
)
inline

Definition at line 1300 of file heap.h.

1300  {
1301  DCHECK(type <= LAST_TYPE);
1302  object_counts_[type]++;
1303  object_sizes_[type] += size;
1304  }

References DCHECK, v8::internal::LAST_TYPE, object_counts_, object_sizes_, and size.

Referenced by v8::internal::MarkCompactMarkingVisitor::ObjectStatsVisitBase().

+ Here is the caller graph for this function:

◆ RecordStats()

void v8::internal::Heap::RecordStats ( HeapStats stats,
bool  take_snapshot = false 
)

Definition at line 4929 of file heap.cc.

4929  {
4930  *stats->start_marker = HeapStats::kStartMarker;
4931  *stats->end_marker = HeapStats::kEndMarker;
4932  *stats->new_space_size = new_space_.SizeAsInt();
4933  *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
4934  *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects();
4935  *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
4936  *stats->old_data_space_size = old_data_space_->SizeOfObjects();
4937  *stats->old_data_space_capacity = old_data_space_->Capacity();
4938  *stats->code_space_size = code_space_->SizeOfObjects();
4939  *stats->code_space_capacity = code_space_->Capacity();
4940  *stats->map_space_size = map_space_->SizeOfObjects();
4941  *stats->map_space_capacity = map_space_->Capacity();
4942  *stats->cell_space_size = cell_space_->SizeOfObjects();
4943  *stats->cell_space_capacity = cell_space_->Capacity();
4944  *stats->property_cell_space_size = property_cell_space_->SizeOfObjects();
4945  *stats->property_cell_space_capacity = property_cell_space_->Capacity();
4946  *stats->lo_space_size = lo_space_->Size();
4947  isolate_->global_handles()->RecordStats(stats);
4948  *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
4949  *stats->memory_allocator_capacity =
4950  isolate()->memory_allocator()->Size() +
4952  *stats->os_error = base::OS::GetLastError();
4954  if (take_snapshot) {
4955  HeapIterator iterator(this);
4956  for (HeapObject* obj = iterator.next(); obj != NULL;
4957  obj = iterator.next()) {
4958  InstanceType type = obj->map()->instance_type();
4959  DCHECK(0 <= type && type <= LAST_TYPE);
4960  stats->objects_per_type[type]++;
4961  stats->size_per_type[type] += obj->Size();
4962  }
4963  }
4964 }
static int GetLastError()
void RecordStats(HeapStats *stats)
static const int kStartMarker
Definition: heap.h:2044
static const int kEndMarker
Definition: heap.h:2045
friend class HeapIterator
Definition: heap.h:2028

References v8::internal::MemoryAllocator::Available(), v8::internal::PagedSpace::Capacity(), v8::internal::NewSpace::Capacity(), cell_space_, v8::internal::HeapStats::cell_space_capacity, v8::internal::HeapStats::cell_space_size, code_space_, v8::internal::HeapStats::code_space_capacity, v8::internal::HeapStats::code_space_size, DCHECK, v8::internal::HeapStats::end_marker, v8::base::OS::GetLastError(), v8::internal::Isolate::global_handles(), HeapIterator, isolate(), isolate_, v8::internal::HeapStats::kEndMarker, v8::internal::HeapStats::kStartMarker, v8::internal::LAST_TYPE, lo_space_, v8::internal::HeapStats::lo_space_size, map_space_, v8::internal::HeapStats::map_space_capacity, v8::internal::HeapStats::map_space_size, v8::internal::Isolate::memory_allocator(), v8::internal::HeapStats::memory_allocator_capacity, v8::internal::HeapStats::memory_allocator_size, new_space_, v8::internal::HeapStats::new_space_capacity, v8::internal::HeapStats::new_space_size, NULL, v8::internal::HeapStats::objects_per_type, old_data_space_, v8::internal::HeapStats::old_data_space_capacity, v8::internal::HeapStats::old_data_space_size, old_pointer_space_, v8::internal::HeapStats::old_pointer_space_capacity, v8::internal::HeapStats::old_pointer_space_size, v8::internal::HeapStats::os_error, property_cell_space_, v8::internal::HeapStats::property_cell_space_capacity, v8::internal::HeapStats::property_cell_space_size, v8::internal::GlobalHandles::RecordStats(), v8::internal::MemoryAllocator::Size(), v8::internal::LargeObjectSpace::Size(), v8::internal::HeapStats::size_per_type, v8::internal::NewSpace::SizeAsInt(), v8::internal::PagedSpace::SizeOfObjects(), and v8::internal::HeapStats::start_marker.

+ Here is the call graph for this function:

◆ RememberUnmappedPage()

void v8::internal::Heap::RememberUnmappedPage ( Address  page,
bool  compacted 
)

Definition at line 6072 of file heap.cc.

6072  {
6073  uintptr_t p = reinterpret_cast<uintptr_t>(page);
6074  // Tag the page pointer to make it findable in the dump file.
6075  if (compacted) {
6076  p ^= 0xc1ead & (Page::kPageSize - 1); // Cleared.
6077  } else {
6078  p ^= 0x1d1ed & (Page::kPageSize - 1); // I died.
6079  }
6081  reinterpret_cast<Address>(p);
6084 }
Address remembered_unmapped_pages_[kRememberedUnmappedPages]
Definition: heap.h:1487
static const int kRememberedUnmappedPages
Definition: heap.h:1485

References v8::internal::Page::kPageSize, kRememberedUnmappedPages, remembered_unmapped_pages_, and remembered_unmapped_pages_index_.

Referenced by v8::internal::MemoryAllocator::Free(), and Heap().

+ Here is the caller graph for this function:

◆ RemoveGCEpilogueCallback()

void v8::internal::Heap::RemoveGCEpilogueCallback ( v8::Isolate::GCEpilogueCallback  callback)

Definition at line 5328 of file heap.cc.

5328  {
5329  DCHECK(callback != NULL);
5330  for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
5331  if (gc_epilogue_callbacks_[i].callback == callback) {
5332  gc_epilogue_callbacks_.Remove(i);
5333  return;
5334  }
5335  }
5336  UNREACHABLE();
5337 }

References DCHECK, gc_epilogue_callbacks_, NULL, and UNREACHABLE.

Referenced by v8::Isolate::RemoveGCEpilogueCallback(), and v8::V8::RemoveGCEpilogueCallback().

+ Here is the caller graph for this function:

◆ RemoveGCPrologueCallback()

void v8::internal::Heap::RemoveGCPrologueCallback ( v8::Isolate::GCPrologueCallback  callback)

Definition at line 5307 of file heap.cc.

5307  {
5308  DCHECK(callback != NULL);
5309  for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
5310  if (gc_prologue_callbacks_[i].callback == callback) {
5311  gc_prologue_callbacks_.Remove(i);
5312  return;
5313  }
5314  }
5315  UNREACHABLE();
5316 }

References DCHECK, gc_prologue_callbacks_, NULL, and UNREACHABLE.

Referenced by v8::Isolate::RemoveGCPrologueCallback(), and v8::V8::RemoveGCPrologueCallback().

+ Here is the caller graph for this function:

◆ RepairFreeListsAfterBoot()

void v8::internal::Heap::RepairFreeListsAfterBoot ( )

Definition at line 484 of file heap.cc.

484  {
485  PagedSpaces spaces(this);
486  for (PagedSpace* space = spaces.next(); space != NULL;
487  space = spaces.next()) {
488  space->RepairFreeListsAfterBoot();
489  }
490 }

References NULL, and space().

Referenced by v8::internal::Deserializer::Deserialize().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReportStatisticsAfterGC()

void v8::internal::Heap::ReportStatisticsAfterGC ( )
private

Definition at line 394 of file heap.cc.

394  {
 395 // Similar to the logic before GC, we use some complicated logic to ensure
 396 // that NewSpace statistics are logged exactly once when --log-gc is turned on.
397 #if defined(DEBUG)
398  if (FLAG_heap_stats) {
400  ReportHeapStatistics("After GC");
401  } else if (FLAG_log_gc) {
403  }
404 #else
405  if (FLAG_log_gc) new_space_.ReportStatistics();
406 #endif // DEBUG
407 }

References v8::internal::NewSpace::CollectStatistics(), new_space_, and v8::internal::NewSpace::ReportStatistics().

Referenced by GarbageCollectionEpilogue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReportStatisticsBeforeGC()

void v8::internal::Heap::ReportStatisticsBeforeGC ( )
private

Definition at line 291 of file heap.cc.

291  {
 292 // Heap::ReportHeapStatistics will also log NewSpace statistics when
 293 // compiled with --log-gc set. The following logic is used to avoid
 294 // double logging.
295 #ifdef DEBUG
296  if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
297  if (FLAG_heap_stats) {
298  ReportHeapStatistics("Before GC");
299  } else if (FLAG_log_gc) {
301  }
302  if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
303 #else
304  if (FLAG_log_gc) {
308  }
309 #endif // DEBUG
310 }

References v8::internal::NewSpace::ClearHistograms(), v8::internal::NewSpace::CollectStatistics(), new_space_, and v8::internal::NewSpace::ReportStatistics().

Referenced by GarbageCollectionPrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ReservedSemiSpaceSize()

int v8::internal::Heap::ReservedSemiSpaceSize ( )
inline

Definition at line 553 of file heap.h.

553 { return reserved_semispace_size_; }

References reserved_semispace_size_.

◆ ReserveSpace()

void v8::internal::Heap::ReserveSpace ( int sizes,
Address addresses 
)

Definition at line 920 of file heap.cc.

920  {
921  bool gc_performed = true;
922  int counter = 0;
923  static const int kThreshold = 20;
924  while (gc_performed && counter++ < kThreshold) {
925  gc_performed = false;
927  if (sizes[space] == 0) continue;
928  bool perform_gc = false;
929  if (space == LO_SPACE) {
930  perform_gc = !lo_space()->CanAllocateSize(sizes[space]);
931  } else {
932  AllocationResult allocation;
933  if (space == NEW_SPACE) {
934  allocation = new_space()->AllocateRaw(sizes[space]);
935  } else {
936  allocation = paged_space(space)->AllocateRaw(sizes[space]);
937  }
938  FreeListNode* node;
939  if (allocation.To(&node)) {
940  // Mark with a free list node, in case we have a GC before
941  // deserializing.
942  node->set_size(this, sizes[space]);
944  locations_out[space] = node->address();
945  } else {
946  perform_gc = true;
947  }
948  }
949  if (perform_gc) {
950  if (space == NEW_SPACE) {
952  "failed to reserve space in the new space");
953  } else {
955  this, static_cast<AllocationSpace>(space),
956  "failed to reserve space in paged or large object space");
957  }
958  gc_performed = true;
959  break; // Abort for-loop over spaces and retry.
960  }
961  }
962  }
963 
964  if (gc_performed) {
965  // Failed to reserve the space after several attempts.
966  V8::FatalProcessOutOfMemory("Heap::ReserveSpace");
967  }
968 }
PagedSpace * paged_space(int idx)
Definition: heap.h:601
bool CanAllocateSize(int size)
Definition: spaces.h:2735
static const int kNumberOfPreallocatedSpaces
Definition: serialize.h:152
static bool AbortIncrementalMarkingAndCollectGarbage(Heap *heap, AllocationSpace space, const char *gc_reason=NULL)
Definition: heap.cc:911

References v8::internal::AbortIncrementalMarkingAndCollectGarbage(), v8::internal::HeapObject::address(), v8::internal::PagedSpace::AllocateRaw(), v8::internal::LargeObjectSpace::CanAllocateSize(), CollectGarbage(), DCHECK, v8::internal::V8::FatalProcessOutOfMemory(), v8::internal::SerializerDeserializer::kNumberOfPreallocatedSpaces, v8::internal::SerializerDeserializer::kNumberOfSpaces, v8::internal::LO_SPACE, lo_space(), v8::internal::NEW_SPACE, new_space(), paged_space(), v8::internal::FreeListNode::set_size(), space(), and v8::internal::AllocationResult::To().

Referenced by v8::internal::Deserializer::Deserialize(), and v8::internal::Deserializer::DeserializePartial().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ResetAllAllocationSitesDependentCode()

void v8::internal::Heap::ResetAllAllocationSitesDependentCode ( PretenureFlag  flag)
private

Definition at line 1667 of file heap.cc.

1667  {
1668  DisallowHeapAllocation no_allocation_scope;
1669  Object* cur = allocation_sites_list();
1670  bool marked = false;
1671  while (cur->IsAllocationSite()) {
1672  AllocationSite* casted = AllocationSite::cast(cur);
1673  if (casted->GetPretenureMode() == flag) {
1674  casted->ResetPretenureDecision();
1675  casted->set_deopt_dependent_code(true);
1676  marked = true;
1677  }
1678  cur = casted->weak_next();
1679  }
1680  if (marked) isolate_->stack_guard()->RequestDeoptMarkedAllocationSites();
1681 }
kFeedbackVectorOffset flag
Definition: objects-inl.h:5418

References allocation_sites_list(), v8::internal::flag, v8::internal::AllocationSite::GetPretenureMode(), isolate_, v8::internal::AllocationSite::ResetPretenureDecision(), v8::internal::AllocationSite::set_deopt_dependent_code(), and v8::internal::Isolate::stack_guard().

Referenced by EvaluateOldSpaceLocalPretenuring().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RightTrimFixedArray()

template<Heap::InvocationMode mode>
void v8::internal::Heap::RightTrimFixedArray ( FixedArrayBase obj,
int  elements_to_trim 
)

Definition at line 3322 of file heap.cc.

3322  {
3323  const int element_size = object->IsFixedArray() ? kPointerSize : kDoubleSize;
3324  const int bytes_to_trim = elements_to_trim * element_size;
3325 
3326  // For now this trick is only applied to objects in new and paged space.
3327  DCHECK(object->map() != fixed_cow_array_map());
3328 
3329  const int len = object->length();
3330  DCHECK(elements_to_trim < len);
3331 
3332  // Calculate location of new array end.
3333  Address new_end = object->address() + object->Size() - bytes_to_trim;
3334 
3335  // Technically in new space this write might be omitted (except for
3336  // debug mode which iterates through the heap), but to play safer
3337  // we still do it.
3338  // We do not create a filler for objects in large object space.
3339  // TODO(hpayer): We should shrink the large object page if the size
3340  // of the object changed significantly.
3341  if (!lo_space()->Contains(object)) {
3342  CreateFillerObjectAt(new_end, bytes_to_trim);
3343  }
3344 
3345  // Initialize header of the trimmed array. We are storing the new length
3346  // using release store after creating a filler for the left-over space to
3347  // avoid races with the sweeper thread.
3348  object->synchronized_set_length(len - elements_to_trim);
3349 
3350  // Maintain consistency of live bytes during incremental marking
3351  AdjustLiveBytes(object->address(), -bytes_to_trim, mode);
3352 
3353  // Notify the heap profiler of change in object layout. The array may not be
3354  // moved during GC, and size has to be adjusted nevertheless.
3355  HeapProfiler* profiler = isolate()->heap_profiler();
3356  if (profiler->is_tracking_allocations()) {
3357  profiler->UpdateObjectSizeEvent(object->address(), object->Size());
3358  }
3359 }

References v8::internal::HeapObject::address(), AdjustLiveBytes(), Contains(), CreateFillerObjectAt(), DCHECK, v8::internal::Isolate::heap_profiler(), isolate(), v8::internal::kDoubleSize, v8::internal::kPointerSize, lo_space(), v8::internal::HeapObject::map(), mode(), profiler(), and v8::internal::HeapObject::Size().

Referenced by v8::internal::MarkCompactCollector::ClearMapTransitions(), v8::internal::SharedFunctionInfo::EvictFromOptimizedCodeMap(), v8::internal::FastElementsAccessor< FastElementsAccessorSubclass, KindTraits >::SetLengthWithoutNormalize(), v8::internal::FixedArray::Shrink(), v8::internal::MarkCompactCollector::TrimDescriptorArray(), v8::internal::MarkCompactCollector::TrimEnumCache(), and v8::internal::SharedFunctionInfo::TrimOptimizedCodeMap().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ RootCanBeTreatedAsConstant()

bool v8::internal::Heap::RootCanBeTreatedAsConstant ( RootListIndex  root_index)

Definition at line 2940 of file heap.cc.

2940  {
2941  return !RootCanBeWrittenAfterInitialization(root_index) &&
2942  !InNewSpace(roots_array_start()[root_index]);
2943 }
Object ** roots_array_start()
Definition: heap.h:896
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index)
Definition: heap.cc:2912

References InNewSpace(), RootCanBeWrittenAfterInitialization(), and roots_array_start().

+ Here is the call graph for this function:

◆ RootCanBeWrittenAfterInitialization()

bool v8::internal::Heap::RootCanBeWrittenAfterInitialization ( Heap::RootListIndex  root_index)
static

Definition at line 2912 of file heap.cc.

2912  {
2913  RootListIndex writable_roots[] = {
2914  kStoreBufferTopRootIndex,
2915  kStackLimitRootIndex,
2916  kNumberStringCacheRootIndex,
2917  kInstanceofCacheFunctionRootIndex,
2918  kInstanceofCacheMapRootIndex,
2919  kInstanceofCacheAnswerRootIndex,
2920  kCodeStubsRootIndex,
2921  kNonMonomorphicCacheRootIndex,
2922  kPolymorphicCodeCacheRootIndex,
2923  kLastScriptIdRootIndex,
2924  kEmptyScriptRootIndex,
2925  kRealStackLimitRootIndex,
2926  kArgumentsAdaptorDeoptPCOffsetRootIndex,
2927  kConstructStubDeoptPCOffsetRootIndex,
2928  kGetterStubDeoptPCOffsetRootIndex,
2929  kSetterStubDeoptPCOffsetRootIndex,
2931  };
2932 
2933  for (unsigned int i = 0; i < arraysize(writable_roots); i++) {
2934  if (root_index == writable_roots[i]) return true;
2935  }
2936  return false;
2937 }

References arraysize, and kStringTableRootIndex.

Referenced by RootCanBeTreatedAsConstant().

+ Here is the caller graph for this function:

◆ RootIndexForEmptyExternalArray()

Heap::RootListIndex v8::internal::Heap::RootIndexForEmptyExternalArray ( ElementsKind  kind)

Definition at line 3145 of file heap.cc.

3146  {
3147  switch (elementsKind) {
3148 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3149  case EXTERNAL_##TYPE##_ELEMENTS: \
3150  return kEmptyExternal##Type##ArrayRootIndex;
3151 
3153 #undef ELEMENT_KIND_TO_ROOT_INDEX
3154 
3155  default:
3156  UNREACHABLE();
3157  return kUndefinedValueRootIndex;
3158  }
3159 }
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size)

References ELEMENT_KIND_TO_ROOT_INDEX, TYPED_ARRAYS, and UNREACHABLE.

Referenced by EmptyExternalArrayForMap().

+ Here is the caller graph for this function:

◆ RootIndexForEmptyFixedTypedArray()

Heap::RootListIndex v8::internal::Heap::RootIndexForEmptyFixedTypedArray ( ElementsKind  kind)

Definition at line 3162 of file heap.cc.

3163  {
3164  switch (elementsKind) {
3165 #define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3166  case TYPE##_ELEMENTS: \
3167  return kEmptyFixed##Type##ArrayRootIndex;
3168 
3170 #undef ELEMENT_KIND_TO_ROOT_INDEX
3171  default:
3172  UNREACHABLE();
3173  return kUndefinedValueRootIndex;
3174  }
3175 }

References ELEMENT_KIND_TO_ROOT_INDEX, TYPED_ARRAYS, and UNREACHABLE.

Referenced by EmptyFixedTypedArrayForMap().

+ Here is the caller graph for this function:

◆ RootIndexForExternalArrayType()

Heap::RootListIndex v8::internal::Heap::RootIndexForExternalArrayType ( ExternalArrayType  array_type)

Definition at line 3106 of file heap.cc.

3107  {
3108  switch (array_type) {
3109 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3110  case kExternal##Type##Array: \
3111  return kExternal##Type##ArrayMapRootIndex;
3112 
3114 #undef ARRAY_TYPE_TO_ROOT_INDEX
3115 
3116  default:
3117  UNREACHABLE();
3118  return kUndefinedValueRootIndex;
3119  }
3120 }
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size)

References ARRAY_TYPE_TO_ROOT_INDEX, TYPED_ARRAYS, and UNREACHABLE.

Referenced by MapForExternalArrayType().

+ Here is the caller graph for this function:

◆ RootIndexForFixedTypedArray()

Heap::RootListIndex v8::internal::Heap::RootIndexForFixedTypedArray ( ExternalArrayType  array_type)

Definition at line 3128 of file heap.cc.

3129  {
3130  switch (array_type) {
3131 #define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
3132  case kExternal##Type##Array: \
3133  return kFixed##Type##ArrayMapRootIndex;
3134 
3136 #undef ARRAY_TYPE_TO_ROOT_INDEX
3137 
3138  default:
3139  UNREACHABLE();
3140  return kUndefinedValueRootIndex;
3141  }
3142 }

References ARRAY_TYPE_TO_ROOT_INDEX, TYPED_ARRAYS, and UNREACHABLE.

Referenced by MapForFixedTypedArray().

+ Here is the caller graph for this function:

◆ roots_array_start()

Object** v8::internal::Heap::roots_array_start ( )
inline

Definition at line 896 of file heap.h.

896 { return roots_; }

References roots_.

Referenced by v8::internal::Deserializer::ReadChunk(), RootCanBeTreatedAsConstant(), v8::internal::Serializer::RootIndex(), v8::internal::Serializer::ShouldBeSkipped(), and v8::internal::Serializer::VisitPointers().

+ Here is the caller graph for this function:

◆ Scavenge()

void v8::internal::Heap::Scavenge ( )
private

Definition at line 1420 of file heap.cc.

1420  {
1421  RelocationLock relocation_lock(this);
1422 
1423 #ifdef VERIFY_HEAP
1424  if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
1425 #endif
1426 
1427  gc_state_ = SCAVENGE;
1428 
1429  // Implements Cheney's copying algorithm
1430  LOG(isolate_, ResourceEvent("scavenge", "begin"));
1431 
1432  // Clear descriptor cache.
1434 
1435  // Used for updating survived_since_last_expansion_ at function end.
1436  intptr_t survived_watermark = PromotedSpaceSizeOfObjects();
1437 
1439 
1441 
1442  // Flip the semispaces. After flipping, to space is empty, from space has
1443  // live objects.
1444  new_space_.Flip();
1446 
1447  // We need to sweep newly copied objects which can be either in the
1448  // to space or promoted to the old generation. For to-space
1449  // objects, we treat the bottom of the to space as a queue. Newly
1450  // copied and unswept objects lie between a 'front' mark and the
1451  // allocation pointer.
1452  //
1453  // Promoted objects can go into various old-generation spaces, and
1454  // can be allocated internally in the spaces (from the free list).
1455  // We treat the top of the to space as a queue of addresses of
1456  // promoted objects. The addresses of newly promoted and unswept
1457  // objects lie between a 'front' mark and a 'rear' mark that is
1458  // updated as a side effect of promoting an object.
1459  //
1460  // There is guaranteed to be enough room at the top of the to space
1461  // for the addresses of promoted objects: every object promoted
1462  // frees up its size in bytes from the top of the new space, and
1463  // objects are at least one pointer in size.
1464  Address new_space_front = new_space_.ToSpaceStart();
1466 
1467 #ifdef DEBUG
1468  store_buffer()->Clean();
1469 #endif
1470 
1471  ScavengeVisitor scavenge_visitor(this);
1472  // Copy roots.
1473  IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1474 
1475  // Copy objects reachable from the old generation.
1476  {
1477  StoreBufferRebuildScope scope(this, store_buffer(),
1480  }
1481 
1482  // Copy objects reachable from simple cells by scavenging cell values
1483  // directly.
1484  HeapObjectIterator cell_iterator(cell_space_);
1485  for (HeapObject* heap_object = cell_iterator.Next(); heap_object != NULL;
1486  heap_object = cell_iterator.Next()) {
1487  if (heap_object->IsCell()) {
1488  Cell* cell = Cell::cast(heap_object);
1489  Address value_address = cell->ValueAddress();
1490  scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1491  }
1492  }
1493 
1494  // Copy objects reachable from global property cells by scavenging global
1495  // property cell values directly.
1496  HeapObjectIterator js_global_property_cell_iterator(property_cell_space_);
1497  for (HeapObject* heap_object = js_global_property_cell_iterator.Next();
1498  heap_object != NULL;
1499  heap_object = js_global_property_cell_iterator.Next()) {
1500  if (heap_object->IsPropertyCell()) {
1501  PropertyCell* cell = PropertyCell::cast(heap_object);
1502  Address value_address = cell->ValueAddress();
1503  scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1504  Address type_address = cell->TypeAddress();
1505  scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(type_address));
1506  }
1507  }
1508 
1509  // Copy objects reachable from the encountered weak collections list.
1510  scavenge_visitor.VisitPointer(&encountered_weak_collections_);
1511 
1512  // Copy objects reachable from the code flushing candidates list.
1514  if (collector->is_code_flushing_enabled()) {
1515  collector->code_flusher()->IteratePointersToFromSpace(&scavenge_visitor);
1516  }
1517 
1518  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1519 
1520  while (isolate()->global_handles()->IterateObjectGroups(
1521  &scavenge_visitor, &IsUnscavengedHeapObject)) {
1522  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1523  }
1526 
1530  &scavenge_visitor);
1531  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1532 
1535 
1537 
1539 
1540  ScavengeWeakObjectRetainer weak_object_retainer(this);
1541  ProcessWeakReferences(&weak_object_retainer);
1542 
1543  DCHECK(new_space_front == new_space_.top());
1544 
1545  // Set age mark.
1547 
1550 
1551  // Update how much has survived scavenge.
1552  IncrementYoungSurvivorsCounter(static_cast<int>(
1553  (PromotedSpaceSizeOfObjects() - survived_watermark) + new_space_.Size()));
1554 
1555  LOG(isolate_, ResourceEvent("scavenge", "end"));
1556 
1557  gc_state_ = NOT_IN_GC;
1558 }
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
void IterateRoots(ObjectVisitor *v, VisitMode mode)
Definition: heap.cc:4722
Address DoScavenge(ObjectVisitor *scavenge_visitor, Address new_space_front)
Definition: heap.cc:1743
void SelectScavengingVisitorsTable()
Definition: heap.cc:2224
static String * UpdateNewSpaceReferenceInExternalStringTableEntry(Heap *heap, Object **pointer)
Definition: heap.cc:1561
void ProcessWeakReferences(WeakObjectRetainer *retainer)
Definition: heap.cc:1626
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
Definition: heap.cc:1576
void IncrementYoungSurvivorsCounter(int survived)
Definition: heap.h:1130
friend class MarkCompactCollector
Definition: heap.h:2030
void LowerInlineAllocationLimit(intptr_t step)
Definition: spaces.h:2473
void set_age_mark(Address mark)
Definition: spaces.h:2441
Address ToSpaceStart()
Definition: spaces.h:2488
void ResetAllocationInfo()
Definition: spaces.cc:1311
intptr_t inline_allocation_limit_step()
Definition: spaces.h:2544
void IteratePointersToNewSpace(ObjectSlotCallback callback)
static bool IsUnscavengedHeapObject(Heap *heap, Object **p)
Definition: heap.cc:1303

References cell_space_, v8::internal::DescriptorLookupCache::Clear(), v8::internal::MarkCompactCollector::code_flusher(), DCHECK, v8::internal::Isolate::descriptor_lookup_cache(), v8::internal::PromotionQueue::Destroy(), DoScavenge(), encountered_weak_collections_, v8::internal::NewSpace::Flip(), gc_state_, v8::internal::Isolate::global_handles(), v8::internal::GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(), incremental_marking(), IncrementYoungSurvivorsCounter(), v8::internal::PromotionQueue::Initialize(), v8::internal::NewSpace::inline_allocation_limit_step(), v8::internal::MarkCompactCollector::is_code_flushing_enabled(), isolate(), isolate_, v8::internal::IsUnscavengedHeapObject(), v8::internal::GlobalHandles::IterateNewSpaceWeakIndependentRoots(), v8::internal::CodeFlusher::IteratePointersToFromSpace(), v8::internal::StoreBuffer::IteratePointersToNewSpace(), IterateRoots(), LOG, v8::internal::NewSpace::LowerInlineAllocationLimit(), mark_compact_collector(), new_space_, v8::internal::HeapObjectIterator::Next(), NOT_IN_GC, NULL, v8::internal::IncrementalMarking::PrepareForScavenge(), ProcessWeakReferences(), PromotedSpaceSizeOfObjects(), promotion_queue_, property_cell_space_, v8::internal::GlobalHandles::RemoveImplicitRefGroups(), v8::internal::GlobalHandles::RemoveObjectGroups(), v8::internal::NewSpace::ResetAllocationInfo(), SCAVENGE, ScavengeObject(), ScavengeStoreBufferCallback(), SelectScavengingVisitorsTable(), v8::internal::NewSpace::set_age_mark(), v8::internal::NewSpace::Size(), store_buffer(), v8::internal::NewSpace::top(), v8::internal::NewSpace::ToSpaceStart(), v8::internal::PropertyCell::TypeAddress(), v8::internal::IncrementalMarking::UpdateMarkingDequeAfterScavenge(), UpdateNewSpaceReferenceInExternalStringTableEntry(), UpdateNewSpaceReferencesInExternalStringTable(), v8::internal::Cell::ValueAddress(), v8::internal::VISIT_ALL_IN_SCAVENGE, and v8::internal::ScavengeVisitor::VisitPointer().

Referenced by PerformGarbageCollection().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ScavengeObject()

void v8::internal::Heap::ScavengeObject ( HeapObject **  p,
HeapObject object 
)
inlinestatic

Definition at line 554 of file heap-inl.h.

554  {
555  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
556 
557  // We use the first word (where the map pointer usually is) of a heap
558  // object to record the forwarding pointer. A forwarding pointer can
559  // point to an old space, the code space, or the to space of the new
560  // generation.
561  MapWord first_word = object->map_word();
562 
563  // If the first word is a forwarding address, the object has already been
564  // copied.
565  if (first_word.IsForwardingAddress()) {
566  HeapObject* dest = first_word.ToForwardingAddress();
567  DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
568  *p = dest;
569  return;
570  }
571 
573 
574  // AllocationMementos are unrooted and shouldn't survive a scavenge
575  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
576  // Call the slow part of scavenge object.
577  return ScavengeObjectSlow(p, object);
578 }
static void UpdateAllocationSiteFeedback(HeapObject *object, ScratchpadSlotMode mode)
Definition: heap-inl.h:536
static void ScavengeObjectSlow(HeapObject **p, HeapObject *object)
Definition: heap.cc:2262

References DCHECK, v8::internal::HeapObject::GetHeap(), v8::internal::HeapObject::GetIsolate(), v8::internal::Isolate::heap(), IGNORE_SCRATCHPAD_SLOT, InFromSpace(), v8::internal::HeapObject::map(), ScavengeObjectSlow(), and UpdateAllocationSiteFeedback().

Referenced by DoScavenge(), Scavenge(), ScavengePointer(), v8::internal::ScavengeVisitor::ScavengePointer(), and v8::internal::NewSpaceScavenger::VisitPointer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ScavengeObjectSlow()

void v8::internal::Heap::ScavengeObjectSlow ( HeapObject **  p,
HeapObject object 
)
staticprivate

Definition at line 2262 of file heap.cc.

2262  {
2263  SLOW_DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
2264  MapWord first_word = object->map_word();
2265  SLOW_DCHECK(!first_word.IsForwardingAddress());
2266  Map* map = first_word.ToMap();
2267  map->GetHeap()->DoScavengeObject(map, p, object);
2268 }

References v8::internal::HeapObject::GetIsolate(), v8::internal::Isolate::heap(), InFromSpace(), map, and SLOW_DCHECK.

Referenced by ScavengeObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ScavengePointer()

void v8::internal::Heap::ScavengePointer ( HeapObject **  p)
inlinestatic

Definition at line 494 of file heap-inl.h.

494 { ScavengeObject(p, *p); }

References ScavengeObject().

+ Here is the call graph for this function:

◆ ScavengeStoreBufferCallback()

void v8::internal::Heap::ScavengeStoreBufferCallback ( Heap heap,
MemoryChunk page,
StoreBufferEvent  event 
)
staticprivate

Definition at line 1309 of file heap.cc.

1310  {
1311  heap->store_buffer_rebuilder_.Callback(page, event);
1312 }

References v8::internal::StoreBufferRebuilder::Callback(), and store_buffer_rebuilder_.

Referenced by DoScavenge(), v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates(), and Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SelectGarbageCollector()

GarbageCollector v8::internal::Heap::SelectGarbageCollector ( AllocationSpace  space,
const char **  reason 
)
private

Definition at line 236 of file heap.cc.

237  {
238  // Is global GC requested?
239  if (space != NEW_SPACE) {
240  isolate_->counters()->gc_compactor_caused_by_request()->Increment();
241  *reason = "GC in old space requested";
242  return MARK_COMPACTOR;
243  }
244 
245  if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
246  *reason = "GC in old space forced by flags";
247  return MARK_COMPACTOR;
248  }
249 
250  // Is enough data promoted to justify a global GC?
252  isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
253  *reason = "promotion limit reached";
254  return MARK_COMPACTOR;
255  }
256 
257  // Have allocation in OLD and LO failed?
258  if (old_gen_exhausted_) {
259  isolate_->counters()
260  ->gc_compactor_caused_by_oldspace_exhaustion()
261  ->Increment();
262  *reason = "old generations exhausted";
263  return MARK_COMPACTOR;
264  }
265 
266  // Is there enough space left in OLD to guarantee that a scavenge can
267  // succeed?
268  //
269  // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
270  // for object promotion. It counts only the bytes that the memory
271  // allocator has not yet allocated from the OS and assigned to any space,
272  // and does not count available bytes already in the old space or code
273  // space. Undercounting is safe---we may get an unrequested full GC when
274  // a scavenge would have succeeded.
276  isolate_->counters()
277  ->gc_compactor_caused_by_oldspace_exhaustion()
278  ->Increment();
279  *reason = "scavenge might not succeed";
280  return MARK_COMPACTOR;
281  }
282 
283  // Default
284  *reason = NULL;
285  return SCAVENGER;
286 }
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:364

References v8::internal::Isolate::counters(), gc_count_, isolate_, v8::internal::MARK_COMPACTOR, v8::internal::MemoryAllocator::MaxAvailable(), v8::internal::Isolate::memory_allocator(), v8::internal::NEW_SPACE, new_space_, NULL, old_gen_exhausted_, OldGenerationAllocationLimitReached(), v8::internal::SCAVENGER, v8::internal::NewSpace::Size(), and space().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SelectScavengingVisitorsTable()

void v8::internal::Heap::SelectScavengingVisitorsTable ( )
private

Definition at line 2224 of file heap.cc.

2224  {
2225  bool logging_and_profiling =
2226  FLAG_verify_predictable || isolate()->logger()->is_logging() ||
2227  isolate()->cpu_profiler()->is_profiling() ||
2228  (isolate()->heap_profiler() != NULL &&
2230 
2231  if (!incremental_marking()->IsMarking()) {
2232  if (!logging_and_profiling) {
2233  scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
2235  } else {
2236  scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
2238  }
2239  } else {
2240  if (!logging_and_profiling) {
2241  scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
2243  } else {
2244  scavenging_visitors_table_.CopyFrom(ScavengingVisitor<
2246  }
2247 
2248  if (incremental_marking()->IsCompacting()) {
2249  // When compacting forbid short-circuiting of cons-strings.
2250  // Scavenging code relies on the fact that new space object
2251  // can't be evacuated into evacuation candidate but
2252  // short-circuiting violates this assumption.
2254  StaticVisitorBase::kVisitShortcutCandidate,
2256  StaticVisitorBase::kVisitConsString));
2257  }
2258  }
2259 }
bool is_tracking_object_moves() const
Definition: heap-profiler.h:61
bool is_logging()
Definition: log.h:311
Callback GetVisitorById(StaticVisitorBase::VisitorId id)
void Register(StaticVisitorBase::VisitorId id, Callback callback)
void CopyFrom(VisitorDispatchTable *other)
@ IGNORE_MARKS
Definition: heap.cc:1818
@ TRANSFER_MARKS
Definition: heap.cc:1818
@ LOGGING_AND_PROFILING_ENABLED
Definition: heap.cc:1813
@ LOGGING_AND_PROFILING_DISABLED
Definition: heap.cc:1814

References v8::internal::VisitorDispatchTable< Callback >::CopyFrom(), v8::internal::Isolate::cpu_profiler(), v8::internal::VisitorDispatchTable< Callback >::GetVisitorById(), v8::internal::Isolate::heap_profiler(), v8::internal::IGNORE_MARKS, incremental_marking(), v8::internal::Logger::is_logging(), v8::internal::HeapProfiler::is_tracking_object_moves(), v8::internal::IncrementalMarking::IsCompacting(), isolate(), v8::internal::Isolate::logger(), v8::internal::LOGGING_AND_PROFILING_DISABLED, v8::internal::LOGGING_AND_PROFILING_ENABLED, NULL, v8::internal::VisitorDispatchTable< Callback >::Register(), scavenging_visitors_table_, and v8::internal::TRANSFER_MARKS.

Referenced by Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SelectSpace()

static AllocationSpace v8::internal::Heap::SelectSpace ( int  object_size,
AllocationSpace  preferred_old_space,
PretenureFlag  pretenure 
)
inlinestaticprivate

◆ set_allocation_sites_list()

void v8::internal::Heap::set_allocation_sites_list ( Object *  object)
inline

Definition at line 798 of file heap.h.

798  {
799  allocation_sites_list_ = object;
800  }

References allocation_sites_list_.

Referenced by CreateHeapObjects(), v8::internal::Deserializer::Deserialize(), Heap(), ProcessAllocationSites(), and v8::internal::Deserializer::RelinkAllocationSite().

+ Here is the caller graph for this function:

◆ set_array_buffers_list()

void v8::internal::Heap::set_array_buffers_list ( Object *  object)
inline

Definition at line 795 of file heap.h.

795 { array_buffers_list_ = object; }

References array_buffers_list_.

Referenced by CreateHeapObjects(), v8::internal::Deserializer::Deserialize(), Heap(), ProcessArrayBuffers(), v8::internal::Runtime::SetupArrayBuffer(), and TearDownArrayBuffers().

+ Here is the caller graph for this function:

◆ set_encountered_weak_collections()

void v8::internal::Heap::set_encountered_weak_collections ( Object *  weak_collection)
inline

Definition at line 808 of file heap.h.

808  {
809  encountered_weak_collections_ = weak_collection;
810  }

References encountered_weak_collections_.

Referenced by v8::internal::MarkCompactCollector::AbortWeakCollections(), v8::internal::MarkCompactCollector::ClearWeakCollections(), Heap(), and v8::internal::StaticMarkingVisitor< IncrementalMarkingMarkingVisitor >::MarkInlinedFunctionsCode().

+ Here is the caller graph for this function:

◆ set_native_contexts_list()

void v8::internal::Heap::set_native_contexts_list ( Object *  object)
inline

Definition at line 790 of file heap.h.

790  {
791  native_contexts_list_ = object;
792  }

References native_contexts_list_.

Referenced by v8::internal::AddToWeakNativeContextList(), CreateHeapObjects(), v8::internal::Deserializer::Deserialize(), Heap(), and ProcessNativeContexts().

+ Here is the caller graph for this function:

◆ set_weak_object_to_code_table()

void v8::internal::Heap::set_weak_object_to_code_table ( Object *  value)
inlineprivate

Definition at line 1935 of file heap.h.

1935  {
1936  DCHECK(!InNewSpace(value));
1938  }

References DCHECK, InNewSpace(), and weak_object_to_code_table_.

Referenced by AddWeakObjectToCodeDependency(), EnsureWeakObjectToCodeTable(), and InitializeWeakObjectToCodeTable().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetArgumentsAdaptorDeoptPCOffset()

void v8::internal::Heap::SetArgumentsAdaptorDeoptPCOffset ( int  pc_offset)
inline

Definition at line 1243 of file heap.h.

1243  {
1244  DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
1245  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
1246  }

References DCHECK, and v8::internal::Smi::FromInt().

+ Here is the call graph for this function:

◆ SetConstructStubDeoptPCOffset()

void v8::internal::Heap::SetConstructStubDeoptPCOffset ( int  pc_offset)
inline

Definition at line 1248 of file heap.h.

1248  {
1249  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
1250  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1251  }

References DCHECK, and v8::internal::Smi::FromInt().

+ Here is the call graph for this function:

◆ SetGetterStubDeoptPCOffset()

void v8::internal::Heap::SetGetterStubDeoptPCOffset ( int  pc_offset)
inline

Definition at line 1253 of file heap.h.

1253  {
1254  DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
1255  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1256  }

References DCHECK, and v8::internal::Smi::FromInt().

+ Here is the call graph for this function:

◆ SetSetterStubDeoptPCOffset()

void v8::internal::Heap::SetSetterStubDeoptPCOffset ( int  pc_offset)
inline

Definition at line 1258 of file heap.h.

1258  {
1259  DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
1260  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
1261  }

References DCHECK, and v8::internal::Smi::FromInt().

+ Here is the call graph for this function:

◆ SetStackLimits()

void v8::internal::Heap::SetStackLimits ( )

Definition at line 5173 of file heap.cc.

5173  {
5174  DCHECK(isolate_ != NULL);
5175  DCHECK(isolate_ == isolate());
5176  // On 64 bit machines, pointers are generally out of range of Smis. We write
5177  // something that looks like an out of range Smi to the GC.
5178 
5179  // Set up the special root array entries containing the stack limits.
5180  // These are actually addresses, but the tag makes the GC ignore it.
5181  roots_[kStackLimitRootIndex] = reinterpret_cast<Object*>(
5182  (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
5183  roots_[kRealStackLimitRootIndex] = reinterpret_cast<Object*>(
5184  (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
5185 }
const intptr_t kSmiTagMask
Definition: v8.h:5744
const int kSmiTag
Definition: v8.h:5742

References DCHECK, isolate(), isolate_, v8::internal::kSmiTag, v8::internal::kSmiTagMask, NULL, roots_, and v8::internal::Isolate::stack_guard().

Referenced by v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetUp()

bool v8::internal::Heap::SetUp ( )

Definition at line 5059 of file heap.cc.

5059  {
5060 #ifdef DEBUG
5061  allocation_timeout_ = FLAG_gc_interval;
5062 #endif
5063 
5064  // Initialize heap spaces and initial maps and objects. Whenever something
5065  // goes wrong, just return false. The caller should check the results and
5066  // call Heap::TearDown() to release allocated memory.
5067  //
5068  // If the heap is not yet configured (e.g. through the API), configure it.
5069  // Configuration is based on the flags new-space-size (really the semispace
5070  // size) and old-space-size if set or the initial values of semispace_size_
5071  // and old_generation_size_ otherwise.
5072  if (!configured_) {
5073  if (!ConfigureHeapDefault()) return false;
5074  }
5075 
5076  base::CallOnce(&initialize_gc_once, &InitializeGCOnce);
5077 
5078  MarkMapPointersAsEncoded(false);
5079 
5080  // Set up memory allocator.
5082  return false;
5083 
5084  // Set up new space.
5086  return false;
5087  }
5089 
5090  // Initialize old pointer space.
5091  old_pointer_space_ = new OldSpace(this, max_old_generation_size_,
5093  if (old_pointer_space_ == NULL) return false;
5094  if (!old_pointer_space_->SetUp()) return false;
5095 
5096  // Initialize old data space.
5098  NOT_EXECUTABLE);
5099  if (old_data_space_ == NULL) return false;
5100  if (!old_data_space_->SetUp()) return false;
5101 
5102  if (!isolate_->code_range()->SetUp(code_range_size_)) return false;
5103 
5104  // Initialize the code space, set its maximum capacity to the old
5105  // generation size. It needs executable memory.
5106  code_space_ =
5107  new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
5108  if (code_space_ == NULL) return false;
5109  if (!code_space_->SetUp()) return false;
5110 
5111  // Initialize map space.
5112  map_space_ = new MapSpace(this, max_old_generation_size_, MAP_SPACE);
5113  if (map_space_ == NULL) return false;
5114  if (!map_space_->SetUp()) return false;
5115 
5116  // Initialize simple cell space.
5117  cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
5118  if (cell_space_ == NULL) return false;
5119  if (!cell_space_->SetUp()) return false;
5120 
5121  // Initialize global property cell space.
5122  property_cell_space_ = new PropertyCellSpace(this, max_old_generation_size_,
5124  if (property_cell_space_ == NULL) return false;
5125  if (!property_cell_space_->SetUp()) return false;
5126 
5127  // The large object code space may contain code or data. We set the memory
5128  // to be non-executable here for safety, but this means we need to enable it
5129  // explicitly when allocating large code objects.
5130  lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
5131  if (lo_space_ == NULL) return false;
5132  if (!lo_space_->SetUp()) return false;
5133 
5134  // Set up the seed that is used to randomize the string hash function.
5135  DCHECK(hash_seed() == 0);
5136  if (FLAG_randomize_hashes) {
5137  if (FLAG_hash_seed == 0) {
5138  int rnd = isolate()->random_number_generator()->NextInt();
5139  set_hash_seed(Smi::FromInt(rnd & Name::kHashBitMask));
5140  } else {
5141  set_hash_seed(Smi::FromInt(FLAG_hash_seed));
5142  }
5143  }
5144 
5145  LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
5146  LOG(isolate_, IntPtrTEvent("heap-available", Available()));
5147 
5148  store_buffer()->SetUp();
5149 
5151 
5152  return true;
5153 }
bool SetUp(size_t requested_size)
Definition: spaces.cc:99
intptr_t MaxExecutableSize()
Definition: heap.h:556
intptr_t MaxReserved()
Definition: heap.h:549
bool ConfigureHeapDefault()
Definition: heap.cc:4926
void MarkMapPointersAsEncoded(bool encoded)
Definition: heap.h:1601
bool SetUp(intptr_t max_capacity, intptr_t capacity_executable)
Definition: spaces.cc:262
bool SetUp(int reserved_semispace_size_, int max_semi_space_size)
Definition: spaces.cc:1175
void CallOnce(OnceType *once, NoArgFunction init_func)
Definition: once.h:82
static void InitializeGCOnce()
Definition: heap.cc:5052

References Available(), v8::base::CallOnce(), Capacity(), v8::internal::CELL_SPACE, cell_space_, v8::internal::Isolate::code_range(), code_range_size_, v8::internal::CODE_SPACE, code_space_, configured_, ConfigureHeapDefault(), DCHECK, v8::internal::EXECUTABLE, v8::internal::Smi::FromInt(), v8::internal::InitializeGCOnce(), isolate(), isolate_, v8::internal::Name::kHashBitMask, v8::internal::LO_SPACE, lo_space_, LOG, v8::internal::MAP_SPACE, map_space_, mark_compact_collector(), MarkMapPointersAsEncoded(), max_old_generation_size_, max_semi_space_size_, MaxExecutableSize(), MaxReserved(), v8::internal::Isolate::memory_allocator(), new_space(), new_space_, new_space_top_after_last_gc_, v8::internal::NOT_EXECUTABLE, NULL, v8::internal::OLD_DATA_SPACE, old_data_space_, v8::internal::OLD_POINTER_SPACE, old_pointer_space_, v8::internal::PROPERTY_CELL_SPACE, property_cell_space_, v8::internal::Isolate::random_number_generator(), reserved_semispace_size_, v8::internal::MarkCompactCollector::SetUp(), v8::internal::PagedSpace::SetUp(), v8::internal::LargeObjectSpace::SetUp(), v8::internal::StoreBuffer::SetUp(), v8::internal::NewSpace::SetUp(), v8::internal::MemoryAllocator::SetUp(), v8::internal::CodeRange::SetUp(), store_buffer(), and v8::internal::NewSpace::top().

Referenced by v8::internal::Isolate::Init().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldBePromoted()

bool v8::internal::Heap::ShouldBePromoted ( Address  old_address,
int  object_size 
)
inline

Definition at line 370 of file heap-inl.h.

370  {
371  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
372  Address age_mark = new_space_.age_mark();
373  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
374  (!page->ContainsLimit(age_mark) || old_address < age_mark);
375 }
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:2021
Address age_mark()
Definition: spaces.h:2439

References v8::internal::NewSpace::age_mark(), v8::internal::MemoryChunk::ContainsLimit(), v8::internal::NewSpacePage::FromAddress(), v8::internal::MemoryChunk::IsFlagSet(), new_space_, and v8::internal::MemoryChunk::NEW_SPACE_BELOW_AGE_MARK.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ShouldZapGarbage()

static bool v8::internal::Heap::ShouldZapGarbage ( )
inlinestatic

Definition at line 926 of file heap.h.

926  {
927 #ifdef DEBUG
928  return true;
929 #else
930 #ifdef VERIFY_HEAP
931  return FLAG_verify_heap;
932 #else
933  return false;
934 #endif
935 #endif
936  }

Referenced by v8::internal::SharedFunctionInfo::AddToOptimizedCodeMap(), AddWeakObjectToCodeDependency(), v8::internal::MemoryAllocator::AllocateChunk(), v8::internal::LargeObjectSpace::AllocateRaw(), v8::internal::MemoryChunk::CommitArea(), v8::internal::MemoryAllocator::CommitBlock(), and GarbageCollectionEpilogue().

+ Here is the caller graph for this function:

◆ SizeOfObjects()

intptr_t v8::internal::Heap::SizeOfObjects ( )

Definition at line 460 of file heap.cc.

460  {
461  intptr_t total = 0;
462  AllSpaces spaces(this);
463  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
464  total += space->SizeOfObjects();
465  }
466  return total;
467 }

References NULL, and space().

Referenced by EvaluateOldSpaceLocalPretenuring(), GarbageCollectionEpilogue(), v8::Isolate::GetHeapStatistics(), IdleNotification(), MarkCompact(), PrintShortHeapStatistics(), v8::internal::GCTracer::Start(), v8::internal::GCTracer::Stop(), and UpdateCumulativeGCStatistics().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ STATIC_ASSERT() [1/5]

v8::internal::Heap::STATIC_ASSERT ( kempty_stringRootIndex  == Internals::kEmptyStringRootIndex)

◆ STATIC_ASSERT() [2/5]

v8::internal::Heap::STATIC_ASSERT ( kFalseValueRootIndex  == Internals::kFalseValueRootIndex)

◆ STATIC_ASSERT() [3/5]

v8::internal::Heap::STATIC_ASSERT ( kNullValueRootIndex  == Internals::kNullValueRootIndex)

◆ STATIC_ASSERT() [4/5]

v8::internal::Heap::STATIC_ASSERT ( kTrueValueRootIndex  == Internals::kTrueValueRootIndex)

◆ STATIC_ASSERT() [5/5]

v8::internal::Heap::STATIC_ASSERT ( kUndefinedValueRootIndex  == Internals::kUndefinedValueRootIndex)

Referenced by AllocateCell(), AllocateForeign(), AllocateHeapNumber(), AllocatePropertyCell(), AllocateSymbol(), LeftTrimFixedArray(), and UpdateAllocationsHash().

+ Here is the caller graph for this function:

◆ store_buffer()

StoreBuffer* v8::internal::Heap::store_buffer ( )
inline

Definition at line 1201 of file heap.h.

1201 { return &store_buffer_; }

References store_buffer_.

Referenced by DoScavenge(), v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates(), FreeQueuedChunks(), GarbageCollectionEpilogue(), GarbageCollectionPrologue(), v8::internal::MarkCompactCollector::RecordMigratedSlot(), Scavenge(), SetUp(), v8::internal::StoreBuffer::StoreBufferOverflow(), and TearDown().

+ Here is the caller graph for this function:

◆ store_buffer_top_address()

Address* v8::internal::Heap::store_buffer_top_address ( )
inline

Definition at line 898 of file heap.h.

898  {
899  return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
900  }

References roots_.

Referenced by v8::internal::StoreBuffer::TopAddress().

+ Here is the caller graph for this function:

◆ sweep_generation()

int v8::internal::Heap::sweep_generation ( )
inline

Definition at line 1212 of file heap.h.

1212 { return sweep_generation_; }

References sweep_generation_.

Referenced by v8::internal::MarkCompactMarkingVisitor::UpdateRegExpCodeAgeAndFlush().

+ Here is the caller graph for this function:

◆ synthetic_time()

double v8::internal::Heap::synthetic_time ( )
inline

Definition at line 943 of file heap.h.

943 { return allocations_count_ / 2.0; }

References allocations_count_.

Referenced by v8::Shell::PerformanceNow().

+ Here is the caller graph for this function:

◆ TargetSpace()

OldSpace * v8::internal::Heap::TargetSpace ( HeapObject *  object)
inline

Definition at line 392 of file heap-inl.h.

392  {
393  InstanceType type = object->map()->instance_type();
396 }

References old_data_space_, v8::internal::OLD_POINTER_SPACE, old_pointer_space_, space(), and TargetSpaceId().

Referenced by v8::internal::MarkCompactCollector::TryPromoteObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ TargetSpaceId()

AllocationSpace v8::internal::Heap::TargetSpaceId ( InstanceType  type)
inlinestatic

Definition at line 399 of file heap-inl.h.

399  {
400  // Heap numbers and sequential strings are promoted to old data space, all
401  // other object types are promoted to old pointer space. We do not use
402  // object->IsHeapNumber() and object->IsSeqString() because we already
403  // know that object has the heap object tag.
404 
405  // These objects are never allocated in new space.
406  DCHECK(type != MAP_TYPE);
407  DCHECK(type != CODE_TYPE);
408  DCHECK(type != ODDBALL_TYPE);
409  DCHECK(type != CELL_TYPE);
410  DCHECK(type != PROPERTY_CELL_TYPE);
411 
412  if (type <= LAST_NAME_TYPE) {
413  if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
415  // There are four string representations: sequential strings, external
416  // strings, cons strings, and sliced strings.
417  // Only the latter two contain non-map-word pointers to heap objects.
418  return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
420  : OLD_DATA_SPACE;
421  } else {
422  return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
423  }
424 }
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ LAST_NAME_TYPE
Definition: objects.h:755
@ LAST_DATA_TYPE
Definition: objects.h:766
const uint32_t kIsIndirectStringTag
Definition: objects.h:569
const uint32_t kIsIndirectStringMask
Definition: objects.h:568

References v8::internal::CELL_TYPE, v8::internal::CODE_TYPE, DCHECK, v8::internal::FIRST_NONSTRING_TYPE, v8::internal::kIsIndirectStringMask, v8::internal::kIsIndirectStringTag, v8::internal::LAST_DATA_TYPE, v8::internal::LAST_NAME_TYPE, v8::internal::MAP_TYPE, v8::internal::ODDBALL_TYPE, v8::internal::OLD_DATA_SPACE, v8::internal::OLD_POINTER_SPACE, v8::internal::PROPERTY_CELL_TYPE, and v8::internal::SYMBOL_TYPE.

Referenced by Allocate(), AllowedToBeMigrated(), v8::internal::FINAL< kOperandKind, kNumCachedOperands >::ComputeFlags(), and TargetSpace().

+ Here is the caller graph for this function:

◆ TearDown()

void v8::internal::Heap::TearDown ( )

Definition at line 5188 of file heap.cc.

5188  {
5189 #ifdef VERIFY_HEAP
5190  if (FLAG_verify_heap) {
5191  Verify();
5192  }
5193 #endif
5194 
5196 
5197  if (FLAG_print_cumulative_gc_stat) {
5198  PrintF("\n");
5199  PrintF("gc_count=%d ", gc_count_);
5200  PrintF("mark_sweep_count=%d ", ms_count_);
5201  PrintF("max_gc_pause=%.1f ", get_max_gc_pause());
5202  PrintF("total_gc_time=%.1f ", total_gc_time_ms_);
5203  PrintF("min_in_mutator=%.1f ", get_min_in_mutator());
5204  PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ", get_max_alive_after_gc());
5205  PrintF("total_marking_time=%.1f ", tracer_.cumulative_sweeping_duration());
5206  PrintF("total_sweeping_time=%.1f ", tracer_.cumulative_sweeping_duration());
5207  PrintF("\n\n");
5208  }
5209 
5210  if (FLAG_print_max_heap_committed) {
5211  PrintF("\n");
5212  PrintF("maximum_committed_by_heap=%" V8_PTR_PREFIX "d ",
5214  PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ",
5216  PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ",
5218  PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
5220  PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
5222  PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ",
5224  PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
5226  PrintF("maximum_committed_by_cell_space=%" V8_PTR_PREFIX "d ",
5228  PrintF("maximum_committed_by_property_space=%" V8_PTR_PREFIX "d ",
5230  PrintF("maximum_committed_by_lo_space=%" V8_PTR_PREFIX "d ",
5232  PrintF("\n\n");
5233  }
5234 
5235  if (FLAG_verify_predictable) {
5237  }
5238 
5240 
5242 
5244 
5246 
5247  new_space_.TearDown();
5248 
5249  if (old_pointer_space_ != NULL) {
5251  delete old_pointer_space_;
5253  }
5254 
5255  if (old_data_space_ != NULL) {
5257  delete old_data_space_;
5259  }
5260 
5261  if (code_space_ != NULL) {
5262  code_space_->TearDown();
5263  delete code_space_;
5264  code_space_ = NULL;
5265  }
5266 
5267  if (map_space_ != NULL) {
5268  map_space_->TearDown();
5269  delete map_space_;
5270  map_space_ = NULL;
5271  }
5272 
5273  if (cell_space_ != NULL) {
5274  cell_space_->TearDown();
5275  delete cell_space_;
5276  cell_space_ = NULL;
5277  }
5278 
5279  if (property_cell_space_ != NULL) {
5281  delete property_cell_space_;
5283  }
5284 
5285  if (lo_space_ != NULL) {
5286  lo_space_->TearDown();
5287  delete lo_space_;
5288  lo_space_ = NULL;
5289  }
5290 
5291  store_buffer()->TearDown();
5293 
5295 }
double cumulative_sweeping_duration() const
Definition: gc-tracer.h:275
double get_max_gc_pause()
Definition: heap.h:1189
void TearDownArrayBuffers()
Definition: heap.cc:1649
double get_min_in_mutator()
Definition: heap.h:1195
intptr_t get_max_alive_after_gc()
Definition: heap.h:1192
intptr_t MaximumCommittedMemory()
Definition: spaces.h:2744
intptr_t MaximumCommittedMemory()
Definition: spaces.h:2391
intptr_t MaximumCommittedMemory()
Definition: spaces.h:1695

References cell_space_, code_space_, v8::internal::GCTracer::cumulative_sweeping_duration(), external_string_table_, gc_count_, get_max_alive_after_gc(), get_max_gc_pause(), get_min_in_mutator(), v8::internal::Isolate::global_handles(), incremental_marking(), isolate_, lo_space_, map_space_, mark_compact_collector(), MaximumCommittedMemory(), v8::internal::PagedSpace::MaximumCommittedMemory(), v8::internal::NewSpace::MaximumCommittedMemory(), v8::internal::LargeObjectSpace::MaximumCommittedMemory(), v8::internal::Isolate::memory_allocator(), ms_count_, new_space_, NULL, old_data_space_, old_pointer_space_, PrintAlloctionsHash(), v8::internal::PrintF(), property_cell_space_, store_buffer(), v8::internal::GlobalHandles::TearDown(), v8::internal::ExternalStringTable::TearDown(), v8::internal::IncrementalMarking::TearDown(), v8::internal::MarkCompactCollector::TearDown(), v8::internal::MemoryAllocator::TearDown(), v8::internal::PagedSpace::TearDown(), v8::internal::NewSpace::TearDown(), v8::internal::LargeObjectSpace::TearDown(), v8::internal::StoreBuffer::TearDown(), TearDownArrayBuffers(), total_gc_time_ms_, tracer_, UpdateMaximumCommitted(), and V8_PTR_PREFIX.

Referenced by v8::internal::Isolate::Deinit().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ TearDownArrayBuffers()

void v8::internal::Heap::TearDownArrayBuffers ( )
private

Definition at line 1649 of file heap.cc.

1649  {
1650  Object* undefined = undefined_value();
1651  for (Object* o = array_buffers_list(); o != undefined;) {
1652  JSArrayBuffer* buffer = JSArrayBuffer::cast(o);
1653  Runtime::FreeArrayBuffer(isolate(), buffer);
1654  o = buffer->weak_next();
1655  }
1656  set_array_buffers_list(undefined);
1657 }
static void FreeArrayBuffer(Isolate *isolate, JSArrayBuffer *phantom_array_buffer)

References array_buffers_list(), v8::internal::Runtime::FreeArrayBuffer(), isolate(), and set_array_buffers_list().

Referenced by TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ToBoolean()

Object * v8::internal::Heap::ToBoolean ( bool  condition)
inline

Definition at line 706 of file heap-inl.h.

706  {
707  return condition ? true_value() : false_value();
708 }

Referenced by v8::internal::GetOwnProperty(), and v8::internal::RUNTIME_FUNCTION().

+ Here is the caller graph for this function:

◆ total_regexp_code_generated()

double v8::internal::Heap::total_regexp_code_generated ( )
inline

Definition at line 1171 of file heap.h.

1171 { return total_regexp_code_generated_; }

References total_regexp_code_generated_.

Referenced by v8::internal::RegExpCompiler::Assemble().

+ Here is the caller graph for this function:

◆ tracer()

GCTracer* v8::internal::Heap::tracer ( )
inline

Definition at line 1166 of file heap.h.

1166 { return &tracer_; }

References tracer_.

Referenced by CollectGarbage(), v8::internal::IncrementalMarking::Hurry(), IdleNotification(), v8::internal::MarkCompactCollector::MarkLiveObjects(), PerformGarbageCollection(), v8::internal::IncrementalMarking::Step(), and v8::internal::MarkCompactCollector::SweepSpaces().

+ Here is the caller graph for this function:

◆ UncommitFromSpace()

bool v8::internal::Heap::UncommitFromSpace ( )
inlineprivate

Definition at line 1840 of file heap.h.

1840 { return new_space_.UncommitFromSpace(); }
bool UncommitFromSpace()
Definition: spaces.h:2539

References new_space_, and v8::internal::NewSpace::UncommitFromSpace().

Referenced by AdvanceIdleIncrementalMarking(), and CollectAllAvailableGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateAllocationsHash() [1/2]

void v8::internal::Heap::UpdateAllocationsHash ( HeapObject *  object)
inlineprivate

Definition at line 277 of file heap-inl.h.

277  {
278  Address object_address = object->address();
279  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
280  AllocationSpace allocation_space = memory_chunk->owner()->identity();
281 
283  uint32_t value =
284  static_cast<uint32_t>(object_address - memory_chunk->address()) |
285  (static_cast<uint32_t>(allocation_space) << kPageSizeBits);
286 
287  UpdateAllocationsHash(value);
288 }
const int kPageSizeBits
Definition: build_config.h:159
const int kSpaceTagSize
Definition: globals.h:374

References v8::internal::MemoryChunk::address(), v8::internal::MemoryChunk::FromAddress(), v8::internal::Space::identity(), kPageSizeBits, v8::internal::kSpaceTagSize, v8::internal::MemoryChunk::owner(), and STATIC_ASSERT().

Referenced by OnAllocationEvent(), and OnMoveEvent().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateAllocationsHash() [2/2]

void v8::internal::Heap::UpdateAllocationsHash ( uint32_t  value)
inlineprivate

Definition at line 291 of file heap-inl.h.

291  {
292  uint16_t c1 = static_cast<uint16_t>(value);
293  uint16_t c2 = static_cast<uint16_t>(value >> 16);
295  StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
297  StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
298 }
unsigned short uint16_t
Definition: unicode.cc:23

References raw_allocations_hash_.

◆ UpdateAllocationSiteFeedback()

void v8::internal::Heap::UpdateAllocationSiteFeedback ( HeapObject *  object,
ScratchpadSlotMode  mode 
)
inlinestatic

Definition at line 536 of file heap-inl.h.

537  {
538  Heap* heap = object->GetHeap();
539  DCHECK(heap->InFromSpace(object));
540 
541  if (!FLAG_allocation_site_pretenuring ||
542  !AllocationSite::CanTrack(object->map()->instance_type()))
543  return;
544 
545  AllocationMemento* memento = heap->FindAllocationMemento(object);
546  if (memento == NULL) return;
547 
548  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
549  heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
550  }
551 }

References AddAllocationSiteToScratchpad(), v8::internal::AllocationSite::CanTrack(), DCHECK, FindAllocationMemento(), v8::internal::AllocationMemento::GetAllocationSite(), v8::internal::AllocationSite::IncrementMementoFoundCount(), InFromSpace(), v8::internal::Map::instance_type(), v8::internal::HeapObject::map(), mode(), and NULL.

Referenced by v8::internal::MarkCompactCollector::DiscoverAndEvacuateBlackObjectsOnPage(), and ScavengeObject().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateCumulativeGCStatistics()

void v8::internal::Heap::UpdateCumulativeGCStatistics ( double  duration,
double  spent_in_mutator,
double  marking_time 
)

Definition at line 5881 of file heap.cc.

5883  {
5884  if (FLAG_print_cumulative_gc_stat) {
5885  total_gc_time_ms_ += duration;
5886  max_gc_pause_ = Max(max_gc_pause_, duration);
5888  min_in_mutator_ = Min(min_in_mutator_, spent_in_mutator);
5889  } else if (FLAG_trace_gc_verbose) {
5890  total_gc_time_ms_ += duration;
5891  }
5892 
5893  marking_time_ += marking_time;
5894 }

References marking_time_, v8::internal::Max(), max_alive_after_gc_, max_gc_pause_, v8::internal::Min(), min_in_mutator_, SizeOfObjects(), and total_gc_time_ms_.

Referenced by v8::internal::GCTracer::Stop().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateMaximumCommitted()

void v8::internal::Heap::UpdateMaximumCommitted ( )

Definition at line 201 of file heap.cc.

201  {
202  if (!HasBeenSetUp()) return;
203 
204  intptr_t current_committed_memory = CommittedMemory();
205  if (current_committed_memory > maximum_committed_) {
206  maximum_committed_ = current_committed_memory;
207  }
208 }

References CommittedMemory(), HasBeenSetUp(), and maximum_committed_.

Referenced by GarbageCollectionEpilogue(), GarbageCollectionPrologue(), and TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateNewSpaceReferenceInExternalStringTableEntry()

String * v8::internal::Heap::UpdateNewSpaceReferenceInExternalStringTableEntry ( Heap *  heap,
Object **  pointer 
)
staticprivate

Definition at line 1561 of file heap.cc.

1562  {
1563  MapWord first_word = HeapObject::cast(*p)->map_word();
1564 
1565  if (!first_word.IsForwardingAddress()) {
1566  // Unreachable external string can be finalized.
1567  heap->FinalizeExternalString(String::cast(*p));
1568  return NULL;
1569  }
1570 
1571  // String is still reachable.
1572  return String::cast(first_word.ToForwardingAddress());
1573 }

References FinalizeExternalString(), v8::internal::HeapObject::map_word(), and NULL.

Referenced by Scavenge().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateNewSpaceReferencesInExternalStringTable()

void v8::internal::Heap::UpdateNewSpaceReferencesInExternalStringTable ( ExternalStringTableUpdaterCallback  updater_func)

Definition at line 1576 of file heap.cc.

1577  {
1578 #ifdef VERIFY_HEAP
1579  if (FLAG_verify_heap) {
1581  }
1582 #endif
1583 
1584  if (external_string_table_.new_space_strings_.is_empty()) return;
1585 
1587  Object** end = start + external_string_table_.new_space_strings_.length();
1588  Object** last = start;
1589 
1590  for (Object** p = start; p < end; ++p) {
1591  DCHECK(InFromSpace(*p));
1592  String* target = updater_func(this, p);
1593 
1594  if (target == NULL) continue;
1595 
1596  DCHECK(target->IsExternalString());
1597 
1598  if (InNewSpace(target)) {
1599  // String is still in new space. Update the table entry.
1600  *last = target;
1601  ++last;
1602  } else {
1603  // String got promoted. Move it to the old string list.
1605  }
1606  }
1607 
1608  DCHECK(last <= end);
1609  external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
1610 }
void AddOldString(String *string)
Definition: heap-inl.h:684
void ShrinkNewStrings(int position)
Definition: heap-inl.h:691
List< Object * > new_space_strings_
Definition: heap.h:502

References v8::internal::ExternalStringTable::AddOldString(), DCHECK, external_string_table_, InFromSpace(), InNewSpace(), v8::internal::ExternalStringTable::new_space_strings_, NULL, v8::internal::ExternalStringTable::ShrinkNewStrings(), and v8::internal::ExternalStringTable::Verify().

Referenced by Scavenge(), and UpdateReferencesInExternalStringTable().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateOldSpaceLimits()

void v8::internal::Heap::UpdateOldSpaceLimits ( )
inlineprivate

◆ UpdateReferencesInExternalStringTable()

void v8::internal::Heap::UpdateReferencesInExternalStringTable ( ExternalStringTableUpdaterCallback  updater_func)

Definition at line 1613 of file heap.cc.

1614  {
1615  // Update old space string references.
 1616  if (external_string_table_.old_space_strings_.length() > 0) {
 1617  Object** start = &external_string_table_.old_space_strings_[0];
 1618  Object** end = start + external_string_table_.old_space_strings_.length();
1619  for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
1620  }
 1621 
 1622  UpdateNewSpaceReferencesInExternalStringTable(updater_func);
 1623 }
List< Object * > old_space_strings_
Definition: heap.h:503

References external_string_table_, v8::internal::ExternalStringTable::old_space_strings_, and UpdateNewSpaceReferencesInExternalStringTable().

Referenced by v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ UpdateSurvivalStatistics()

void v8::internal::Heap::UpdateSurvivalStatistics ( int  start_new_space_size)
private

Definition at line 1023 of file heap.cc.

1023  {
1024  if (start_new_space_size == 0) return;
1025 
1026  promotion_rate_ = (static_cast<double>(promoted_objects_size_) /
1027  static_cast<double>(start_new_space_size) * 100);
 1028 
 1029  semi_space_copied_rate_ =
 1030  (static_cast<double>(semi_space_copied_object_size_) /
1031  static_cast<double>(start_new_space_size) * 100);
1032 
1033  double survival_rate = promotion_rate_ + semi_space_copied_rate_;
1034 
 1035  if (survival_rate > kYoungSurvivalRateHighThreshold) {
 1036  high_survival_rate_period_length_++;
 1037  } else {
 1038  high_survival_rate_period_length_ = 0;
 1039  }
1040 }
static const int kYoungSurvivalRateHighThreshold
Definition: heap.h:1903

References high_survival_rate_period_length_, kYoungSurvivalRateHighThreshold, promoted_objects_size_, promotion_rate_, semi_space_copied_object_size_, and semi_space_copied_rate_.

Referenced by PerformGarbageCollection().

+ Here is the caller graph for this function:

◆ VisitExternalResources()

void v8::internal::Heap::VisitExternalResources ( v8::ExternalResourceVisitor visitor)

Definition at line 1707 of file heap.cc.

1707  {
1708  DisallowHeapAllocation no_allocation;
1709  // All external strings are listed in the external string table.
1710 
1711  class ExternalStringTableVisitorAdapter : public ObjectVisitor {
1712  public:
1713  explicit ExternalStringTableVisitorAdapter(
1714  v8::ExternalResourceVisitor* visitor)
1715  : visitor_(visitor) {}
1716  virtual void VisitPointers(Object** start, Object** end) {
1717  for (Object** p = start; p < end; p++) {
1718  DCHECK((*p)->IsExternalString());
1719  visitor_->VisitExternalString(
1720  Utils::ToLocal(Handle<String>(String::cast(*p))));
1721  }
1722  }
1723 
1724  private:
1725  v8::ExternalResourceVisitor* visitor_;
1726  } external_string_table_visitor(visitor);
1727 
1728  external_string_table_.Iterate(&external_string_table_visitor);
1729 }
Interface for iterating through all external resources in the heap.
Definition: v8.h:4943
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)

References DCHECK, external_string_table_, v8::internal::ExternalStringTable::Iterate(), and v8::Utils::ToLocal().

Referenced by v8::V8::VisitExternalResources().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ weak_object_to_code_table()

Object* v8::internal::Heap::weak_object_to_code_table ( )
inline

Definition at line 806 of file heap.h.

806 { return weak_object_to_code_table_; }

References weak_object_to_code_table_.

Referenced by v8::internal::MarkCompactCollector::ClearNonLiveReferences(), EnsureWeakObjectToCodeTable(), and v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates().

+ Here is the caller graph for this function:

◆ weak_object_to_code_table_address()

Object** v8::internal::Heap::weak_object_to_code_table_address ( )
inlineprivate

Definition at line 1940 of file heap.h.

 1940  {
 1941  return &weak_object_to_code_table_;
 1942  }

References weak_object_to_code_table_.

Referenced by v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates().

+ Here is the caller graph for this function:

◆ WorthActivatingIncrementalMarking()

bool v8::internal::Heap::WorthActivatingIncrementalMarking ( )
private

Definition at line 4290 of file heap.cc.

References incremental_marking(), v8::internal::IncrementalMarking::IsStopped(), NextGCIsLikelyToBeFull(), and v8::internal::IncrementalMarking::WorthActivating().

Referenced by CollectGarbage().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ZapFromSpace()

void v8::internal::Heap::ZapFromSpace ( )
private

Definition at line 4527 of file heap.cc.

4527  {
 4528  NewSpacePageIterator it(new_space_.FromSpaceStart(),
 4529  new_space_.FromSpaceEnd());
 4530  while (it.has_next()) {
4531  NewSpacePage* page = it.next();
4532  for (Address cursor = page->area_start(), limit = page->area_end();
 4533  cursor < limit; cursor += kPointerSize) {
 4534  Memory::Address_at(cursor) = kFromSpaceZapValue;
 4535  }
4536  }
4537 }
static Address & Address_at(Address addr)
Definition: v8memory.h:56
Address FromSpaceEnd()
Definition: spaces.h:2485
Address FromSpaceStart()
Definition: spaces.h:2484
const Address kFromSpaceZapValue
Definition: globals.h:272

References v8::internal::Memory::Address_at(), v8::internal::MemoryChunk::area_end(), v8::internal::MemoryChunk::area_start(), v8::internal::NewSpace::FromSpaceEnd(), v8::internal::NewSpace::FromSpaceStart(), v8::internal::kFromSpaceZapValue, v8::internal::kPointerSize, and new_space_.

Referenced by GarbageCollectionEpilogue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

Friends And Related Function Documentation

◆ AlwaysAllocateScope

friend class AlwaysAllocateScope
friend

Definition at line 2023 of file heap.h.

◆ Deserializer

friend class Deserializer
friend

Definition at line 2024 of file heap.h.

◆ Factory

friend class Factory
friend

Definition at line 2025 of file heap.h.

Referenced by CreateApiObjects(), and CreateInitialObjects().

◆ GCCallbacksScope

friend class GCCallbacksScope
friend

Definition at line 2026 of file heap.h.

◆ GCTracer

friend class GCTracer
friend

Definition at line 2027 of file heap.h.

◆ HeapIterator

friend class HeapIterator
friend

Definition at line 2028 of file heap.h.

Referenced by RecordStats().

◆ Isolate

friend class Isolate
friend

Definition at line 2029 of file heap.h.

◆ MapCompact

friend class MapCompact
friend

Definition at line 2032 of file heap.h.

◆ MarkCompactCollector

friend class MarkCompactCollector
friend

Definition at line 2030 of file heap.h.

◆ MarkCompactMarkingVisitor

friend class MarkCompactMarkingVisitor
friend

Definition at line 2031 of file heap.h.

◆ Page

friend class Page
friend

Definition at line 2036 of file heap.h.

Member Data Documentation

◆ allocation_sites_list_

Object* v8::internal::Heap::allocation_sites_list_
private

◆ allocation_sites_scratchpad_length_

int v8::internal::Heap::allocation_sites_scratchpad_length_
private

◆ allocations_count_

uint32_t v8::internal::Heap::allocations_count_
private

◆ always_allocate_scope_depth_

◆ amount_of_external_allocated_memory_

int64_t v8::internal::Heap::amount_of_external_allocated_memory_
private

◆ amount_of_external_allocated_memory_at_last_global_gc_

int64_t v8::internal::Heap::amount_of_external_allocated_memory_at_last_global_gc_
private

◆ array_buffers_list_

Object* v8::internal::Heap::array_buffers_list_
private

Definition at line 1526 of file heap.h.

Referenced by array_buffers_list(), and set_array_buffers_list().

◆ cell_space_

◆ chunks_queued_for_free_

MemoryChunk* v8::internal::Heap::chunks_queued_for_free_
private

Definition at line 2017 of file heap.h.

Referenced by FreeQueuedChunks(), and QueueMemoryChunkForFree().

◆ code_range_size_

size_t v8::internal::Heap::code_range_size_
private

Definition at line 1428 of file heap.h.

Referenced by ConfigureHeap(), and SetUp().

◆ code_space_

◆ configured_

bool v8::internal::Heap::configured_
private

Definition at line 2011 of file heap.h.

Referenced by ConfigureHeap(), and SetUp().

◆ constant_string_table

const Heap::ConstantStringTable v8::internal::Heap::constant_string_table
staticprivate
Initial value:
= {
#define CONSTANT_STRING_ELEMENT(name, contents)
}

Definition at line 1559 of file heap.h.

Referenced by CreateInitialObjects().

◆ contexts_disposed_

int v8::internal::Heap::contexts_disposed_
private

Definition at line 1446 of file heap.h.

Referenced by IdleNotification(), and NotifyContextDisposed().

◆ crankshaft_codegen_bytes_generated_

size_t v8::internal::Heap::crankshaft_codegen_bytes_generated_
private

Definition at line 1989 of file heap.h.

Referenced by GarbageCollectionEpilogue(), and IncrementCodeGeneratedBytes().

◆ dump_allocations_hash_countdown_

uint32_t v8::internal::Heap::dump_allocations_hash_countdown_
private

Definition at line 1476 of file heap.h.

Referenced by OnAllocationEvent(), and OnMoveEvent().

◆ encountered_weak_collections_

Object* v8::internal::Heap::encountered_weak_collections_
private

◆ external_string_table_

◆ flush_monomorphic_ics_

bool v8::internal::Heap::flush_monomorphic_ics_
private

Definition at line 1450 of file heap.h.

Referenced by flush_monomorphic_ics(), MarkCompact(), and NotifyContextDisposed().

◆ full_codegen_bytes_generated_

size_t v8::internal::Heap::full_codegen_bytes_generated_
private

Definition at line 1988 of file heap.h.

Referenced by GarbageCollectionEpilogue(), and IncrementCodeGeneratedBytes().

◆ gc_callbacks_depth_

◆ gc_count_

unsigned int v8::internal::Heap::gc_count_
private

◆ gc_count_at_last_idle_gc_

unsigned int v8::internal::Heap::gc_count_at_last_idle_gc_
private

Definition at line 1985 of file heap.h.

Referenced by AdvanceIdleIncrementalMarking().

◆ gc_epilogue_callbacks_

List<GCEpilogueCallbackPair> v8::internal::Heap::gc_epilogue_callbacks_
private

◆ gc_idle_time_handler_

GCIdleTimeHandler v8::internal::Heap::gc_idle_time_handler_
private

Definition at line 1984 of file heap.h.

Referenced by AdvanceIdleIncrementalMarking(), and IdleNotification().

◆ gc_post_processing_depth_

int v8::internal::Heap::gc_post_processing_depth_
private

Definition at line 1463 of file heap.h.

Referenced by IsInGCPostProcessing(), and PerformGarbageCollection().

◆ gc_prologue_callbacks_

List<GCPrologueCallbackPair> v8::internal::Heap::gc_prologue_callbacks_
private

◆ gc_safe_size_of_old_object_

HeapObjectCallback v8::internal::Heap::gc_safe_size_of_old_object_
private

Definition at line 1597 of file heap.h.

Referenced by GcSafeSizeOfOldObjectFunction(), and MarkMapPointersAsEncoded().

◆ gc_state_

HeapState v8::internal::Heap::gc_state_
private

◆ gcs_since_last_deopt_

int v8::internal::Heap::gcs_since_last_deopt_
private

Definition at line 1994 of file heap.h.

Referenced by GarbageCollectionEpilogue().

◆ global_ic_age_

int v8::internal::Heap::global_ic_age_
private

Definition at line 1448 of file heap.h.

Referenced by AgeInlineCaches(), AllocateCode(), and global_ic_age().

◆ hidden_string_

String* v8::internal::Heap::hidden_string_
private

Definition at line 1564 of file heap.h.

Referenced by CreateInitialObjects(), hidden_string(), and IterateStrongRoots().

◆ high_survival_rate_period_length_

int v8::internal::Heap::high_survival_rate_period_length_
private

Definition at line 1908 of file heap.h.

Referenced by IsHighSurvivalRate(), and UpdateSurvivalStatistics().

◆ incremental_marking_

IncrementalMarking v8::internal::Heap::incremental_marking_
private

Definition at line 1982 of file heap.h.

Referenced by incremental_marking().

◆ initial_semispace_size_

int v8::internal::Heap::initial_semispace_size_
private

Definition at line 1431 of file heap.h.

Referenced by ConfigureHeap(), and InitialSemiSpaceSize().

◆ inline_allocation_disabled_

bool v8::internal::Heap::inline_allocation_disabled_
private

◆ isolate_

◆ kAbortIncrementalMarkingMask

const int v8::internal::Heap::kAbortIncrementalMarkingMask = 2
static

◆ kAllocationSiteScratchpadSize

const int v8::internal::Heap::kAllocationSiteScratchpadSize = 256
staticprivate

◆ kArgumentsCalleeIndex

const int v8::internal::Heap::kArgumentsCalleeIndex = 1
static

Definition at line 679 of file heap.h.

◆ kArgumentsLengthIndex

const int v8::internal::Heap::kArgumentsLengthIndex = 0
static

Definition at line 677 of file heap.h.

Referenced by v8::internal::BUILTIN().

◆ kIdleScavengeThreshold

const int v8::internal::Heap::kIdleScavengeThreshold = 5
staticprivate

Definition at line 2004 of file heap.h.

◆ kInitialEvalCacheSize

const int v8::internal::Heap::kInitialEvalCacheSize = 64
staticprivate

Definition at line 1949 of file heap.h.

◆ kInitialNumberStringCacheSize

const int v8::internal::Heap::kInitialNumberStringCacheSize = 256
staticprivate

Definition at line 1950 of file heap.h.

Referenced by CreateInitialObjects(), and FullSizeNumberStringCacheLength().

◆ kInitialStringTableSize

const int v8::internal::Heap::kInitialStringTableSize = 2048
staticprivate

Definition at line 1948 of file heap.h.

Referenced by CreateInitialObjects().

◆ kMakeHeapIterableMask

◆ kMaxExecutableSizeHighMemoryDevice

const int v8::internal::Heap::kMaxExecutableSizeHighMemoryDevice
static
Initial value:
= 256 * kPointerMultiplier

Definition at line 1035 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxExecutableSizeHugeMemoryDevice

const int v8::internal::Heap::kMaxExecutableSizeHugeMemoryDevice
static
Initial value:

Definition at line 1037 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxExecutableSizeLowMemoryDevice

const int v8::internal::Heap::kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier
static

Definition at line 1032 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxExecutableSizeMediumMemoryDevice

const int v8::internal::Heap::kMaxExecutableSizeMediumMemoryDevice
static
Initial value:

Definition at line 1033 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxMarkCompactsInIdleRound

const int v8::internal::Heap::kMaxMarkCompactsInIdleRound = 7
staticprivate

Definition at line 2003 of file heap.h.

◆ kMaxOldSpaceSizeHighMemoryDevice

const int v8::internal::Heap::kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier
static

Definition at line 1027 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxOldSpaceSizeHugeMemoryDevice

const int v8::internal::Heap::kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier
static

Definition at line 1028 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxOldSpaceSizeLowMemoryDevice

const int v8::internal::Heap::kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier
static

Definition at line 1024 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxOldSpaceSizeMediumMemoryDevice

const int v8::internal::Heap::kMaxOldSpaceSizeMediumMemoryDevice
static
Initial value:

Definition at line 1025 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults(), and OldGenerationAllocationLimit().

◆ kMaxSemiSpaceSizeHighMemoryDevice

const int v8::internal::Heap::kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier
static

Definition at line 1019 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxSemiSpaceSizeHugeMemoryDevice

const int v8::internal::Heap::kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier
static

Definition at line 1020 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxSemiSpaceSizeLowMemoryDevice

const int v8::internal::Heap::kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier
static

Definition at line 1017 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMaxSemiSpaceSizeMediumMemoryDevice

const int v8::internal::Heap::kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier
static

Definition at line 1018 of file heap.h.

Referenced by v8::ResourceConstraints::ConfigureDefaults().

◆ kMinimumOldGenerationAllocationLimit

const intptr_t v8::internal::Heap::kMinimumOldGenerationAllocationLimit
static
Initial value:

Definition at line 1011 of file heap.h.

Referenced by OldGenerationAllocationLimit().

◆ kNoGCFlags

◆ kOldSpaceRoots

const int v8::internal::Heap::kOldSpaceRoots = 0x20
static

Definition at line 1235 of file heap.h.

Referenced by v8::internal::Deserializer::ReadChunk().

◆ kOldSurvivalRateLowThreshold

const int v8::internal::Heap::kOldSurvivalRateLowThreshold = 10
staticprivate

Definition at line 1906 of file heap.h.

Referenced by EvaluateOldSpaceLocalPretenuring().

◆ kPointerMultiplier

const int v8::internal::Heap::kPointerMultiplier = i::kPointerSize / 4
static

Definition at line 1014 of file heap.h.

◆ kReduceMemoryFootprintMask

const int v8::internal::Heap::kReduceMemoryFootprintMask = 1
static

◆ kRememberedUnmappedPages

const int v8::internal::Heap::kRememberedUnmappedPages = 128
staticprivate

Definition at line 1485 of file heap.h.

Referenced by RememberUnmappedPage().

◆ kSloppyArgumentsObjectSize

const int v8::internal::Heap::kSloppyArgumentsObjectSize
static
Initial value:

Definition at line 671 of file heap.h.

◆ kStrictArgumentsObjectSize

const int v8::internal::Heap::kStrictArgumentsObjectSize
static
Initial value:

Definition at line 674 of file heap.h.

◆ kYoungSurvivalRateAllowedDeviation

const int v8::internal::Heap::kYoungSurvivalRateAllowedDeviation = 15
staticprivate

Definition at line 1904 of file heap.h.

◆ kYoungSurvivalRateHighThreshold

const int v8::internal::Heap::kYoungSurvivalRateHighThreshold = 90
staticprivate

Definition at line 1903 of file heap.h.

Referenced by UpdateSurvivalStatistics().

◆ lo_space_

◆ map_space_

◆ mark_compact_collector_

MarkCompactCollector v8::internal::Heap::mark_compact_collector_
private

Definition at line 1976 of file heap.h.

Referenced by CollectAllGarbage(), mark_compact_collector(), and MarkCompact().

◆ marking_

Marking v8::internal::Heap::marking_
private

Definition at line 1980 of file heap.h.

Referenced by marking().

◆ marking_time_

double v8::internal::Heap::marking_time_
private

Definition at line 1971 of file heap.h.

Referenced by UpdateCumulativeGCStatistics().

◆ max_alive_after_gc_

intptr_t v8::internal::Heap::max_alive_after_gc_
private

Definition at line 1965 of file heap.h.

Referenced by get_max_alive_after_gc(), and UpdateCumulativeGCStatistics().

◆ max_executable_size_

intptr_t v8::internal::Heap::max_executable_size_
private

Definition at line 1433 of file heap.h.

Referenced by ConfigureHeap(), and MaxExecutableSize().

◆ max_gc_pause_

double v8::internal::Heap::max_gc_pause_
private

Definition at line 1959 of file heap.h.

Referenced by get_max_gc_pause(), and UpdateCumulativeGCStatistics().

◆ max_old_generation_size_

intptr_t v8::internal::Heap::max_old_generation_size_
private

◆ max_semi_space_size_

int v8::internal::Heap::max_semi_space_size_
private

◆ maximum_committed_

intptr_t v8::internal::Heap::maximum_committed_
private

Definition at line 1434 of file heap.h.

Referenced by MaximumCommittedMemory(), and UpdateMaximumCommitted().

◆ maximum_size_scavenges_

unsigned int v8::internal::Heap::maximum_size_scavenges_
private

◆ min_in_mutator_

double v8::internal::Heap::min_in_mutator_
private

Definition at line 1968 of file heap.h.

Referenced by get_min_in_mutator(), and UpdateCumulativeGCStatistics().

◆ ms_count_

unsigned int v8::internal::Heap::ms_count_
private

Definition at line 1479 of file heap.h.

Referenced by MarkCompact(), ms_count(), and TearDown().

◆ native_contexts_list_

Object* v8::internal::Heap::native_contexts_list_
private

Definition at line 1525 of file heap.h.

Referenced by native_contexts_list(), and set_native_contexts_list().

◆ new_space_

◆ new_space_top_after_last_gc_

Address v8::internal::Heap::new_space_top_after_last_gc_
private

Definition at line 1464 of file heap.h.

Referenced by GarbageCollectionEpilogue(), IsHeapIterable(), and SetUp().

◆ nodes_copied_in_new_space_

int v8::internal::Heap::nodes_copied_in_new_space_
private

◆ nodes_died_in_new_space_

int v8::internal::Heap::nodes_died_in_new_space_
private

◆ nodes_promoted_

int v8::internal::Heap::nodes_promoted_
private

◆ object_counts_

size_t v8::internal::Heap::object_counts_[OBJECT_STATS_COUNT]
private

◆ object_counts_last_time_

size_t v8::internal::Heap::object_counts_last_time_[OBJECT_STATS_COUNT]
private

Definition at line 1954 of file heap.h.

Referenced by CheckpointObjectStats(), and ClearObjectStats().

◆ object_sizes_

size_t v8::internal::Heap::object_sizes_[OBJECT_STATS_COUNT]
private

◆ object_sizes_last_time_

size_t v8::internal::Heap::object_sizes_last_time_[OBJECT_STATS_COUNT]
private

Definition at line 1956 of file heap.h.

Referenced by CheckpointObjectStats(), and ClearObjectStats().

◆ old_data_space_

◆ old_gen_exhausted_

bool v8::internal::Heap::old_gen_exhausted_
private

Definition at line 1517 of file heap.h.

Referenced by AllocateRaw(), PerformGarbageCollection(), and SelectGarbageCollector().

◆ old_generation_allocation_limit_

intptr_t v8::internal::Heap::old_generation_allocation_limit_
private

◆ old_pointer_space_

◆ promoted_objects_size_

intptr_t v8::internal::Heap::promoted_objects_size_
private

◆ promotion_queue_

PromotionQueue v8::internal::Heap::promotion_queue_
private

Definition at line 2007 of file heap.h.

Referenced by promotion_queue(), and Scavenge().

◆ promotion_rate_

double v8::internal::Heap::promotion_rate_
private

Definition at line 1910 of file heap.h.

Referenced by v8::internal::GCTracer::PrintNVP(), and UpdateSurvivalStatistics().

◆ property_cell_space_

◆ raw_allocations_hash_

uint32_t v8::internal::Heap::raw_allocations_hash_
private

Definition at line 1473 of file heap.h.

Referenced by PrintAlloctionsHash(), and UpdateAllocationsHash().

◆ relocation_mutex_

base::Mutex v8::internal::Heap::relocation_mutex_
private

◆ remembered_unmapped_pages_

Address v8::internal::Heap::remembered_unmapped_pages_[kRememberedUnmappedPages]
private

Definition at line 1487 of file heap.h.

Referenced by RememberUnmappedPage().

◆ remembered_unmapped_pages_index_

int v8::internal::Heap::remembered_unmapped_pages_index_
private

Definition at line 1486 of file heap.h.

Referenced by RememberUnmappedPage().

◆ reserved_semispace_size_

int v8::internal::Heap::reserved_semispace_size_
private

Definition at line 1429 of file heap.h.

Referenced by ConfigureHeap(), Heap(), MaxReserved(), ReservedSemiSpaceSize(), and SetUp().

◆ roots_

◆ scan_on_scavenge_pages_

int v8::internal::Heap::scan_on_scavenge_pages_
private

Definition at line 1452 of file heap.h.

Referenced by decrement_scan_on_scavenge_pages(), and increment_scan_on_scavenge_pages().

◆ scavenging_visitors_table_

VisitorDispatchTable<ScavengingCallback> v8::internal::Heap::scavenging_visitors_table_
private

Definition at line 2015 of file heap.h.

Referenced by DoScavengeObject(), and SelectScavengingVisitorsTable().

◆ semi_space_copied_object_size_

intptr_t v8::internal::Heap::semi_space_copied_object_size_
private

◆ semi_space_copied_rate_

double v8::internal::Heap::semi_space_copied_rate_
private

Definition at line 1912 of file heap.h.

Referenced by v8::internal::GCTracer::PrintNVP(), and UpdateSurvivalStatistics().

◆ store_buffer_

StoreBuffer v8::internal::Heap::store_buffer_
private

Definition at line 1978 of file heap.h.

Referenced by IterateAndMarkPointersToFromSpace(), and store_buffer().

◆ store_buffer_rebuilder_

StoreBufferRebuilder v8::internal::Heap::store_buffer_rebuilder_
private

Definition at line 1539 of file heap.h.

Referenced by ScavengeStoreBufferCallback().

◆ string_type_table

const Heap::StringTypeTable v8::internal::Heap::string_type_table
staticprivate
Initial value:
= {
#define STRING_TYPE_ELEMENT(type, size, name, camel_name)
}

Definition at line 1558 of file heap.h.

Referenced by CreateInitialMaps().

◆ struct_table

const Heap::StructTable v8::internal::Heap::struct_table
staticprivate
Initial value:
= {
#define STRUCT_TABLE_ELEMENT(NAME, Name, name)
}

Definition at line 1560 of file heap.h.

Referenced by CreateInitialMaps().

◆ survived_since_last_expansion_

int v8::internal::Heap::survived_since_last_expansion_
private

Definition at line 1438 of file heap.h.

Referenced by CheckNewSpaceExpansionCriteria(), and IncrementYoungSurvivorsCounter().

◆ sweep_generation_

int v8::internal::Heap::sweep_generation_
private

Definition at line 1441 of file heap.h.

Referenced by PerformGarbageCollection(), and sweep_generation().

◆ sweeping_time_

double v8::internal::Heap::sweeping_time_
private

Definition at line 1974 of file heap.h.

◆ total_gc_time_ms_

double v8::internal::Heap::total_gc_time_ms_
private

Definition at line 1962 of file heap.h.

Referenced by PrintShortHeapStatistics(), TearDown(), and UpdateCumulativeGCStatistics().

◆ total_regexp_code_generated_

double v8::internal::Heap::total_regexp_code_generated_
private

Definition at line 1882 of file heap.h.

Referenced by IncreaseTotalRegexpCodeGenerated(), and total_regexp_code_generated().

◆ tracer_

GCTracer v8::internal::Heap::tracer_
private

Definition at line 1884 of file heap.h.

Referenced by TearDown(), and tracer().

◆ unflattened_strings_length_

int v8::internal::Heap::unflattened_strings_length_
private

Definition at line 1490 of file heap.h.

Referenced by GarbageCollectionPrologue().

◆ weak_object_to_code_table_


The documentation for this class was generated from the following files: