V8 Project
v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode > Class Template Reference

Classes

class  ObjectEvacuationStrategy
 

Static Public Member Functions

static void Initialize ()
 
static VisitorDispatchTable< ScavengingCallback > * GetTable ()
 
- Static Public Member Functions inherited from v8::internal::StaticVisitorBase
static VisitorId GetVisitorId (int instance_type, int instance_size)
 
static VisitorId GetVisitorId (Map *map)
 
static VisitorId GetVisitorIdForSize (VisitorId base, VisitorId generic, int object_size)
 

Private Types

enum  ObjectContents { DATA_OBJECT , POINTER_OBJECT }
 

Private Member Functions

 INLINE (static void MigrateObject(Heap *heap, HeapObject *source, HeapObject *target, int size))
 

Static Private Member Functions

static void RecordCopiedObject (Heap *heap, HeapObject *obj)
 
template<int alignment>
static bool SemiSpaceCopyObject (Map *map, HeapObject **slot, HeapObject *object, int object_size)
 
template<ObjectContents object_contents, int alignment>
static bool PromoteObject (Map *map, HeapObject **slot, HeapObject *object, int object_size)
 
template<ObjectContents object_contents, int alignment>
static void EvacuateObject (Map *map, HeapObject **slot, HeapObject *object, int object_size)
 
static void EvacuateJSFunction (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateFixedArray (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateFixedDoubleArray (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateFixedTypedArray (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateFixedFloat64Array (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateByteArray (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateSeqOneByteString (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateSeqTwoByteString (Map *map, HeapObject **slot, HeapObject *object)
 
static void EvacuateShortcutCandidate (Map *map, HeapObject **slot, HeapObject *object)
 

Static Private Attributes

static VisitorDispatchTable< ScavengingCallback > table_
 

Additional Inherited Members

- Public Types inherited from v8::internal::StaticVisitorBase
enum  VisitorId {
  kVisitorIdCount , kVisitDataObject = kVisitDataObject2 , kVisitJSObject = kVisitJSObject2 , kVisitStruct = kVisitStruct2 ,
  kMinObjectSizeInWords = 2
}
 
- Public Member Functions inherited from v8::internal::StaticVisitorBase
 STATIC_ASSERT (kVisitorIdCount<=256)
 

Detailed Description

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
class v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >

Definition at line 1823 of file heap.cc.
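
ScavengingVisitor implements the scavenger's per-object-type evacuation logic. Initialize() fills a static VisitorDispatchTable with one evacuation callback per visitor id, and the scavenger then dispatches through that table for every live new-space object it encounters. The two template parameters control whether mark bits are transferred to the copied object (marks_handling) and whether logging/profiling hooks fire on each move (logging_and_profiling_mode); both are tested in the MigrateObject body below.

What follows is a minimal, standalone sketch of the dispatch-table pattern this class relies on. Every name in it (FakeMap, FakeHeapObject, DispatchTable) is a simplified stand-in for illustration, not one of V8's real declarations.

    // A dispatch table maps a small "visitor id" (derived from an object's
    // type) to the callback that knows how to evacuate that kind of object.
    #include <array>
    #include <cstdio>

    enum VisitorId { kVisitByteArray, kVisitFixedArray, kVisitorIdCount };

    struct FakeMap {};
    struct FakeHeapObject {};

    using ScavengingCallback = void (*)(FakeMap* map, FakeHeapObject** slot,
                                        FakeHeapObject* object);

    // Analogue of VisitorDispatchTable: one callback slot per visitor id.
    struct DispatchTable {
      std::array<ScavengingCallback, kVisitorIdCount> callbacks_{};
      void Register(VisitorId id, ScavengingCallback cb) { callbacks_[id] = cb; }
      void Call(VisitorId id, FakeMap* map, FakeHeapObject** slot,
                FakeHeapObject* object) {
        callbacks_[id](map, slot, object);
      }
    };

    static void EvacuateByteArray(FakeMap*, FakeHeapObject**, FakeHeapObject*) {
      std::puts("byte array evacuated");
    }

    int main() {
      DispatchTable table;                                  // plays the role of table_
      table.Register(kVisitByteArray, &EvacuateByteArray);  // cf. Initialize()
      FakeMap map;
      FakeHeapObject obj;
      FakeHeapObject* slot = &obj;
      table.Call(kVisitByteArray, &map, &slot, &obj);       // type-based dispatch
      return 0;
    }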

Member Enumeration Documentation

◆ ObjectContents

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
enum v8::internal::ScavengingVisitor::ObjectContents
private
Enumerator
DATA_OBJECT 
POINTER_OBJECT 
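DATA_OBJECT marks objects whose bodies contain no heap pointers, while POINTER_OBJECT marks objects that may contain pointers; PromoteObject() below uses this tag to pick the old data space versus the old pointer space, and only POINTER_OBJECT targets are pushed on the promotion queue for rescanning.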

Definition at line 1899 of file heap.cc.

Member Function Documentation

◆ EvacuateByteArray()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateByteArray (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2124 of file heap.cc.

  {
    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,
                                                  object_size);
  }

References map.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateFixedArray()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedArray (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2091 of file heap.cc.

  {
    int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object,
                                                     object_size);
  }

References map, and v8::internal::FixedArray::BodyDescriptor::SizeOf().

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateFixedDoubleArray()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedDoubleArray (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2099 of file heap.cc.

  {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    int object_size = FixedDoubleArray::SizeFor(length);
    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object,
                                                  object_size);
  }

References map, and v8::internal::FixedDoubleArray::SizeFor().

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateFixedFloat64Array()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedFloat64Array (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2116 of file heap.cc.

  {
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object,
                                                  object_size);
  }

References map, and size.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateFixedTypedArray()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedTypedArray (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2108 of file heap.cc.

  {
    int object_size = reinterpret_cast<FixedTypedArrayBase*>(object)->size();
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,
                                                  object_size);
  }

References map, and size.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateJSFunction()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateJSFunction (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2067 of file heap.cc.

  {
    ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
        JSFunction::kSize>(map, slot, object);

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();

    MarkBit mark_bit = Marking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      // This object is black and it might not be rescanned by marker.
      // We should explicitly record code entry slot for compaction because
      // promotion queue processing (IterateAndMarkPointersToFromSpace) will
      // miss it as it is not HeapObject-tagged.
      Address code_entry_slot =
          target->address() + JSFunction::kCodeEntryOffset;
      Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          code_entry_slot, code);
    }
  }

References v8::internal::HeapObject::address(), DCHECK, v8::internal::Code::GetObjectFromEntryAddress(), v8::internal::JSFunction::kCodeEntryOffset, v8::internal::JSFunction::kSize, and map.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateObject()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
template<ObjectContents object_contents, int alignment>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateObject (Map *map, HeapObject **slot, HeapObject *object, int object_size)
inline static private

Definition at line 2041 of file heap.cc.

  {
    SLOW_DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    SLOW_DCHECK(object->Size() == object_size);
    Heap* heap = map->GetHeap();

    if (!heap->ShouldBePromoted(object->address(), object_size)) {
      // A semi-space copy may fail due to fragmentation. In that case, we
      // try to promote the object.
      if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
        return;
      }
    }

    if (PromoteObject<object_contents, alignment>(map, slot, object,
                                                  object_size)) {
      return;
    }

    // If promotion failed, we try to copy the object to the other semi-space.
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    UNREACHABLE();
  }

References v8::internal::HeapObject::address(), v8::internal::Page::kMaxRegularHeapObjectSize, map, v8::internal::Heap::ShouldBePromoted(), v8::internal::HeapObject::Size(), SLOW_DCHECK, and UNREACHABLE.

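EvacuateObject encodes a three-step fallback: an object that has not yet earned promotion is first copied within new space; if that copy fails (a semi-space copy may fail due to fragmentation) or the object should be promoted, promotion into old space is attempted; if promotion also fails, the semi-space copy is retried before UNREACHABLE() is hit. A standalone sketch of that ordering, with hypothetical boolean stand-ins for the real allocation and promotion predicates:

    #include <cassert>

    // Stand-ins for Heap::ShouldBePromoted, SemiSpaceCopyObject and
    // PromoteObject; each just reports whether its step would succeed.
    bool ShouldBePromoted(bool survived_before) { return survived_before; }
    bool SemiSpaceCopy(bool to_space_has_room) { return to_space_has_room; }
    bool Promote(bool old_space_has_room) { return old_space_has_room; }

    // Returns true if the object found a new home somewhere.
    bool Evacuate(bool survived_before, bool to_space_has_room,
                  bool old_space_has_room) {
      // Fresh objects are copied within new space first...
      if (!ShouldBePromoted(survived_before) && SemiSpaceCopy(to_space_has_room))
        return true;
      // ...otherwise (or if the copy failed) promotion is attempted...
      if (Promote(old_space_has_room)) return true;
      // ...and as a last resort the semi-space copy is retried.
      return SemiSpaceCopy(to_space_has_room);
    }

    int main() {
      assert(Evacuate(false, true, false));  // fresh object stays in new space
      assert(Evacuate(true, false, true));   // survivor is promoted
      assert(Evacuate(true, true, false));   // promotion full: copy instead
      return 0;
    }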

◆ EvacuateSeqOneByteString()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateSeqOneByteString (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2132 of file heap.cc.

  {
    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,
                                                  object_size);
  }

References map.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateSeqTwoByteString()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateSeqTwoByteString (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2141 of file heap.cc.

  {
    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,
                                                  object_size);
  }

References map.

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().


◆ EvacuateShortcutCandidate()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateShortcutCandidate (Map *map, HeapObject **slot, HeapObject *object)
inline static private

Definition at line 2150 of file heap.cc.

  {
    DCHECK(IsShortcutCandidate(map->instance_type()));

    Heap* heap = map->GetHeap();

    if (marks_handling == IGNORE_MARKS &&
        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
      HeapObject* first =
          HeapObject::cast(ConsString::cast(object)->unchecked_first());

      *slot = first;

      if (!heap->InNewSpace(first)) {
        object->set_map_word(MapWord::FromForwardingAddress(first));
        return;
      }

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();

        *slot = target;
        object->set_map_word(MapWord::FromForwardingAddress(target));
        return;
      }

      heap->DoScavengeObject(first->map(), slot, first);
      object->set_map_word(MapWord::FromForwardingAddress(*slot));
      return;
    }

    int object_size = ConsString::kSize;
    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object,
                                                     object_size);
  }

References DCHECK, v8::internal::Heap::DoScavengeObject(), v8::internal::IGNORE_MARKS, v8::internal::Heap::InNewSpace(), v8::internal::IsShortcutCandidate(), v8::internal::ConsString::kSize, map, v8::internal::HeapObject::map(), v8::internal::HeapObject::map_word(), and v8::internal::HeapObject::set_map_word().

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize().

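The fast path above relies on cons-string shortcutting: a ConsString whose second half is the empty string is semantically identical to its first half, so instead of evacuating the wrapper the scavenger rewrites the referencing slot to point straight at the first half (and leaves a forwarding address behind). A standalone sketch of the redirection, using a hypothetical Str type rather than V8's string hierarchy:

    #include <cassert>
    #include <string>

    struct Str {
      std::string flat;      // payload of a flat string
      Str* first = nullptr;  // set for cons strings
      Str* second = nullptr;
    };

    // If a cons string's second part is the empty string, a slot pointing at
    // the pair can be redirected to the first part.
    Str* ShortcutIfPossible(Str* s, const Str* empty_string) {
      if (s->first != nullptr && s->second == empty_string) return s->first;
      return s;
    }

    int main() {
      Str empty;              // plays the role of heap->empty_string()
      Str flat{"hello"};
      Str cons;
      cons.first = &flat;
      cons.second = &empty;
      Str* slot = &cons;      // some pointer field in the heap
      slot = ShortcutIfPossible(slot, &empty);
      assert(slot == &flat);  // one level of indirection removed
      return 0;
    }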

◆ GetTable()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static VisitorDispatchTable<ScavengingCallback>* v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::GetTable ()
inline static

Definition at line 1894 of file heap.cc.

  {
    return &table_;
  }

References v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::table_.

◆ Initialize()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::Initialize ()
inline static

Definition at line 1825 of file heap.cc.

  {
    table_.Register(kVisitSeqOneByteString, &EvacuateSeqOneByteString);
    table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
    table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
    table_.Register(kVisitByteArray, &EvacuateByteArray);
    table_.Register(kVisitFixedArray, &EvacuateFixedArray);
    table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
    table_.Register(kVisitFixedTypedArray, &EvacuateFixedTypedArray);
    table_.Register(kVisitFixedFloat64Array, &EvacuateFixedFloat64Array);

    table_.Register(
        kVisitNativeContext,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Context::kSize>);

    table_.Register(
        kVisitConsString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            ConsString::kSize>);

    table_.Register(
        kVisitSlicedString,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SlicedString::kSize>);

    table_.Register(
        kVisitSymbol,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            Symbol::kSize>);

    table_.Register(
        kVisitSharedFunctionInfo,
        &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
            SharedFunctionInfo::kSize>);

    table_.Register(kVisitJSWeakCollection,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSArrayBuffer,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSTypedArray,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSDataView,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    table_.Register(kVisitJSRegExp,
                    &ObjectEvacuationStrategy<POINTER_OBJECT>::Visit);

    if (marks_handling == IGNORE_MARKS) {
      table_.Register(
          kVisitJSFunction,
          &ObjectEvacuationStrategy<POINTER_OBJECT>::template VisitSpecialized<
              JSFunction::kSize>);
    } else {
      table_.Register(kVisitJSFunction, &EvacuateJSFunction);
    }

    table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
                                   kVisitDataObject, kVisitDataObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitJSObject, kVisitJSObjectGeneric>();

    table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
                                   kVisitStruct, kVisitStructGeneric>();
  }

References v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateByteArray(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedArray(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedDoubleArray(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedFloat64Array(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateFixedTypedArray(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateJSFunction(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateSeqOneByteString(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateSeqTwoByteString(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::EvacuateShortcutCandidate(), v8::internal::IGNORE_MARKS, v8::internal::Context::kSize, v8::internal::SharedFunctionInfo::kSize, v8::internal::JSFunction::kSize, v8::internal::Symbol::kSize, v8::internal::ConsString::kSize, v8::internal::SlicedString::kSize, v8::internal::StaticVisitorBase::kVisitDataObject, v8::internal::StaticVisitorBase::kVisitJSObject, v8::internal::StaticVisitorBase::kVisitStruct, v8::internal::VisitorDispatchTable< Callback >::Register(), v8::internal::VisitorDispatchTable< Callback >::RegisterSpecializations(), and v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::table_.

Referenced by v8::internal::InitializeScavengingVisitorsTables().

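The three RegisterSpecializations calls at the end fill whole ranges of visitor ids, one id per small object size class, with size-specialized instantiations of the evacuation strategy's visit function, leaving the *Generic id as the fallback for larger objects. The sketch below shows the shape of that idea; the id layout and size classes in it are invented for illustration and do not match V8's actual VisitorId encoding.

    #include <array>
    #include <cstddef>
    #include <cstdio>

    using Callback = void (*)();

    // One template instantiation per size class gives the compiler a
    // constant object size to work with on the fast path.
    template <int kSize>
    void VisitSpecialized() { std::printf("visit, fixed size %d\n", kSize); }

    void VisitGeneric() { std::puts("visit, generic size"); }

    // Fill ids kBase..kBase+2 with fixed-size fast paths and id kGeneric
    // with the variable-size fallback.
    template <int kBase, int kGeneric, int kWordSize, std::size_t N>
    void RegisterSpecializations(std::array<Callback, N>& table) {
      table[kBase + 0] = &VisitSpecialized<2 * kWordSize>;
      table[kBase + 1] = &VisitSpecialized<3 * kWordSize>;
      table[kBase + 2] = &VisitSpecialized<4 * kWordSize>;
      table[kGeneric] = &VisitGeneric;
    }

    int main() {
      std::array<Callback, 8> table{};
      RegisterSpecializations<0, 3, 4>(table);
      table[1]();  // fixed-size fast path (12 bytes)
      table[3]();  // generic fallback
      return 0;
    }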

◆ INLINE()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::INLINE (static void MigrateObject(Heap *heap, HeapObject *source, HeapObject *target, int size))
inline private

Definition at line 1919 of file heap.cc.

  {
    // If we migrate into to-space, then the to-space top pointer should be
    // right after the target object. Incorporate double alignment
    // over-allocation.
    DCHECK(!heap->InToSpace(target) ||
           target->address() + size == heap->new_space()->top() ||
           target->address() + size + kPointerSize == heap->new_space()->top());

    // Make sure that we do not overwrite the promotion queue which is at
    // the end of to-space.
    DCHECK(!heap->InToSpace(target) ||
           heap->promotion_queue()->IsBelowPromotionQueue(
               heap->new_space()->top()));

    // Copy the content of source to target.
    heap->CopyBlock(target->address(), source->address(), size);

    // Set the forwarding address.
    source->set_map_word(MapWord::FromForwardingAddress(target));

    if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
      // Update NewSpace stats if necessary.
      RecordCopiedObject(heap, target);
      heap->OnMoveEvent(target, source, size);
    }

    if (marks_handling == TRANSFER_MARKS) {
      if (Marking::TransferColor(source, target)) {
        MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
      }
    }
  }

References v8::internal::HeapObject::address(), v8::internal::Heap::CopyBlock(), DCHECK, v8::internal::MemoryChunk::IncrementLiveBytesFromGC(), v8::internal::Heap::InToSpace(), v8::internal::PromotionQueue::IsBelowPromotionQueue(), v8::internal::kPointerSize, v8::internal::LOGGING_AND_PROFILING_ENABLED, v8::internal::Heap::new_space(), v8::internal::Heap::OnMoveEvent(), v8::internal::Heap::promotion_queue(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::RecordCopiedObject(), v8::internal::HeapObject::set_map_word(), size, v8::internal::NewSpace::top(), and v8::internal::TRANSFER_MARKS.

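MigrateObject's set_map_word(MapWord::FromForwardingAddress(target)) call is what makes scavenging idempotent per object: once the body has been copied, the stale source object's map word doubles as a forwarding pointer, so any other slot that still references the source can be redirected without copying again. Below is a standalone sketch of the encoding; the low-bit tag is an assumption for illustration, not V8's actual MapWord layout.

    #include <cassert>
    #include <cstdint>

    struct FakeObject {
      uintptr_t map_word;  // normally a map pointer; a forwarding address after the copy
    };

    // Hypothetical low-bit tag; V8's real MapWord encoding is more involved.
    constexpr uintptr_t kForwardedBit = 1;

    void SetForwardingAddress(FakeObject* source, FakeObject* target) {
      source->map_word = reinterpret_cast<uintptr_t>(target) | kForwardedBit;
    }

    bool IsForwardingAddress(const FakeObject* o) {
      return (o->map_word & kForwardedBit) != 0;
    }

    FakeObject* ToForwardingAddress(const FakeObject* o) {
      return reinterpret_cast<FakeObject*>(o->map_word & ~kForwardedBit);
    }

    int main() {
      FakeObject source{0}, target{0};
      SetForwardingAddress(&source, &target);  // cf. set_map_word(FromForwardingAddress(...))
      assert(IsForwardingAddress(&source));
      assert(ToForwardingAddress(&source) == &target);
      return 0;
    }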

◆ PromoteObject()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
template<ObjectContents object_contents, int alignment>
static bool v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::PromoteObject (Map *map, HeapObject **slot, HeapObject *object, int object_size)
inline static private

Definition at line 1994 of file heap.cc.

  {
    Heap* heap = map->GetHeap();

    int allocation_size = object_size;
    if (alignment != kObjectAlignment) {
      DCHECK(alignment == kDoubleAlignment);
      allocation_size += kPointerSize;
    }

    AllocationResult allocation;
    if (object_contents == DATA_OBJECT) {
      DCHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
      allocation = heap->old_data_space()->AllocateRaw(allocation_size);
    } else {
      DCHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
      allocation = heap->old_pointer_space()->AllocateRaw(allocation_size);
    }

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      if (alignment != kObjectAlignment) {
        target = EnsureDoubleAligned(heap, target, allocation_size);
      }

      // Order is important: slot might be inside of the target if target
      // was allocated over a dead object and slot comes from the store
      // buffer.
      *slot = target;
      MigrateObject(heap, object, target, object_size);

      if (object_contents == POINTER_OBJECT) {
        if (map->instance_type() == JS_FUNCTION_TYPE) {
          heap->promotion_queue()->insert(target,
                                          JSFunction::kNonWeakFieldsEndOffset);
        } else {
          heap->promotion_queue()->insert(target, object_size);
        }
      }
      heap->IncrementPromotedObjectsSize(object_size);
      return true;
    }
    return false;
  }

References v8::internal::PagedSpace::AllocateRaw(), v8::internal::Heap::AllowedToBeMigrated(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::DATA_OBJECT, DCHECK, v8::internal::EnsureDoubleAligned(), v8::internal::Heap::IncrementPromotedObjectsSize(), v8::internal::PromotionQueue::insert(), v8::internal::JS_FUNCTION_TYPE, v8::internal::kDoubleAlignment, v8::internal::JSFunction::kNonWeakFieldsEndOffset, v8::internal::kObjectAlignment, v8::internal::kPointerSize, map, NULL, v8::internal::OLD_DATA_SPACE, v8::internal::Heap::old_data_space(), v8::internal::OLD_POINTER_SPACE, v8::internal::Heap::old_pointer_space(), v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::POINTER_OBJECT, v8::internal::Heap::promotion_queue(), and v8::internal::AllocationResult::To().

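When alignment == kDoubleAlignment, the code above over-allocates by one pointer word (allocation_size += kPointerSize) so that EnsureDoubleAligned can always find an 8-byte-aligned start address inside the block, plugging any skipped word with a filler. A standalone sketch of the arithmetic, assuming a 32-bit configuration where kPointerSize is 4 and kDoubleAlignment is 8:

    #include <cassert>
    #include <cstdint>

    constexpr uintptr_t kPointerSize = 4;  // assumed 32-bit heap word
    constexpr uintptr_t kDoubleAlignment = 8;

    // cf. EnsureDoubleAligned(): bump an address to the next 8-byte boundary.
    uintptr_t AlignForDoubles(uintptr_t address) {
      return (address + (kDoubleAlignment - 1)) & ~(kDoubleAlignment - 1);
    }

    int main() {
      for (uintptr_t base = 0; base < 64; base += kPointerSize) {
        uintptr_t aligned = AlignForDoubles(base);
        assert(aligned % kDoubleAlignment == 0);
        // The shift is at most one pointer word: exactly the extra
        // kPointerSize requested in PromoteObject/SemiSpaceCopyObject.
        assert(aligned - base <= kPointerSize);
      }
      return 0;
    }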

◆ RecordCopiedObject()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
static void v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::RecordCopiedObject (Heap *heap, HeapObject *obj)
inline static private

Definition at line 1901 of file heap.cc.

  {
    bool should_record = false;
#ifdef DEBUG
    should_record = FLAG_heap_stats;
#endif
    should_record = should_record || FLAG_log_gc;
    if (should_record) {
      if (heap->new_space()->Contains(obj)) {
        heap->new_space()->RecordAllocation(obj);
      } else {
        heap->new_space()->RecordPromotion(obj);
      }
    }
  }

References v8::internal::NewSpace::Contains(), v8::internal::Heap::new_space(), v8::internal::NewSpace::RecordAllocation(), and v8::internal::NewSpace::RecordPromotion().

Referenced by v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::INLINE().


◆ SemiSpaceCopyObject()

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
template<int alignment>
static bool v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::SemiSpaceCopyObject (Map *map, HeapObject **slot, HeapObject *object, int object_size)
inline static private

Definition at line 1954 of file heap.cc.

  {
    Heap* heap = map->GetHeap();

    int allocation_size = object_size;
    if (alignment != kObjectAlignment) {
      DCHECK(alignment == kDoubleAlignment);
      allocation_size += kPointerSize;
    }

    DCHECK(heap->AllowedToBeMigrated(object, NEW_SPACE));
    AllocationResult allocation =
        heap->new_space()->AllocateRaw(allocation_size);

    HeapObject* target = NULL;  // Initialization to please compiler.
    if (allocation.To(&target)) {
      // Order is important here: Set the promotion limit before storing a
      // filler for double alignment or migrating the object. Otherwise we
      // may end up overwriting promotion queue entries when we migrate the
      // object.
      heap->promotion_queue()->SetNewLimit(heap->new_space()->top());

      if (alignment != kObjectAlignment) {
        target = EnsureDoubleAligned(heap, target, allocation_size);
      }

      // Order is important: slot might be inside of the target if target
      // was allocated over a dead object and slot comes from the store
      // buffer.
      *slot = target;
      MigrateObject(heap, object, target, object_size);

      heap->IncrementSemiSpaceCopiedObjectSize(object_size);
      return true;
    }
    return false;
  }

References v8::internal::Heap::AllowedToBeMigrated(), DCHECK, v8::internal::EnsureDoubleAligned(), v8::internal::Heap::IncrementSemiSpaceCopiedObjectSize(), v8::internal::kDoubleAlignment, v8::internal::kObjectAlignment, v8::internal::kPointerSize, map, v8::internal::NEW_SPACE, v8::internal::Heap::new_space(), NULL, v8::internal::Heap::promotion_queue(), v8::internal::PromotionQueue::SetNewLimit(), v8::internal::AllocationResult::To(), and v8::internal::NewSpace::top().


Member Data Documentation

◆ table_

template<MarksHandling marks_handling, LoggingAndProfiling logging_and_profiling_mode>
VisitorDispatchTable< ScavengingCallback > v8::internal::ScavengingVisitor< marks_handling, logging_and_profiling_mode >::table_
static private

The documentation for this class was generated from the following file: heap.cc