V8 Project
v8::internal::StoreBuffer Class Reference

#include <store-buffer.h>

+ Collaboration diagram for v8::internal::StoreBuffer:

Public Member Functions

 StoreBuffer (Heap *heap)
 
Address TopAddress ()
 
void SetUp ()
 
void TearDown ()
 
void Mark (Address addr)
 
void EnterDirectlyIntoStoreBuffer (Address addr)
 
void IteratePointersToNewSpace (ObjectSlotCallback callback)
 
void IteratePointersToNewSpaceAndClearMaps (ObjectSlotCallback callback)
 
void Compact ()
 
void GCPrologue ()
 
void GCEpilogue ()
 
Object *** Limit ()
 
Object *** Start ()
 
Object *** Top ()
 
void SetTop (Object ***top)
 
bool old_buffer_is_sorted ()
 
bool old_buffer_is_filtered ()
 
void SortUniq ()
 
void EnsureSpace (intptr_t space_needed)
 
void Verify ()
 
bool PrepareForIteration ()
 
void Filter (int flag)
 

Static Public Member Functions

static void StoreBufferOverflow (Isolate *isolate)
 

Static Public Attributes

static const int kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2)
 
static const int kStoreBufferSize = kStoreBufferOverflowBit
 
static const int kStoreBufferLength = kStoreBufferSize / sizeof(Address)
 
static const int kOldStoreBufferLength = kStoreBufferLength * 16
 
static const int kHashSetLengthLog2 = 12
 
static const int kHashSetLength = 1 << kHashSetLengthLog2
 

Private Member Functions

void ClearFilteringHashSets ()
 
bool SpaceAvailable (intptr_t space_needed)
 
void Uniq ()
 
void ExemptPopularPages (int prime_sample_step, int threshold)
 
void ClearDeadObject (HeapObject *object)
 
void IteratePointersToNewSpace (ObjectSlotCallback callback, bool clear_maps)
 
void FindPointersToNewSpaceInRegion (Address start, Address end, ObjectSlotCallback slot_callback, bool clear_maps)
 
void IteratePointersOnPage (PagedSpace *space, Page *page, RegionCallback region_callback, ObjectSlotCallback slot_callback)
 
void IteratePointersInStoreBuffer (ObjectSlotCallback slot_callback, bool clear_maps)
 

Private Attributes

Heap * heap_
 
Address * start_
 
Address * limit_
 
Address * old_start_
 
Address * old_limit_
 
Address * old_top_
 
Address * old_reserved_limit_
 
base::VirtualMemory * old_virtual_memory_
 
bool old_buffer_is_sorted_
 
bool old_buffer_is_filtered_
 
bool during_gc_
 
bool store_buffer_rebuilding_enabled_
 
StoreBufferCallback callback_
 
bool may_move_store_buffer_entries_
 
base::VirtualMemory * virtual_memory_
 
uintptr_t * hash_set_1_
 
uintptr_t * hash_set_2_
 
bool hash_sets_are_empty_
 

Friends

class StoreBufferRebuildScope
 
class DontMoveStoreBufferEntriesScope
 

Detailed Description

Definition at line 28 of file store-buffer.h.

Constructor & Destructor Documentation

◆ StoreBuffer()

v8::internal::StoreBuffer::StoreBuffer ( Heap *  heap)
explicit

Definition at line 16 of file store-buffer.cc.

17  : heap_(heap),
18  start_(NULL),
19  limit_(NULL),
22  old_top_(NULL),
24  old_buffer_is_sorted_(false),
26  during_gc_(false),
28  callback_(NULL),
33  hash_sets_are_empty_(true) {}
StoreBufferCallback callback_
Definition: store-buffer.h:130
base::VirtualMemory * virtual_memory_
Definition: store-buffer.h:133
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL

Member Function Documentation

◆ ClearDeadObject()

void v8::internal::StoreBuffer::ClearDeadObject ( HeapObject *  object)
inlineprivate

Definition at line 54 of file store-buffer-inl.h.

54  {
55  Address& map_field = Memory::Address_at(object->address());
56  if (heap_->map_space()->Contains(map_field)) {
57  map_field = NULL;
58  }
59 }
MapSpace * map_space()
Definition: heap.h:597
static Address & Address_at(Address addr)
Definition: v8memory.h:56
bool Contains(Address a)
Definition: spaces-inl.h:150
byte * Address
Definition: globals.h:101

References v8::internal::HeapObject::address(), v8::internal::Memory::Address_at(), v8::internal::PagedSpace::Contains(), heap_, v8::internal::Heap::map_space(), and NULL.

Referenced by FindPointersToNewSpaceInRegion(), and IteratePointersInStoreBuffer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ClearFilteringHashSets()

void v8::internal::StoreBuffer::ClearFilteringHashSets ( )
private

Definition at line 312 of file store-buffer.cc.

312  {
313  if (!hash_sets_are_empty_) {
314  memset(reinterpret_cast<void*>(hash_set_1_), 0,
315  sizeof(uintptr_t) * kHashSetLength);
316  memset(reinterpret_cast<void*>(hash_set_2_), 0,
317  sizeof(uintptr_t) * kHashSetLength);
318  hash_sets_are_empty_ = true;
319  }
320 }
static const int kHashSetLength
Definition: store-buffer.h:72

References hash_set_1_, hash_set_2_, hash_sets_are_empty_, and kHashSetLength.

Referenced by Filter(), GCPrologue(), PrepareForIteration(), SetUp(), and SortUniq().

+ Here is the caller graph for this function:

◆ Compact()

void v8::internal::StoreBuffer::Compact ( )

Definition at line 528 of file store-buffer.cc.

528  {
529  Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
530 
531  if (top == start_) return;
532 
533  // There's no check of the limit in the loop below so we check here for
534  // the worst case (compaction doesn't eliminate any pointers).
535  DCHECK(top <= limit_);
537  EnsureSpace(top - start_);
539  // Goes through the addresses in the store buffer attempting to remove
540  // duplicates. In the interest of speed this is a lossy operation. Some
541  // duplicates will remain. We have two hash sets with different hash
542  // functions to reduce the number of unnecessary clashes.
543  hash_sets_are_empty_ = false; // Hash sets are in use.
544  for (Address* current = start_; current < top; current++) {
545  DCHECK(!heap_->cell_space()->Contains(*current));
546  DCHECK(!heap_->code_space()->Contains(*current));
547  DCHECK(!heap_->old_data_space()->Contains(*current));
548  uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current);
549  // Shift out the last bits including any tags.
550  int_addr >>= kPointerSizeLog2;
551  // The upper part of an address is basically random because of ASLR and OS
552  // non-determinism, so we use only the bits within a page for hashing to
553  // make v8's behavior (more) deterministic.
554  uintptr_t hash_addr =
556  int hash1 = ((hash_addr ^ (hash_addr >> kHashSetLengthLog2)) &
557  (kHashSetLength - 1));
558  if (hash_set_1_[hash1] == int_addr) continue;
559  uintptr_t hash2 = (hash_addr - (hash_addr >> kHashSetLengthLog2));
560  hash2 ^= hash2 >> (kHashSetLengthLog2 * 2);
561  hash2 &= (kHashSetLength - 1);
562  if (hash_set_2_[hash2] == int_addr) continue;
563  if (hash_set_1_[hash1] == 0) {
564  hash_set_1_[hash1] = int_addr;
565  } else if (hash_set_2_[hash2] == 0) {
566  hash_set_2_[hash2] = int_addr;
567  } else {
568  // Rather than slowing down we just throw away some entries. This will
569  // cause some duplicates to remain undetected.
570  hash_set_1_[hash1] = int_addr;
571  hash_set_2_[hash2] = 0;
572  }
573  old_buffer_is_sorted_ = false;
574  old_buffer_is_filtered_ = false;
575  *old_top_++ = reinterpret_cast<Address>(int_addr << kPointerSizeLog2);
577  }
578  heap_->isolate()->counters()->store_buffer_compactions()->Increment();
579 }
OldSpace * code_space()
Definition: heap.h:596
void public_set_store_buffer_top(Address *top)
Definition: heap.h:887
CellSpace * cell_space()
Definition: heap.h:598
Isolate * isolate()
Definition: heap-inl.h:589
OldSpace * old_data_space()
Definition: heap.h:595
Counters * counters()
Definition: isolate.h:857
static const intptr_t kPageAlignmentMask
Definition: spaces.h:757
void EnsureSpace(intptr_t space_needed)
static const int kHashSetLengthLog2
Definition: store-buffer.h:71
#define DCHECK(condition)
Definition: logging.h:205
const int kPointerSizeLog2
Definition: globals.h:147

References v8::internal::Heap::cell_space(), v8::internal::Heap::code_space(), v8::internal::PagedSpace::Contains(), v8::internal::Isolate::counters(), DCHECK, EnsureSpace(), hash_set_1_, hash_set_2_, hash_sets_are_empty_, heap_, v8::internal::Heap::isolate(), kHashSetLength, kHashSetLengthLog2, v8::internal::Page::kPageAlignmentMask, v8::internal::kPointerSizeLog2, limit_, may_move_store_buffer_entries_, old_buffer_is_filtered_, old_buffer_is_sorted_, v8::internal::Heap::old_data_space(), old_limit_, old_top_, v8::internal::Heap::public_set_store_buffer_top(), and start_.

Referenced by EnsureSpace(), v8::internal::Heap::FreeQueuedChunks(), Mark(), PrepareForIteration(), SortUniq(), and StoreBufferOverflow().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnsureSpace()

void v8::internal::StoreBuffer::EnsureSpace ( intptr_t  space_needed)

Definition at line 127 of file store-buffer.cc.

127  {
128  while (old_limit_ - old_top_ < space_needed &&
130  size_t grow = old_limit_ - old_start_; // Double size.
131  CHECK(old_virtual_memory_->Commit(reinterpret_cast<void*>(old_limit_),
132  grow * kPointerSize, false));
133  old_limit_ += grow;
134  }
135 
136  if (SpaceAvailable(space_needed)) return;
137 
138  if (old_buffer_is_filtered_) return;
140  Compact();
141 
143  bool page_has_scan_on_scavenge_flag = false;
144 
145  PointerChunkIterator it(heap_);
146  MemoryChunk* chunk;
147  while ((chunk = it.next()) != NULL) {
148  if (chunk->scan_on_scavenge()) {
149  page_has_scan_on_scavenge_flag = true;
150  break;
151  }
152  }
153 
154  if (page_has_scan_on_scavenge_flag) {
156  }
157 
158  if (SpaceAvailable(space_needed)) return;
159 
160  // Sample 1 entry in 97 and filter out the pages where we estimate that more
161  // than 1 in 8 pointers are to new space.
162  static const int kSampleFinenesses = 5;
163  static const struct Samples {
164  int prime_sample_step;
165  int threshold;
166  } samples[kSampleFinenesses] = {
167  {97, ((Page::kPageSize / kPointerSize) / 97) / 8},
168  {23, ((Page::kPageSize / kPointerSize) / 23) / 16},
169  {7, ((Page::kPageSize / kPointerSize) / 7) / 32},
170  {3, ((Page::kPageSize / kPointerSize) / 3) / 256},
171  {1, 0}};
172  for (int i = 0; i < kSampleFinenesses; i++) {
173  ExemptPopularPages(samples[i].prime_sample_step, samples[i].threshold);
174  // As a last resort we mark all pages as being exempt from the store buffer.
175  DCHECK(i != (kSampleFinenesses - 1) || old_top_ == old_start_);
176  if (SpaceAvailable(space_needed)) return;
177  }
178  UNREACHABLE();
179 }
bool Commit(void *address, size_t size, bool is_executable)
static const int kPageSize
Definition: spaces.h:748
void ExemptPopularPages(int prime_sample_step, int threshold)
bool SpaceAvailable(intptr_t space_needed)
base::VirtualMemory * old_virtual_memory_
Definition: store-buffer.h:121
#define UNREACHABLE()
Definition: logging.h:30
#define CHECK(condition)
Definition: logging.h:36
const int kPointerSize
Definition: globals.h:129

References CHECK, v8::base::VirtualMemory::Commit(), Compact(), DCHECK, ExemptPopularPages(), Filter(), heap_, v8::internal::Page::kPageSize, v8::internal::kPointerSize, may_move_store_buffer_entries_, NULL, old_buffer_is_filtered_, old_limit_, old_reserved_limit_, old_start_, old_top_, old_virtual_memory_, v8::internal::MemoryChunk::scan_on_scavenge(), v8::internal::MemoryChunk::SCAN_ON_SCAVENGE, SpaceAvailable(), and UNREACHABLE.

Referenced by v8::internal::StoreBufferRebuilder::Callback(), and Compact().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ EnterDirectlyIntoStoreBuffer()

void v8::internal::StoreBuffer::EnterDirectlyIntoStoreBuffer ( Address  addr)
inline

Definition at line 34 of file store-buffer-inl.h.

34  {
36  SLOW_DCHECK(!heap_->cell_space()->Contains(addr) &&
37  !heap_->code_space()->Contains(addr) &&
38  !heap_->old_data_space()->Contains(addr) &&
39  !heap_->new_space()->Contains(addr));
40  Address* top = old_top_;
41  *top++ = addr;
42  old_top_ = top;
43  old_buffer_is_sorted_ = false;
45  if (top >= old_limit_) {
46  DCHECK(callback_ != NULL);
47  (*callback_)(heap_, MemoryChunk::FromAnyPointerAddress(heap_, addr),
49  }
50  }
51 }
#define SLOW_DCHECK(condition)
Definition: checks.h:30
NewSpace * new_space()
Definition: heap.h:593
static MemoryChunk * FromAnyPointerAddress(Heap *heap, Address addr)
Definition: spaces-inl.h:169
bool Contains(Address a)
Definition: spaces.h:2349
@ kStoreBufferFullEvent
Definition: globals.h:494

References callback_, v8::internal::Heap::cell_space(), v8::internal::Heap::code_space(), v8::internal::PagedSpace::Contains(), v8::internal::NewSpace::Contains(), DCHECK, v8::internal::MemoryChunk::FromAnyPointerAddress(), heap_, v8::internal::kStoreBufferFullEvent, v8::internal::Heap::new_space(), NULL, old_buffer_is_filtered_, old_buffer_is_sorted_, v8::internal::Heap::old_data_space(), old_limit_, old_top_, SLOW_DCHECK, and store_buffer_rebuilding_enabled_.

Referenced by FindPointersToNewSpaceInRegion(), v8::internal::Heap::IterateAndMarkPointersToFromSpace(), and IteratePointersInStoreBuffer().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ ExemptPopularPages()

void v8::internal::StoreBuffer::ExemptPopularPages ( int  prime_sample_step,
int  threshold 
)
private

Definition at line 184 of file store-buffer.cc.

184  {
185  PointerChunkIterator it(heap_);
186  MemoryChunk* chunk;
187  while ((chunk = it.next()) != NULL) {
188  chunk->set_store_buffer_counter(0);
189  }
190  bool created_new_scan_on_scavenge_pages = false;
191  MemoryChunk* previous_chunk = NULL;
192  for (Address* p = old_start_; p < old_top_; p += prime_sample_step) {
193  Address addr = *p;
194  MemoryChunk* containing_chunk = NULL;
195  if (previous_chunk != NULL && previous_chunk->Contains(addr)) {
196  containing_chunk = previous_chunk;
197  } else {
198  containing_chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr);
199  }
200  int old_counter = containing_chunk->store_buffer_counter();
201  if (old_counter >= threshold) {
202  containing_chunk->set_scan_on_scavenge(true);
203  created_new_scan_on_scavenge_pages = true;
204  }
205  containing_chunk->set_store_buffer_counter(old_counter + 1);
206  previous_chunk = containing_chunk;
207  }
208  if (created_new_scan_on_scavenge_pages) {
210  }
212 }

References v8::internal::MemoryChunk::Contains(), Filter(), v8::internal::MemoryChunk::FromAnyPointerAddress(), heap_, NULL, old_buffer_is_filtered_, old_start_, old_top_, v8::internal::MemoryChunk::SCAN_ON_SCAVENGE, v8::internal::MemoryChunk::set_scan_on_scavenge(), v8::internal::MemoryChunk::set_store_buffer_counter(), and v8::internal::MemoryChunk::store_buffer_counter().

Referenced by EnsureSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Filter()

void v8::internal::StoreBuffer::Filter ( int  flag)

Definition at line 215 of file store-buffer.cc.

215  {
216  Address* new_top = old_start_;
217  MemoryChunk* previous_chunk = NULL;
218  for (Address* p = old_start_; p < old_top_; p++) {
219  Address addr = *p;
220  MemoryChunk* containing_chunk = NULL;
221  if (previous_chunk != NULL && previous_chunk->Contains(addr)) {
222  containing_chunk = previous_chunk;
223  } else {
224  containing_chunk = MemoryChunk::FromAnyPointerAddress(heap_, addr);
225  previous_chunk = containing_chunk;
226  }
227  if (!containing_chunk->IsFlagSet(flag)) {
228  *new_top++ = addr;
229  }
230  }
231  old_top_ = new_top;
232 
233  // Filtering hash sets are inconsistent with the store buffer after this
234  // operation.
236 }
kFeedbackVectorOffset flag
Definition: objects-inl.h:5418

References ClearFilteringHashSets(), v8::internal::MemoryChunk::Contains(), v8::internal::flag, v8::internal::MemoryChunk::FromAnyPointerAddress(), heap_, v8::internal::MemoryChunk::IsFlagSet(), NULL, old_start_, and old_top_.

Referenced by EnsureSpace(), ExemptPopularPages(), v8::internal::Heap::FreeQueuedChunks(), and PrepareForIteration().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ FindPointersToNewSpaceInRegion()

void v8::internal::StoreBuffer::FindPointersToNewSpaceInRegion ( Address  start,
Address  end,
ObjectSlotCallback  slot_callback,
bool  clear_maps 
)
private

Definition at line 370 of file store-buffer.cc.

372  {
373  for (Address slot_address = start; slot_address < end;
374  slot_address += kPointerSize) {
375  Object** slot = reinterpret_cast<Object**>(slot_address);
376  Object* object = reinterpret_cast<Object*>(
377  base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot)));
378  if (heap_->InNewSpace(object)) {
379  HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
380  DCHECK(heap_object->IsHeapObject());
381  // The new space object was not promoted if it still contains a map
382  // pointer. Clear the map field now lazily.
383  if (clear_maps) ClearDeadObject(heap_object);
384  slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
385  object = reinterpret_cast<Object*>(
386  base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot)));
387  if (heap_->InNewSpace(object)) {
388  EnterDirectlyIntoStoreBuffer(slot_address);
389  }
390  }
391  }
392 }
bool InNewSpace(Object *object)
Definition: heap-inl.h:322
void EnterDirectlyIntoStoreBuffer(Address addr)
void ClearDeadObject(HeapObject *object)
intptr_t AtomicWord
Definition: atomicops.h:57
Atomic8 NoBarrier_Load(volatile const Atomic8 *ptr)
kSerializedDataOffset Object
Definition: objects-inl.h:5322

References ClearDeadObject(), DCHECK, EnterDirectlyIntoStoreBuffer(), heap_, v8::internal::Heap::InNewSpace(), v8::internal::kPointerSize, and v8::base::NoBarrier_Load().

Referenced by IteratePointersToNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GCEpilogue()

void v8::internal::StoreBuffer::GCEpilogue ( )

Definition at line 360 of file store-buffer.cc.

360  {
361  during_gc_ = false;
362 #ifdef VERIFY_HEAP
363  if (FLAG_verify_heap) {
364  Verify();
365  }
366 #endif
367 }

References during_gc_, and Verify().

Referenced by v8::internal::Heap::GarbageCollectionEpilogue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ GCPrologue()

void v8::internal::StoreBuffer::GCPrologue ( )

Definition at line 323 of file store-buffer.cc.

323  {
325  during_gc_ = true;
326 }

References ClearFilteringHashSets(), and during_gc_.

Referenced by v8::internal::Heap::GarbageCollectionPrologue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IteratePointersInStoreBuffer()

void v8::internal::StoreBuffer::IteratePointersInStoreBuffer ( ObjectSlotCallback  slot_callback,
bool  clear_maps 
)
private

Definition at line 395 of file store-buffer.cc.

396  {
397  Address* limit = old_top_;
399  {
401  for (Address* current = old_start_; current < limit; current++) {
402 #ifdef DEBUG
403  Address* saved_top = old_top_;
404 #endif
405  Object** slot = reinterpret_cast<Object**>(*current);
406  Object* object = reinterpret_cast<Object*>(
407  base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot)));
408  if (heap_->InFromSpace(object)) {
409  HeapObject* heap_object = reinterpret_cast<HeapObject*>(object);
410  // The new space object was not promoted if it still contains a map
411  // pointer. Clear the map field now lazily.
412  if (clear_maps) ClearDeadObject(heap_object);
413  slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object);
414  object = reinterpret_cast<Object*>(
415  base::NoBarrier_Load(reinterpret_cast<base::AtomicWord*>(slot)));
416  if (heap_->InNewSpace(object)) {
417  EnterDirectlyIntoStoreBuffer(reinterpret_cast<Address>(slot));
418  }
419  }
420  DCHECK(old_top_ == saved_top + 1 || old_top_ == saved_top);
421  }
422  }
423 }
bool InFromSpace(Object *object)
Definition: heap-inl.h:334
friend class DontMoveStoreBufferEntriesScope
Definition: store-buffer.h:174

References ClearDeadObject(), DCHECK, EnterDirectlyIntoStoreBuffer(), heap_, v8::internal::Heap::InFromSpace(), v8::internal::Heap::InNewSpace(), v8::base::NoBarrier_Load(), old_start_, and old_top_.

Referenced by IteratePointersToNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ IteratePointersOnPage()

void v8::internal::StoreBuffer::IteratePointersOnPage ( PagedSpace *  space,
Page *  page,
RegionCallback  region_callback,
ObjectSlotCallback  slot_callback 
)
private

◆ IteratePointersToNewSpace() [1/2]

void v8::internal::StoreBuffer::IteratePointersToNewSpace ( ObjectSlotCallback  callback)

Definition at line 426 of file store-buffer.cc.

426  {
427  IteratePointersToNewSpace(slot_callback, false);
428 }
void IteratePointersToNewSpace(ObjectSlotCallback callback)

Referenced by IteratePointersToNewSpaceAndClearMaps(), and v8::internal::Heap::Scavenge().

+ Here is the caller graph for this function:

◆ IteratePointersToNewSpace() [2/2]

void v8::internal::StoreBuffer::IteratePointersToNewSpace ( ObjectSlotCallback  callback,
bool  clear_maps 
)
private

Definition at line 437 of file store-buffer.cc.

438  {
439  // We do not sort or remove duplicated entries from the store buffer because
440  // we expect that callback will rebuild the store buffer thus removing
441  // all duplicates and pointers to old space.
442  bool some_pages_to_scan = PrepareForIteration();
443 
444  // TODO(gc): we want to skip slots on evacuation candidates
445  // but we can't simply figure that out from slot address
446  // because slot can belong to a large object.
447  IteratePointersInStoreBuffer(slot_callback, clear_maps);
448 
449  // We are done scanning all the pointers that were in the store buffer, but
450  // there may be some pages marked scan_on_scavenge that have pointers to new
451  // space that are not in the store buffer. We must scan them now. As we
452  // scan, the surviving pointers to new space will be added to the store
453  // buffer. If there are still a lot of pointers to new space then we will
454  // keep the scan_on_scavenge flag on the page and discard the pointers that
455  // were added to the store buffer. If there are not many pointers to new
456  // space left on the page we will keep the pointers in the store buffer and
457  // remove the flag from the page.
458  if (some_pages_to_scan) {
459  if (callback_ != NULL) {
461  }
462  PointerChunkIterator it(heap_);
463  MemoryChunk* chunk;
464  while ((chunk = it.next()) != NULL) {
465  if (chunk->scan_on_scavenge()) {
466  chunk->set_scan_on_scavenge(false);
467  if (callback_ != NULL) {
468  (*callback_)(heap_, chunk, kStoreBufferScanningPageEvent);
469  }
470  if (chunk->owner() == heap_->lo_space()) {
471  LargePage* large_page = reinterpret_cast<LargePage*>(chunk);
472  HeapObject* array = large_page->GetObject();
473  DCHECK(array->IsFixedArray());
474  Address start = array->address();
475  Address end = start + array->Size();
476  FindPointersToNewSpaceInRegion(start, end, slot_callback, clear_maps);
477  } else {
478  Page* page = reinterpret_cast<Page*>(chunk);
479  PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner());
480  if (owner == heap_->map_space()) {
481  DCHECK(page->WasSwept());
482  HeapObjectIterator iterator(page, NULL);
483  for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
484  heap_object = iterator.Next()) {
485  // We skip free space objects.
486  if (!heap_object->IsFiller()) {
487  DCHECK(heap_object->IsMap());
489  heap_object->address() + Map::kPointerFieldsBeginOffset,
490  heap_object->address() + Map::kPointerFieldsEndOffset,
491  slot_callback, clear_maps);
492  }
493  }
494  } else {
495  if (!page->SweepingCompleted()) {
497  if (!page->SweepingCompleted()) {
498  // We were not able to sweep that page, i.e., a concurrent
499  // sweeper thread currently owns this page.
500  // TODO(hpayer): This may introduce a huge pause here. We
501  // just care about finish sweeping of the scan on scavenge page.
503  }
504  }
505  CHECK(page->owner() == heap_->old_pointer_space());
506  HeapObjectIterator iterator(page, NULL);
507  for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
508  heap_object = iterator.Next()) {
509  // We iterate over objects that contain new space pointers only.
510  if (!heap_object->MayContainRawValues()) {
512  heap_object->address() + HeapObject::kHeaderSize,
513  heap_object->address() + heap_object->Size(), slot_callback,
514  clear_maps);
515  }
516  }
517  }
518  }
519  }
520  }
521  if (callback_ != NULL) {
522  (*callback_)(heap_, NULL, kStoreBufferScanningPageEvent);
523  }
524  }
525 }
static const int kHeaderSize
Definition: objects.h:1428
OldSpace * old_pointer_space()
Definition: heap.h:594
LargeObjectSpace * lo_space()
Definition: heap.h:600
MarkCompactCollector * mark_compact_collector()
Definition: heap.h:1197
static const int kPointerFieldsEndOffset
Definition: objects.h:6207
static const int kPointerFieldsBeginOffset
Definition: objects.h:6206
int SweepInParallel(PagedSpace *space, int required_freed_bytes)
void FindPointersToNewSpaceInRegion(Address start, Address end, ObjectSlotCallback slot_callback, bool clear_maps)
void IteratePointersInStoreBuffer(ObjectSlotCallback slot_callback, bool clear_maps)
@ kStoreBufferScanningPageEvent
Definition: globals.h:496
@ kStoreBufferStartScanningPagesEvent
Definition: globals.h:495

References v8::internal::HeapObject::address(), callback_, CHECK, DCHECK, v8::internal::MarkCompactCollector::EnsureSweepingCompleted(), FindPointersToNewSpaceInRegion(), v8::internal::LargePage::GetObject(), heap_, IteratePointersInStoreBuffer(), v8::internal::HeapObject::kHeaderSize, v8::internal::Map::kPointerFieldsBeginOffset, v8::internal::Map::kPointerFieldsEndOffset, v8::internal::kStoreBufferScanningPageEvent, v8::internal::kStoreBufferStartScanningPagesEvent, v8::internal::Heap::lo_space(), v8::internal::Heap::map_space(), v8::internal::Heap::mark_compact_collector(), v8::internal::HeapObjectIterator::Next(), NULL, v8::internal::Heap::old_pointer_space(), v8::internal::MemoryChunk::owner(), PrepareForIteration(), v8::internal::MemoryChunk::scan_on_scavenge(), v8::internal::MemoryChunk::set_scan_on_scavenge(), v8::internal::HeapObject::Size(), v8::internal::MemoryChunk::SweepingCompleted(), v8::internal::MarkCompactCollector::SweepInParallel(), and v8::internal::Page::WasSwept().

+ Here is the call graph for this function:

◆ IteratePointersToNewSpaceAndClearMaps()

void v8::internal::StoreBuffer::IteratePointersToNewSpaceAndClearMaps ( ObjectSlotCallback  callback)

Definition at line 431 of file store-buffer.cc.

432  {
433  IteratePointersToNewSpace(slot_callback, true);
434 }

References IteratePointersToNewSpace().

Referenced by v8::internal::MarkCompactCollector::EvacuateNewSpaceAndCandidates().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Limit()

Object*** v8::internal::StoreBuffer::Limit ( )
inline

Definition at line 79 of file store-buffer.h.

79 { return reinterpret_cast<Object***>(old_limit_); }

References old_limit_.

Referenced by v8::internal::StoreBufferRebuilder::Callback(), and SetTop().

+ Here is the caller graph for this function:

◆ Mark()

void v8::internal::StoreBuffer::Mark ( Address  addr)
inline

Definition at line 18 of file store-buffer-inl.h.

18  {
19  DCHECK(!heap_->cell_space()->Contains(addr));
20  DCHECK(!heap_->code_space()->Contains(addr));
21  DCHECK(!heap_->old_data_space()->Contains(addr));
22  Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
23  *top++ = addr;
25  if ((reinterpret_cast<uintptr_t>(top) & kStoreBufferOverflowBit) != 0) {
26  DCHECK(top == limit_);
27  Compact();
28  } else {
29  DCHECK(top < limit_);
30  }
31 }
static const int kStoreBufferOverflowBit
Definition: store-buffer.h:67

References v8::internal::Heap::cell_space(), v8::internal::Heap::code_space(), Compact(), v8::internal::PagedSpace::Contains(), DCHECK, heap_, kStoreBufferOverflowBit, limit_, v8::internal::Heap::old_data_space(), and v8::internal::Heap::public_set_store_buffer_top().

Referenced by v8::internal::MarkCompactCollector::RecordMigratedSlot().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ old_buffer_is_filtered()

bool v8::internal::StoreBuffer::old_buffer_is_filtered ( )
inline

Definition at line 89 of file store-buffer.h.

89 { return old_buffer_is_filtered_; }

References old_buffer_is_filtered_.

◆ old_buffer_is_sorted()

bool v8::internal::StoreBuffer::old_buffer_is_sorted ( )
inline

Definition at line 88 of file store-buffer.h.

88 { return old_buffer_is_sorted_; }

References old_buffer_is_sorted_.

◆ PrepareForIteration()

bool v8::internal::StoreBuffer::PrepareForIteration ( )

Definition at line 253 of file store-buffer.cc.

253  {
254  Compact();
255  PointerChunkIterator it(heap_);
256  MemoryChunk* chunk;
257  bool page_has_scan_on_scavenge_flag = false;
258  while ((chunk = it.next()) != NULL) {
259  if (chunk->scan_on_scavenge()) {
260  page_has_scan_on_scavenge_flag = true;
261  break;
262  }
263  }
264 
265  if (page_has_scan_on_scavenge_flag) {
267  }
268 
269  // Filtering hash sets are inconsistent with the store buffer after
270  // iteration.
272 
273  return page_has_scan_on_scavenge_flag;
274 }

References ClearFilteringHashSets(), Compact(), Filter(), heap_, NULL, v8::internal::MemoryChunk::scan_on_scavenge(), and v8::internal::MemoryChunk::SCAN_ON_SCAVENGE.

Referenced by IteratePointersToNewSpace().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetTop()

void v8::internal::StoreBuffer::SetTop ( Object ***  top)
inline

Definition at line 82 of file store-buffer.h.

82  {
83  DCHECK(top >= Start());
84  DCHECK(top <= Limit());
85  old_top_ = reinterpret_cast<Address*>(top);
86  }

References DCHECK, Limit(), old_top_, and Start().

Referenced by v8::internal::StoreBufferRebuilder::Callback().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SetUp()

void v8::internal::StoreBuffer::SetUp ( )

Definition at line 36 of file store-buffer.cc.

36  {
37  virtual_memory_ = new base::VirtualMemory(kStoreBufferSize * 3);
38  uintptr_t start_as_int =
39  reinterpret_cast<uintptr_t>(virtual_memory_->address());
40  start_ =
41  reinterpret_cast<Address*>(RoundUp(start_as_int, kStoreBufferSize * 2));
42  limit_ = start_ + (kStoreBufferSize / kPointerSize);
43 
44  old_virtual_memory_ =
45      new base::VirtualMemory(kOldStoreBufferLength * kPointerSize);
46  old_top_ = old_start_ =
47      reinterpret_cast<Address*>(old_virtual_memory_->address());
48  // Don't know the alignment requirements of the OS, but it is certainly not
49  // less than 0xfff.
50  DCHECK((reinterpret_cast<uintptr_t>(old_start_) & 0xfff) == 0);
51  int initial_length =
52  static_cast<int>(base::OS::CommitPageSize() / kPointerSize);
53  DCHECK(initial_length > 0);
54  DCHECK(initial_length <= kOldStoreBufferLength);
55  old_limit_ = old_start_ + initial_length;
56  old_reserved_limit_ = old_start_ + kOldStoreBufferLength;
57 
58  CHECK(old_virtual_memory_->Commit(reinterpret_cast<void*>(old_start_),
59      (old_limit_ - old_start_) * kPointerSize,
60  false));
61 
62  DCHECK(reinterpret_cast<Address>(start_) >= virtual_memory_->address());
63  DCHECK(reinterpret_cast<Address>(limit_) >= virtual_memory_->address());
64  Address* vm_limit = reinterpret_cast<Address*>(
65  reinterpret_cast<char*>(virtual_memory_->address()) +
66      virtual_memory_->size());
67  DCHECK(start_ <= vm_limit);
68  DCHECK(limit_ <= vm_limit);
69  USE(vm_limit);
70  DCHECK((reinterpret_cast<uintptr_t>(limit_) & kStoreBufferOverflowBit) != 0);
71  DCHECK((reinterpret_cast<uintptr_t>(limit_ - 1) & kStoreBufferOverflowBit) ==
72  0);
73 
74  CHECK(virtual_memory_->Commit(reinterpret_cast<Address>(start_),
75      kStoreBufferSize,
76  false)); // Not executable.
77  heap_->public_set_store_buffer_top(start_);
78 
79  hash_set_1_ = new uintptr_t[kHashSetLength];
80  hash_set_2_ = new uintptr_t[kHashSetLength];
81  hash_sets_are_empty_ = false;
82 
83  ClearFilteringHashSets();
84 }
static intptr_t CommitPageSize()
static const int kOldStoreBufferLength
Definition: store-buffer.h:70
static const int kStoreBufferSize
Definition: store-buffer.h:68
void USE(T)
Definition: macros.h:322
static void RoundUp(Vector< char > buffer, int *length, int *decimal_point)
Definition: fixed-dtoa.cc:171

References v8::base::VirtualMemory::address(), CHECK, ClearFilteringHashSets(), v8::base::VirtualMemory::Commit(), v8::base::OS::CommitPageSize(), DCHECK, hash_set_1_, hash_set_2_, hash_sets_are_empty_, heap_, kHashSetLength, kOldStoreBufferLength, v8::internal::kPointerSize, kStoreBufferOverflowBit, kStoreBufferSize, limit_, old_limit_, old_reserved_limit_, old_start_, old_top_, old_virtual_memory_, v8::internal::Heap::public_set_store_buffer_top(), v8::internal::RoundUp(), v8::base::VirtualMemory::size(), start_, USE(), and virtual_memory_.

Referenced by v8::internal::Heap::SetUp().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ SortUniq()

void v8::internal::StoreBuffer::SortUniq ( )

Definition at line 239 of file store-buffer.cc.

239  {
240  Compact();
241  if (old_buffer_is_sorted_) return;
242  std::sort(old_start_, old_top_);
243  Uniq();
244 
245  old_buffer_is_sorted_ = true;
246 
247  // Filtering hash sets are inconsistent with the store buffer after this
248  // operation.
249  ClearFilteringHashSets();
250 }

References ClearFilteringHashSets(), Compact(), old_buffer_is_sorted_, old_start_, old_top_, and Uniq().

+ Here is the call graph for this function:

◆ SpaceAvailable()

bool v8::internal::StoreBuffer::SpaceAvailable ( intptr_t  space_needed)
private

Definition at line 122 of file store-buffer.cc.

122  {
123  return old_limit_ - old_top_ >= space_needed;
124 }

References old_limit_, and old_top_.

Referenced by EnsureSpace().

+ Here is the caller graph for this function:

◆ Start()

Object*** v8::internal::StoreBuffer::Start ( )
inline

Definition at line 80 of file store-buffer.h.

80 { return reinterpret_cast<Object***>(old_start_); }

References old_start_.

Referenced by SetTop().

+ Here is the caller graph for this function:

◆ StoreBufferOverflow()

void v8::internal::StoreBuffer::StoreBufferOverflow ( Isolate isolate)
static

Definition at line 98 of file store-buffer.cc.

98  {
99  isolate->heap()->store_buffer()->Compact();
100  isolate->counters()->store_buffer_overflows()->Increment();
101 }

References Compact(), v8::internal::Isolate::counters(), v8::internal::Isolate::heap(), and v8::internal::Heap::store_buffer().

+ Here is the call graph for this function:

◆ TearDown()

void v8::internal::StoreBuffer::TearDown ( )

Definition at line 87 of file store-buffer.cc.

87  {
88  delete virtual_memory_;
89  delete old_virtual_memory_;
90  delete[] hash_set_1_;
91  delete[] hash_set_2_;
92  old_start_ = old_top_ = old_limit_ = old_reserved_limit_ = NULL;
93  start_ = limit_ = NULL;
94  heap_->public_set_store_buffer_top(start_);
95 }

References hash_set_1_, hash_set_2_, heap_, limit_, NULL, old_limit_, old_reserved_limit_, old_start_, old_top_, old_virtual_memory_, v8::internal::Heap::public_set_store_buffer_top(), start_, and virtual_memory_.

Referenced by v8::internal::Heap::TearDown().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Top()

Object*** v8::internal::StoreBuffer::Top ( )
inline

Definition at line 81 of file store-buffer.h.

81 { return reinterpret_cast<Object***>(old_top_); }

References old_top_.

Referenced by v8::internal::StoreBufferRebuilder::Callback().

+ Here is the caller graph for this function:

◆ TopAddress()

Address v8::internal::StoreBuffer::TopAddress ( )
inline

Definition at line 13 of file store-buffer-inl.h.

13  {
14  return reinterpret_cast<Address>(heap_->store_buffer_top_address());
15 }
Address * store_buffer_top_address()
Definition: heap.h:898

References heap_, and v8::internal::Heap::store_buffer_top_address().

+ Here is the call graph for this function:

◆ Uniq()

void v8::internal::StoreBuffer::Uniq ( )
private

Definition at line 104 of file store-buffer.cc.

104  {
105  // Remove adjacent duplicates and cells that do not point at new space.
106  Address previous = NULL;
107  Address* write = old_start_;
108  DCHECK(may_move_store_buffer_entries_);
109  for (Address* read = old_start_; read < old_top_; read++) {
110  Address current = *read;
111  if (current != previous) {
112  if (heap_->InNewSpace(*reinterpret_cast<Object**>(current))) {
113  *write++ = current;
114  }
115  }
116  previous = current;
117  }
118  old_top_ = write;
119 }

References DCHECK, heap_, v8::internal::Heap::InNewSpace(), may_move_store_buffer_entries_, NULL, old_start_, and old_top_.

Referenced by SortUniq().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

◆ Verify()

void v8::internal::StoreBuffer::Verify ( )

Definition at line 353 of file store-buffer.cc.

353  {
354 #ifdef VERIFY_HEAP
355  VerifyPointers(heap_->lo_space());
356 #endif
357 }

References heap_, and v8::internal::Heap::lo_space().

Referenced by GCEpilogue().

+ Here is the call graph for this function:
+ Here is the caller graph for this function:

Friends And Related Function Documentation

◆ DontMoveStoreBufferEntriesScope

friend class DontMoveStoreBufferEntriesScope
friend

Definition at line 174 of file store-buffer.h.

◆ StoreBufferRebuildScope

friend class StoreBufferRebuildScope
friend

Definition at line 173 of file store-buffer.h.

Member Data Documentation

◆ callback_

◆ during_gc_

bool v8::internal::StoreBuffer::during_gc_
private

Definition at line 125 of file store-buffer.h.

Referenced by GCEpilogue(), and GCPrologue().

◆ hash_set_1_

uintptr_t* v8::internal::StoreBuffer::hash_set_1_
private

Definition at line 138 of file store-buffer.h.

Referenced by ClearFilteringHashSets(), Compact(), SetUp(), and TearDown().

◆ hash_set_2_

uintptr_t* v8::internal::StoreBuffer::hash_set_2_
private

Definition at line 139 of file store-buffer.h.

Referenced by ClearFilteringHashSets(), Compact(), SetUp(), and TearDown().

◆ hash_sets_are_empty_

bool v8::internal::StoreBuffer::hash_sets_are_empty_
private

Definition at line 140 of file store-buffer.h.

Referenced by ClearFilteringHashSets(), Compact(), and SetUp().

◆ heap_

◆ kHashSetLength

const int v8::internal::StoreBuffer::kHashSetLength = 1 << kHashSetLengthLog2
static

Definition at line 72 of file store-buffer.h.

Referenced by ClearFilteringHashSets(), Compact(), and SetUp().

◆ kHashSetLengthLog2

const int v8::internal::StoreBuffer::kHashSetLengthLog2 = 12
static

Definition at line 71 of file store-buffer.h.

Referenced by Compact().

◆ kOldStoreBufferLength

const int v8::internal::StoreBuffer::kOldStoreBufferLength = kStoreBufferLength * 16
static

Definition at line 70 of file store-buffer.h.

Referenced by SetUp().

◆ kStoreBufferLength

const int v8::internal::StoreBuffer::kStoreBufferLength = kStoreBufferSize / sizeof(Address)
static

Definition at line 69 of file store-buffer.h.

◆ kStoreBufferOverflowBit

const int v8::internal::StoreBuffer::kStoreBufferOverflowBit = 1 << (14 + kPointerSizeLog2)
static

Definition at line 67 of file store-buffer.h.

Referenced by Mark(), and SetUp().

◆ kStoreBufferSize

const int v8::internal::StoreBuffer::kStoreBufferSize = kStoreBufferOverflowBit
static

Definition at line 68 of file store-buffer.h.

Referenced by v8::internal::StoreBufferRebuilder::Callback(), and SetUp().

◆ limit_

Address* v8::internal::StoreBuffer::limit_
private

Definition at line 115 of file store-buffer.h.

Referenced by Compact(), Mark(), SetUp(), and TearDown().

◆ may_move_store_buffer_entries_

◆ old_buffer_is_filtered_

bool v8::internal::StoreBuffer::old_buffer_is_filtered_
private

◆ old_buffer_is_sorted_

bool v8::internal::StoreBuffer::old_buffer_is_sorted_
private

◆ old_limit_

Address* v8::internal::StoreBuffer::old_limit_
private

◆ old_reserved_limit_

Address* v8::internal::StoreBuffer::old_reserved_limit_
private

Definition at line 120 of file store-buffer.h.

Referenced by EnsureSpace(), SetUp(), and TearDown().

◆ old_start_

Address* v8::internal::StoreBuffer::old_start_
private

◆ old_top_

◆ old_virtual_memory_

base::VirtualMemory* v8::internal::StoreBuffer::old_virtual_memory_
private

Definition at line 121 of file store-buffer.h.

Referenced by EnsureSpace(), SetUp(), and TearDown().

◆ start_

Address* v8::internal::StoreBuffer::start_
private

Definition at line 114 of file store-buffer.h.

Referenced by Compact(), SetUp(), and TearDown().

◆ store_buffer_rebuilding_enabled_

bool v8::internal::StoreBuffer::store_buffer_rebuilding_enabled_
private

◆ virtual_memory_

base::VirtualMemory* v8::internal::StoreBuffer::virtual_memory_
private

Definition at line 133 of file store-buffer.h.

Referenced by SetUp(), and TearDown().


The documentation for this class was generated from the following files: