V8 Project
heap-inl.h
Go to the documentation of this file.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_HEAP_HEAP_INL_H_
6 #define V8_HEAP_HEAP_INL_H_
7 
8 #include <cmath>
9 
11 #include "src/cpu-profiler.h"
12 #include "src/heap/heap.h"
13 #include "src/heap/store-buffer.h"
15 #include "src/heap-profiler.h"
16 #include "src/isolate.h"
17 #include "src/list-inl.h"
18 #include "src/msan.h"
19 #include "src/objects.h"
20 
21 namespace v8 {
22 namespace internal {
23 
25  if (emergency_stack_ != NULL) {
26  emergency_stack_->Add(Entry(target, size));
27  return;
28  }
29 
30  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
31  NewSpacePage* rear_page =
32  NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
33  DCHECK(!rear_page->prev_page()->is_anchor());
34  rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
35  }
36 
37  if ((rear_ - 2) < limit_) {
39  emergency_stack_->Add(Entry(target, size));
40  return;
41  }
42 
43  *(--rear_) = reinterpret_cast<intptr_t>(target);
44  *(--rear_) = size;
45 // Assert no overflow into live objects.
46 #ifdef DEBUG
48  reinterpret_cast<Address>(rear_));
49 #endif
50 }
51 
52 
53 template <>
54 bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
55  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
56  return chars == str.length();
57 }
58 
59 
60 template <>
61 bool inline Heap::IsOneByte(String* str, int chars) {
62  return str->IsOneByteRepresentation();
63 }
64 
65 
67  Vector<const char> str, int chars, uint32_t hash_field) {
68  if (IsOneByte(str, chars)) {
70  hash_field);
71  }
72  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
73 }
74 
75 
76 template <typename T>
78  uint32_t hash_field) {
79  if (IsOneByte(t, chars)) {
80  return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
81  }
82  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
83 }
84 
85 
87  Vector<const uint8_t> str, uint32_t hash_field) {
89  // Compute map and object size.
90  Map* map = one_byte_internalized_string_map();
93 
94  // Allocate string.
95  HeapObject* result;
96  {
98  if (!allocation.To(&result)) return allocation;
99  }
100 
101  // String maps are all immortal immovable objects.
102  result->set_map_no_write_barrier(map);
103  // Set length and hash fields of the allocated string.
104  String* answer = String::cast(result);
105  answer->set_length(str.length());
106  answer->set_hash_field(hash_field);
107 
108  DCHECK_EQ(size, answer->Size());
109 
110  // Fill in the characters.
112  str.length());
113 
114  return answer;
115 }
116 
117 
119  uint32_t hash_field) {
121  // Compute map and object size.
122  Map* map = internalized_string_map();
125 
126  // Allocate string.
127  HeapObject* result;
128  {
130  if (!allocation.To(&result)) return allocation;
131  }
132 
133  result->set_map(map);
134  // Set length and hash fields of the allocated string.
135  String* answer = String::cast(result);
136  answer->set_length(str.length());
137  answer->set_hash_field(hash_field);
138 
139  DCHECK_EQ(size, answer->Size());
140 
141  // Fill in the characters.
143  str.length() * kUC16Size);
144 
145  return answer;
146 }
147 
149  if (src->length() == 0) return src;
150  return CopyFixedArrayWithMap(src, src->map());
151 }
152 
153 
155  if (src->length() == 0) return src;
156  return CopyFixedDoubleArrayWithMap(src, src->map());
157 }
158 
159 
161  if (src->length() == 0) return src;
162  return CopyConstantPoolArrayWithMap(src, src->map());
163 }
164 
165 
167  AllocationSpace retry_space) {
168  DCHECK(AllowHandleAllocation::IsAllowed());
169  DCHECK(AllowHeapAllocation::IsAllowed());
171 #ifdef DEBUG
172  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
173  Heap::allocation_timeout_-- <= 0) {
175  }
176  isolate_->counters()->objs_since_last_full()->Increment();
177  isolate_->counters()->objs_since_last_young()->Increment();
178 #endif
179 
180  HeapObject* object;
181  AllocationResult allocation;
182  if (NEW_SPACE == space) {
183  allocation = new_space_.AllocateRaw(size_in_bytes);
184  if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
185  space = retry_space;
186  } else {
187  if (allocation.To(&object)) {
188  OnAllocationEvent(object, size_in_bytes);
189  }
190  return allocation;
191  }
192  }
193 
194  if (OLD_POINTER_SPACE == space) {
195  allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
196  } else if (OLD_DATA_SPACE == space) {
197  allocation = old_data_space_->AllocateRaw(size_in_bytes);
198  } else if (CODE_SPACE == space) {
199  if (size_in_bytes <= code_space()->AreaSize()) {
200  allocation = code_space_->AllocateRaw(size_in_bytes);
201  } else {
202  // Large code objects are allocated in large object space.
203  allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
204  }
205  } else if (LO_SPACE == space) {
206  allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
207  } else if (CELL_SPACE == space) {
208  allocation = cell_space_->AllocateRaw(size_in_bytes);
209  } else if (PROPERTY_CELL_SPACE == space) {
210  allocation = property_cell_space_->AllocateRaw(size_in_bytes);
211  } else {
212  DCHECK(MAP_SPACE == space);
213  allocation = map_space_->AllocateRaw(size_in_bytes);
214  }
215  if (allocation.To(&object)) {
216  OnAllocationEvent(object, size_in_bytes);
217  } else {
218  old_gen_exhausted_ = true;
219  }
220  return allocation;
221 }
222 
223 
224 void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
226  if (profiler->is_tracking_allocations()) {
227  profiler->AllocationEvent(object->address(), size_in_bytes);
228  }
229 
230  if (FLAG_verify_predictable) {
232 
233  UpdateAllocationsHash(object);
234  UpdateAllocationsHash(size_in_bytes);
235 
236  if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
238  dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
240  }
241  }
242 }
243 
244 
246  int size_in_bytes) {
247  HeapProfiler* heap_profiler = isolate_->heap_profiler();
248  if (heap_profiler->is_tracking_object_moves()) {
249  heap_profiler->ObjectMoveEvent(source->address(), target->address(),
250  size_in_bytes);
251  }
252 
254  isolate_->cpu_profiler()->is_profiling()) {
255  if (target->IsSharedFunctionInfo()) {
256  PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
257  target->address()));
258  }
259  }
260 
261  if (FLAG_verify_predictable) {
263 
264  UpdateAllocationsHash(source);
265  UpdateAllocationsHash(target);
266  UpdateAllocationsHash(size_in_bytes);
267 
268  if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
270  dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
272  }
273  }
274 }
275 
276 
278  Address object_address = object->address();
279  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
280  AllocationSpace allocation_space = memory_chunk->owner()->identity();
281 
283  uint32_t value =
284  static_cast<uint32_t>(object_address - memory_chunk->address()) |
285  (static_cast<uint32_t>(allocation_space) << kPageSizeBits);
286 
287  UpdateAllocationsHash(value);
288 }
289 
290 
292  uint16_t c1 = static_cast<uint16_t>(value);
293  uint16_t c2 = static_cast<uint16_t>(value >> 16);
295  StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
297  StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
298 }
299 
300 
302  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
303  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
304 }
305 
306 
308  DCHECK(string->IsExternalString());
310  reinterpret_cast<v8::String::ExternalStringResourceBase**>(
311  reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
313 
314  // Dispose of the C++ object if it has not already been disposed.
315  if (*resource_addr != NULL) {
316  (*resource_addr)->Dispose();
317  *resource_addr = NULL;
318  }
319 }
320 
321 
322 bool Heap::InNewSpace(Object* object) {
323  bool result = new_space_.Contains(object);
324  DCHECK(!result || // Either not in new space
325  gc_state_ != NOT_IN_GC || // ... or in the middle of GC
326  InToSpace(object)); // ... or in to-space (where we allocate).
327  return result;
328 }
329 
330 
331 bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }
332 
333 
334 bool Heap::InFromSpace(Object* object) {
335  return new_space_.FromSpaceContains(object);
336 }
337 
338 
339 bool Heap::InToSpace(Object* object) {
340  return new_space_.ToSpaceContains(object);
341 }
342 
343 
345  return old_pointer_space_->Contains(address);
346 }
347 
348 
350  return InOldPointerSpace(reinterpret_cast<Address>(object));
351 }
352 
353 
355  return old_data_space_->Contains(address);
356 }
357 
358 
360  return InOldDataSpace(reinterpret_cast<Address>(object));
361 }
362 
363 
365  if (!incremental_marking()->IsStopped()) return false;
366  return OldGenerationSpaceAvailable() < 0;
367 }
368 
369 
370 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
371  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
372  Address age_mark = new_space_.age_mark();
374  (!page->ContainsLimit(age_mark) || old_address < age_mark);
375 }
376 
377 
378 void Heap::RecordWrite(Address address, int offset) {
379  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
380 }
381 
382 
383 void Heap::RecordWrites(Address address, int start, int len) {
384  if (!InNewSpace(address)) {
385  for (int i = 0; i < len; i++) {
386  store_buffer_.Mark(address + start + i * kPointerSize);
387  }
388  }
389 }
390 
391 
393  InstanceType type = object->map()->instance_type();
396 }
397 
398 
400  // Heap numbers and sequential strings are promoted to old data space, all
401  // other object types are promoted to old pointer space. We do not use
402  // object->IsHeapNumber() and object->IsSeqString() because we already
403  // know that object has the heap object tag.
404 
405  // These objects are never allocated in new space.
406  DCHECK(type != MAP_TYPE);
407  DCHECK(type != CODE_TYPE);
408  DCHECK(type != ODDBALL_TYPE);
409  DCHECK(type != CELL_TYPE);
410  DCHECK(type != PROPERTY_CELL_TYPE);
411 
412  if (type <= LAST_NAME_TYPE) {
413  if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
415  // There are four string representations: sequential strings, external
416  // strings, cons strings, and sliced strings.
417  // Only the latter two contain non-map-word pointers to heap objects.
418  return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
420  : OLD_DATA_SPACE;
421  } else {
422  return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
423  }
424 }
425 
426 
428  // Object migration is governed by the following rules:
429  //
430  // 1) Objects in new-space can be migrated to one of the old spaces
431  // that matches their target space or they stay in new-space.
432  // 2) Objects in old-space stay in the same space when migrating.
433  // 3) Fillers (two or more words) can migrate due to left-trimming of
434  // fixed arrays in new-space, old-data-space and old-pointer-space.
435  // 4) Fillers (one word) can never migrate, they are skipped by
436  // incremental marking explicitly to prevent invalid pattern.
437  // 5) Short external strings can end up in old pointer space when a cons
438  // string in old pointer space is made external (String::MakeExternal).
439  //
440  // Since this function is used for debugging only, we do not place
441  // asserts here, but check everything explicitly.
442  if (obj->map() == one_pointer_filler_map()) return false;
443  InstanceType type = obj->map()->instance_type();
445  AllocationSpace src = chunk->owner()->identity();
446  switch (src) {
447  case NEW_SPACE:
448  return dst == src || dst == TargetSpaceId(type);
449  case OLD_POINTER_SPACE:
450  return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
451  obj->IsExternalString());
452  case OLD_DATA_SPACE:
453  return dst == src && dst == TargetSpaceId(type);
454  case CODE_SPACE:
455  return dst == src && type == CODE_TYPE;
456  case MAP_SPACE:
457  case CELL_SPACE:
458  case PROPERTY_CELL_SPACE:
459  case LO_SPACE:
460  return false;
461  case INVALID_SPACE:
462  break;
463  }
464  UNREACHABLE();
465  return false;
466 }
467 
468 
469 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
470  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
471  static_cast<size_t>(byte_size / kPointerSize));
472 }
473 
474 
475 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
476  DCHECK(IsAligned(byte_size, kPointerSize));
477 
478  int size_in_words = byte_size / kPointerSize;
479 
480  if ((dst < src) || (dst >= (src + byte_size))) {
481  Object** src_slot = reinterpret_cast<Object**>(src);
482  Object** dst_slot = reinterpret_cast<Object**>(dst);
483  Object** end_slot = src_slot + size_in_words;
484 
485  while (src_slot != end_slot) {
486  *dst_slot++ = *src_slot++;
487  }
488  } else {
489  MemMove(dst, src, static_cast<size_t>(byte_size));
490  }
491 }
492 
493 
495 
496 
498  // Check if there is potentially a memento behind the object. If
499  // the last word of the memento is on another page we return
500  // immediately.
501  Address object_address = object->address();
502  Address memento_address = object_address + object->Size();
503  Address last_memento_word_address = memento_address + kPointerSize;
504  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
505  return NULL;
506  }
507 
508  HeapObject* candidate = HeapObject::FromAddress(memento_address);
509  Map* candidate_map = candidate->map();
510  // This fast check may peek at an uninitialized word. However, the slow check
511  // below (memento_address == top) ensures that this is safe. Mark the word as
512  // initialized to silence MemorySanitizer warnings.
513  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
514  if (candidate_map != allocation_memento_map()) return NULL;
515 
516  // Either the object is the last object in the new space, or there is another
517  // object of at least word size (the header map word) following it, so
518  // suffices to compare ptr and top here. Note that technically we do not have
519  // to compare with the current top pointer of the from space page during GC,
520  // since we always install filler objects above the top pointer of a from
521  // space page when performing a garbage collection. However, always performing
522  // the test makes it possible to have a single, unified version of
523  // FindAllocationMemento that is used both by the GC and the mutator.
524  Address top = NewSpaceTop();
525  DCHECK(memento_address == top ||
526  memento_address + HeapObject::kHeaderSize <= top ||
527  !NewSpacePage::OnSamePage(memento_address, top));
528  if (memento_address == top) return NULL;
529 
530  AllocationMemento* memento = AllocationMemento::cast(candidate);
531  if (!memento->IsValid()) return NULL;
532  return memento;
533 }
534 
535 
538  Heap* heap = object->GetHeap();
539  DCHECK(heap->InFromSpace(object));
540 
541  if (!FLAG_allocation_site_pretenuring ||
543  return;
544 
545  AllocationMemento* memento = heap->FindAllocationMemento(object);
546  if (memento == NULL) return;
547 
548  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
550  }
551 }
552 
553 
555  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));
556 
557  // We use the first word (where the map pointer usually is) of a heap
558  // object to record the forwarding pointer. A forwarding pointer can
559  // point to an old space, the code space, or the to space of the new
560  // generation.
561  MapWord first_word = object->map_word();
562 
563  // If the first word is a forwarding address, the object has already been
564  // copied.
565  if (first_word.IsForwardingAddress()) {
566  HeapObject* dest = first_word.ToForwardingAddress();
567  DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
568  *p = dest;
569  return;
570  }
571 
573 
574  // AllocationMementos are unrooted and shouldn't survive a scavenge
575  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
576  // Call the slow part of scavenge object.
577  return ScavengeObjectSlow(p, object);
578 }
579 
580 
581 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
582  const v8::GCCallbackFlags callbackFlags) {
583  const char* collector_reason = NULL;
584  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
585  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
586 }
587 
588 
590  return reinterpret_cast<Isolate*>(
591  reinterpret_cast<intptr_t>(this) -
592  reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
593 }
594 
595 
596 // Calls the FUNCTION_CALL function and retries it up to three times
597 // to guarantee that any allocations performed during the call will
598 // succeed if there's enough memory.
599 
600 // Warning: Do not use the identifiers __object__, __maybe_object__ or
601 // __scope__ in a call to this macro.
602 
// If the allocation succeeded, checks the result is not the exception
// sentinel and returns via RETURN_VALUE; on failure falls through so the
// enclosing macro can retry. Expects __allocation__ and __object__ in scope.
#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) { \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE; \
  }

// Performs FUNCTION_CALL and, on allocation failure, escalates:
//   1) GC of the retry space, then retry;
//   2) collect all available garbage ("last resort gc"), then retry under
//      AlwaysAllocateScope;
//   3) if still failing, abort the process as out of memory.
#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY) \
  do { \
    AllocationResult __allocation__ = FUNCTION_CALL; \
    Object* __object__ = NULL; \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(), \
                                      "allocation failure"); \
    __allocation__ = FUNCTION_CALL; \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment(); \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc"); \
    { \
      AlwaysAllocateScope __scope__(ISOLATE); \
      __allocation__ = FUNCTION_CALL; \
    } \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
    /* TODO(1181417): Fix this. */ \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY; \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

// Wraps an allocating FUNCTION_CALL, yielding a Handle<TYPE> on success or
// an empty handle after the terminal OOM path.
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())

// Same as CALL_HEAP_FUNCTION for calls whose result value is discarded.
#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
642 
643 
645  DCHECK(string->IsExternalString());
646  if (heap_->InNewSpace(string)) {
647  new_space_strings_.Add(string);
648  } else {
649  old_space_strings_.Add(string);
650  }
651 }
652 
653 
655  if (!new_space_strings_.is_empty()) {
656  Object** start = &new_space_strings_[0];
657  v->VisitPointers(start, start + new_space_strings_.length());
658  }
659  if (!old_space_strings_.is_empty()) {
660  Object** start = &old_space_strings_[0];
661  v->VisitPointers(start, start + old_space_strings_.length());
662  }
663 }
664 
665 
666 // Verify() is inline to avoid ifdef-s around its calls in release
667 // mode.
669 #ifdef DEBUG
670  for (int i = 0; i < new_space_strings_.length(); ++i) {
671  Object* obj = Object::cast(new_space_strings_[i]);
672  DCHECK(heap_->InNewSpace(obj));
673  DCHECK(obj != heap_->the_hole_value());
674  }
675  for (int i = 0; i < old_space_strings_.length(); ++i) {
676  Object* obj = Object::cast(old_space_strings_[i]);
677  DCHECK(!heap_->InNewSpace(obj));
678  DCHECK(obj != heap_->the_hole_value());
679  }
680 #endif
681 }
682 
683 
685  DCHECK(string->IsExternalString());
686  DCHECK(!heap_->InNewSpace(string));
687  old_space_strings_.Add(string);
688 }
689 
690 
692  new_space_strings_.Rewind(position);
693 #ifdef VERIFY_HEAP
694  if (FLAG_verify_heap) {
695  Verify();
696  }
697 #endif
698 }
699 
700 
702  set_instanceof_cache_function(the_hole_value());
703 }
704 
705 
706 Object* Heap::ToBoolean(bool condition) {
707  return condition ? true_value() : false_value();
708 }
709 
710 
712  set_instanceof_cache_map(the_hole_value());
713  set_instanceof_cache_function(the_hole_value());
714 }
715 
716 
718  : heap_(isolate->heap()), daf_(isolate) {
719  // We shouldn't hit any nested scopes, because that requires
720  // non-handle code to call handle code. The code still works but
721  // performance will degrade, so we want to catch this situation
722  // in debug mode.
725 }
726 
727 
731 }
732 
733 
734 #ifdef VERIFY_HEAP
735 NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
736  Isolate* isolate = Isolate::Current();
737  isolate->heap()->no_weak_object_verification_scope_depth_++;
738 }
739 
740 
741 NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
742  Isolate* isolate = Isolate::Current();
743  isolate->heap()->no_weak_object_verification_scope_depth_--;
744 }
745 #endif
746 
747 
750 }
751 
752 
754 
755 
757  return heap_->gc_callbacks_depth_ == 1;
758 }
759 
760 
762  for (Object** current = start; current < end; current++) {
763  if ((*current)->IsHeapObject()) {
764  HeapObject* object = HeapObject::cast(*current);
765  CHECK(object->GetIsolate()->heap()->Contains(object));
766  CHECK(object->map()->IsMap());
767  }
768  }
769 }
770 
771 
773  for (Object** current = start; current < end; current++) {
774  CHECK((*current)->IsSmi());
775  }
776 }
777 }
778 } // namespace v8::internal
779 
780 #endif // V8_HEAP_HEAP_INL_H_
const int kPageSizeBits
Definition: build_config.h:159
Isolate represents an isolated instance of the V8 engine.
Definition: v8.h:4356
virtual void Dispose()
Internally V8 will call this Dispose method when the external string resource is no longer needed.
Definition: v8.h:1868
AllocationSite * GetAllocationSite()
Definition: objects.h:8295
static AllocationResult Retry(AllocationSpace space=NEW_SPACE)
Definition: spaces.h:1616
static bool CanTrack(InstanceType type)
Definition: objects-inl.h:1614
AlwaysAllocateScope(Isolate *isolate)
Definition: heap-inl.h:717
void Iterate(ObjectVisitor *v)
Definition: heap-inl.h:654
void AddOldString(String *string)
Definition: heap-inl.h:684
void AddString(String *string)
Definition: heap-inl.h:644
List< Object * > old_space_strings_
Definition: heap.h:503
void ShrinkNewStrings(int position)
Definition: heap-inl.h:691
List< Object * > new_space_strings_
Definition: heap.h:502
static const int kResourceOffset
Definition: objects.h:9136
void set_map_no_write_barrier(Map *value)
Definition: objects-inl.h:1435
void set_map(Map *value)
Definition: objects-inl.h:1404
Heap * GetHeap() const
Definition: objects-inl.h:1379
Isolate * GetIsolate() const
Definition: objects-inl.h:1387
static HeapObject * FromAddress(Address address)
Definition: objects-inl.h:1464
static const int kHeaderSize
Definition: objects.h:1428
bool is_tracking_object_moves() const
Definition: heap-profiler.h:61
void ObjectMoveEvent(Address from, Address to, int size)
MUST_USE_RESULT AllocationResult CopyFixedArray(FixedArray *src)
Definition: heap-inl.h:148
bool Contains(Address addr)
Definition: heap.cc:4447
StoreBuffer store_buffer_
Definition: heap.h:1978
OldSpace * TargetSpace(HeapObject *object)
Definition: heap-inl.h:392
static void ScavengeObject(HeapObject **p, HeapObject *object)
Definition: heap-inl.h:554
MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
Definition: heap.cc:4014
bool OldGenerationAllocationLimitReached()
Definition: heap-inl.h:364
void UpdateAllocationsHash(HeapObject *object)
Definition: heap-inl.h:277
uint32_t dump_allocations_hash_countdown_
Definition: heap.h:1476
Isolate * isolate_
Definition: heap.h:1424
uint32_t raw_allocations_hash_
Definition: heap.h:1473
OldSpace * code_space()
Definition: heap.h:596
MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray *src, Map *map)
Definition: heap.cc:3973
OldSpace * code_space_
Definition: heap.h:1457
void ClearInstanceofCache()
Definition: heap-inl.h:701
bool InOldDataSpace(Address address)
Definition: heap-inl.h:354
bool old_gen_exhausted_
Definition: heap.h:1517
Address NewSpaceTop()
Definition: heap.h:591
void OnAllocationEvent(HeapObject *object, int size_in_bytes)
Definition: heap-inl.h:224
@ IGNORE_SCRATCHPAD_SLOT
Definition: heap.h:974
MUST_USE_RESULT AllocationResult AllocateInternalizedStringFromUtf8(Vector< const char > str, int chars, uint32_t hash_field)
Definition: heap-inl.h:66
MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
void FinalizeExternalString(String *string)
Definition: heap-inl.h:307
bool InNewSpace(Object *object)
Definition: heap-inl.h:322
uint32_t allocations_count_
Definition: heap.h:1470
intptr_t OldGenerationSpaceAvailable()
Definition: heap.h:1003
static bool IsOneByte(T t, int chars)
bool AllowedToBeMigrated(HeapObject *object, AllocationSpace dest)
Definition: heap-inl.h:427
Object * ToBoolean(bool condition)
Definition: heap-inl.h:706
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
Definition: heap-inl.h:166
int always_allocate_scope_depth_
Definition: heap.h:1443
Isolate * isolate()
Definition: heap-inl.h:589
static void MoveBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:475
MUST_USE_RESULT AllocationResult CopyConstantPoolArray(ConstantPoolArray *src)
Definition: heap-inl.h:160
MUST_USE_RESULT AllocationResult CopyFixedDoubleArray(FixedDoubleArray *src)
Definition: heap-inl.h:154
static void UpdateAllocationSiteFeedback(HeapObject *object, ScratchpadSlotMode mode)
Definition: heap-inl.h:536
HeapState gc_state_
Definition: heap.h:1462
PropertyCellSpace * property_cell_space_
Definition: heap.h:1460
OldSpace * old_pointer_space_
Definition: heap.h:1455
void AddAllocationSiteToScratchpad(AllocationSite *site, ScratchpadSlotMode mode)
Definition: heap.cc:3077
NewSpace new_space_
Definition: heap.h:1454
bool InFromSpace(Object *object)
Definition: heap-inl.h:334
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
Definition: heap-inl.h:581
CellSpace * cell_space_
Definition: heap.h:1459
int gc_callbacks_depth_
Definition: heap.h:2021
static AllocationSpace TargetSpaceId(InstanceType type)
Definition: heap-inl.h:399
STATIC_ASSERT(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
IncrementalMarking * incremental_marking()
Definition: heap.h:1205
MapSpace * map_space_
Definition: heap.h:1458
GarbageCollector SelectGarbageCollector(AllocationSpace space, const char **reason)
Definition: heap.cc:236
static AllocationSpace SelectSpace(int object_size, AllocationSpace preferred_old_space, PretenureFlag pretenure)
Definition: heap.h:1649
bool ShouldBePromoted(Address old_address, int object_size)
Definition: heap-inl.h:370
static void CopyBlock(Address dst, Address src, int byte_size)
Definition: heap-inl.h:469
void OnMoveEvent(HeapObject *target, HeapObject *source, int size_in_bytes)
Definition: heap-inl.h:245
NewSpace * new_space()
Definition: heap.h:593
AllocationMemento * FindAllocationMemento(HeapObject *object)
Definition: heap-inl.h:497
static void ScavengeObjectSlow(HeapObject **p, HeapObject *object)
Definition: heap.cc:2262
bool always_allocate()
Definition: heap.h:622
static void ScavengePointer(HeapObject **p)
Definition: heap-inl.h:494
void CompletelyClearInstanceofCache()
Definition: heap-inl.h:711
MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
Definition: heap.cc:3998
OldSpace * old_data_space_
Definition: heap.h:1456
bool InToSpace(Object *object)
Definition: heap-inl.h:339
void PrintAlloctionsHash()
Definition: heap-inl.h:301
bool InOldPointerSpace(Address address)
Definition: heap-inl.h:344
LargeObjectSpace * lo_space_
Definition: heap.h:1461
MUST_USE_RESULT AllocationResult AllocateOneByteInternalizedString(Vector< const uint8_t > str, uint32_t hash_field)
Definition: heap-inl.h:86
MUST_USE_RESULT AllocationResult AllocateTwoByteInternalizedString(Vector< const uc16 > str, uint32_t hash_field)
Definition: heap-inl.h:118
HeapProfiler * heap_profiler() const
Definition: isolate.h:972
Counters * counters()
Definition: isolate.h:857
CpuProfiler * cpu_profiler() const
Definition: isolate.h:971
Logger * logger()
Definition: isolate.h:866
MUST_USE_RESULT AllocationResult AllocateRaw(int object_size, Executability executable)
Definition: spaces.cc:2834
bool is_logging_code_events()
Definition: log.h:315
InstanceType instance_type()
Definition: objects-inl.h:4323
bool IsFlagSet(int flag)
Definition: spaces.h:417
bool ContainsLimit(Address addr)
Definition: spaces.h:355
Space * owner() const
Definition: spaces.h:307
static MemoryChunk * FromAddress(Address a)
Definition: spaces.h:276
void set_hash_field(uint32_t value)
Definition: objects-inl.h:3301
NewSpacePage * prev_page() const
Definition: spaces.h:1999
static bool OnSamePage(Address address1, Address address2)
Definition: spaces.h:2036
static bool IsAtStart(Address addr)
Definition: spaces.h:2009
static NewSpacePage * FromAddress(Address address_in_page)
Definition: spaces.h:2021
bool FromSpaceContains(Address address)
Definition: spaces.h:2494
Address age_mark()
Definition: spaces.h:2439
bool ToSpaceContains(Address address)
Definition: spaces.h:2491
bool Contains(Address a)
Definition: spaces.h:2349
bool Contains(Address a)
Definition: spaces-inl.h:150
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes)
Definition: spaces-inl.h:248
List< Entry > * emergency_stack_
Definition: heap.h:457
void insert(HeapObject *target, int size)
Definition: heap-inl.h:24
static void AssertValidRange(Address from, Address to)
Definition: spaces.h:2165
static int SizeFor(int length)
Definition: objects.h:8976
static const int kHeaderSize
Definition: objects.h:8941
static int SizeFor(int length)
Definition: objects.h:9015
AllocationSpace identity()
Definition: spaces.h:829
void Mark(Address addr)
static const int kMaxLength
Definition: objects.h:8820
bool IsOneByteRepresentation() const
Definition: objects-inl.h:337
void set_length(int value)
T * start() const
Definition: vector.h:47
int length() const
Definition: vector.h:41
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:761
void VisitPointers(Object **start, Object **end)
Definition: heap-inl.h:772
#define PROFILE(IsolateGetter, Call)
Definition: cpu-profiler.h:181
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes A file to write the raw context snapshot bytes Write V8 startup blob Print the time it takes to lazily compile hydrogen code stubs dump only objects containing this substring stress the GC compactor to flush out pretty print source code for builtins print C code to recreate TurboFan graphs report heap spill statistics along with enable possessive quantifier syntax for testing Minimal Log code events to the log file without profiling log positions Log statistical profiling Used with turns on browser compatible mode for profiling Enable perf linux profiler(experimental annotate support).") DEFINE_STRING(gc_fake_mmap
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi space(in MBytes)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define UNREACHABLE()
Definition: logging.h:30
#define CHECK(condition)
Definition: logging.h:36
#define CHECK_GE(a, b)
Definition: logging.h:178
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
#define MSAN_MEMORY_IS_INITIALIZED(p, s)
Definition: msan.h:30
unsigned short uint16_t
Definition: unicode.cc:23
const int kPointerSize
Definition: globals.h:129
@ NOT_EXECUTABLE
Definition: globals.h:391
void MemMove(void *dest, const void *src, size_t size)
Definition: utils.h:353
@ FIRST_NONSTRING_TYPE
Definition: objects.h:758
@ LAST_NAME_TYPE
Definition: objects.h:755
@ PROPERTY_CELL_TYPE
Definition: objects.h:665
@ ODDBALL_TYPE
Definition: objects.h:663
@ LAST_DATA_TYPE
Definition: objects.h:766
const int kUC16Size
Definition: globals.h:187
byte * Address
Definition: globals.h:101
void PrintF(const char *format,...)
Definition: utils.cc:80
@ INVALID_SPACE
Definition: globals.h:367
@ OLD_DATA_SPACE
Definition: globals.h:361
@ PROPERTY_CELL_SPACE
Definition: globals.h:365
@ OLD_POINTER_SPACE
Definition: globals.h:360
const int kHeapObjectTag
Definition: v8.h:5737
void CopyWords(T *dst, const T *src, size_t num_words)
Definition: utils.h:1112
const uint32_t kIsIndirectStringTag
Definition: objects.h:569
const int kSpaceTagSize
Definition: globals.h:374
bool IsAligned(T value, U alignment)
Definition: utils.h:123
void MemCopy(void *dest, const void *src, size_t size)
Definition: utils.h:350
const uint32_t kIsIndirectStringMask
Definition: objects.h:568
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
GCCallbackFlags
Definition: v8.h:4209
#define T(name, string, precedence)
Definition: token.cc:25