#if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
#if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
#if V8_TARGET_ARCH_MIPS64 && !V8_INTERPRETED_REGEXP
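// Heap constructor: the initializer list below zeroes the GC bookkeeping
// counters and statistics and leaves every space pointer NULL; the spaces
// themselves are only created later when the heap is set up.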
    : amount_of_external_allocated_memory_(0),
      amount_of_external_allocated_memory_at_last_global_gc_(0),
      initial_semispace_size_(Page::kPageSize),
      maximum_committed_(0),
      survived_since_last_expansion_(0),
      always_allocate_scope_depth_(0),
      contexts_disposed_(0),
      flush_monomorphic_ics_(false),
      scan_on_scavenge_pages_(0),
      old_pointer_space_(NULL),
      old_data_space_(NULL),
      property_cell_space_(NULL),
      gc_post_processing_depth_(0),
      allocations_count_(0),
      raw_allocations_hash_(0),
      dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc),
      remembered_unmapped_pages_index_(0),
      unflattened_strings_length_(0),
      allocation_timeout_(0),
      old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
      old_gen_exhausted_(false),
      inline_allocation_disabled_(false),
      store_buffer_rebuilder_(store_buffer()),
      hidden_string_(NULL),
      gc_safe_size_of_old_object_(NULL),
      total_regexp_code_generated_(0),
      high_survival_rate_period_length_(0),
      promoted_objects_size_(0),
      semi_space_copied_object_size_(0),
      semi_space_copied_rate_(0),
      nodes_died_in_new_space_(0),
      nodes_copied_in_new_space_(0),
      maximum_size_scavenges_(0),
      total_gc_time_ms_(0.0),
      max_alive_after_gc_(0),
      mark_compact_collector_(this),
      incremental_marking_(this),
      gc_count_at_last_idle_gc_(0),
      full_codegen_bytes_generated_(0),
      crankshaft_codegen_bytes_generated_(0),
      gcs_since_last_deopt_(0),
      no_weak_object_verification_scope_depth_(0),
      allocation_sites_scratchpad_length_(0),
      promotion_queue_(this),
      external_string_table_(this),
      chunks_queued_for_free_(NULL),
      gc_callbacks_depth_(0) {
#if defined(V8_MAX_SEMISPACE_SIZE)

  return object->SizeFromMap(object->map());

                                          const char** reason) {
    *reason = "GC in old space requested";
  if (FLAG_gc_global || (FLAG_stress_compaction && (gc_count_ & 1) != 0)) {
    *reason = "GC in old space forced by flags";
    *reason = "promotion limit reached";
          ->gc_compactor_caused_by_oldspace_exhaustion()
    *reason = "old generations exhausted";
          ->gc_compactor_caused_by_oldspace_exhaustion()
    *reason = "scavenge might not succeed";

  if (FLAG_heap_stats) {
    ReportHeapStatistics("Before GC");
  } else if (FLAG_log_gc) {

  if (!FLAG_trace_gc_verbose) return;

  PrintPID("External memory reported: %6" V8_PTR_PREFIX "d KB\n",

  if (FLAG_heap_stats) {
    ReportHeapStatistics("After GC");
  } else if (FLAG_log_gc) {

  if (FLAG_flush_code && FLAG_flush_code_incrementally) {
  if (FLAG_verify_heap) {
  if (FLAG_gc_verbose) Print();

  if (isolate()->concurrent_osr_enabled()) {
  AllSpaces spaces(this);
    total += space->SizeOfObjects();

    Code* code = Code::cast(object);
    if (current_kind == Code::FUNCTION ||
        current_kind == Code::OPTIMIZED_FUNCTION) {

  PagedSpaces spaces(this);
       space = spaces.next()) {
    space->RepairFreeListsAfterBoot();
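// Pretenuring feedback: visit the allocation sites (via the scratchpad or the
// full weak list), count mementos and tenure/don't-tenure decisions, and
// request deoptimization when a site's decision changes.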
  if (FLAG_allocation_site_pretenuring) {
    int tenure_decisions = 0;
    int dont_tenure_decisions = 0;
    int allocation_mementos_found = 0;
    int allocation_sites = 0;
    int active_allocation_sites = 0;

    bool use_scratchpad =
        !deopt_maybe_tenured;

    bool trigger_deoptimization = false;
                         : list_element->IsAllocationSite()) {
              ? AllocationSite::cast(allocation_sites_scratchpad()->get(i))
              : AllocationSite::cast(list_element);
      active_allocation_sites++;
        trigger_deoptimization = true;
          dont_tenure_decisions++;
      trigger_deoptimization = true;
      if (use_scratchpad) {
        list_element = site->weak_next();

    if (trigger_deoptimization) {

    if (FLAG_trace_pretenuring_statistics &&
        (allocation_mementos_found > 0 || tenure_decisions > 0 ||
         dont_tenure_decisions > 0)) {
          "GC: (mode, #visited allocation sites, #active allocation sites, "
          "#mementos, #tenure decisions, #donttenure decisions) "
          "(%s, %d, %d, %d, %d, %d)\n",
          use_scratchpad ? "use scratchpad" : "use list", allocation_sites,
          active_allocation_sites, allocation_mementos_found, tenure_decisions,
          dont_tenure_decisions);
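// Walk the weak list of allocation sites and mark the dependent code of the
// flagged sites for deoptimization.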
  while (list_element->IsAllocationSite()) {
      site->dependent_code()->MarkCodeForDeoptimization(
    list_element = site->weak_next();

  if (FLAG_verify_heap) {
  if (FLAG_print_handles) PrintHandles();
  if (FLAG_gc_verbose) Print();
  if (FLAG_code_stats) ReportCodeStatistics("After GC");

  if (FLAG_deopt_every_n_garbage_collections > 0) {
        string_table()->NumberOfElements());
  isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
  isolate_->counters()->heap_fraction_map_space()->AddSample(static_cast<int>(
      ->heap_sample_property_cell_space_committed()

#define UPDATE_COUNTERS_FOR_SPACE(space)                \
  isolate_->counters()->space##_bytes_available()->Set( \
      static_cast<int>(space()->Available()));          \
  isolate_->counters()->space##_bytes_committed()->Set( \
      static_cast<int>(space()->CommittedMemory()));    \
  isolate_->counters()->space##_bytes_used()->Set(      \
      static_cast<int>(space()->SizeOfObjects()));
#define UPDATE_FRAGMENTATION_FOR_SPACE(space)                          \
  if (space()->CommittedMemory() > 0) {                                \
    isolate_->counters()->external_fragmentation_##space()->AddSample( \
        static_cast<int>(100 -                                         \
                         (space()->SizeOfObjects() * 100.0) /          \
                             space()->CommittedMemory()));             \
  }
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space) \
  UPDATE_COUNTERS_FOR_SPACE(space)                         \
  UPDATE_FRAGMENTATION_FOR_SPACE(space)

#undef UPDATE_COUNTERS_FOR_SPACE
#undef UPDATE_FRAGMENTATION_FOR_SPACE
#undef UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE
  if (isolate()->concurrent_recompilation_enabled()) {

  const int kMaxNumberOfAttempts = 7;
  const int kMinNumberOfAttempts = 2;
  for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
        attempt + 1 >= kMinNumberOfAttempts) {

  if (from_top < from_limit) {
    int remaining_in_page = static_cast<int>(from_limit - from_top);

                          const char* collector_reason,
    allocation_timeout_ = Max(6, FLAG_gc_interval);

    if (FLAG_trace_incremental_marking) {
      PrintF("[IncrementalMarking] Scavenge during marking.\n");

      FLAG_incremental_marking_steps) {
    const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
      if (FLAG_trace_incremental_marking) {
        PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
      collector_reason = "incremental marking delaying mark-sweep";

  bool next_gc_likely_to_collect_more = false;

    tracer()->Start(collector, gc_reason, collector_reason);
    DCHECK(AllowHeapAllocation::IsAllowed());
    HistogramTimerScope histogram_timer_scope(
    next_gc_likely_to_collect_more =

  return next_gc_likely_to_collect_more;
  if (isolate()->concurrent_recompilation_enabled()) {

  if (len == 0) return;

  DCHECK(array->map() != fixed_cow_array_map());
  for (int i = 0; i < len; i++) {

    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject()) {
        CHECK((*p)->IsTheHole() || (*p)->IsUndefined() ||
              (*p)->IsInternalizedString());

static void VerifyStringTable(Heap* heap) {
  StringTableVerifier verifier;
  heap->string_table()->IterateElements(&verifier);
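// Space reservation retries garbage collection up to kThreshold times until
// the requested sizes can be allocated; the messages below report a failed
// reservation.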
  bool gc_performed = true;
  static const int kThreshold = 20;
  while (gc_performed && counter++ < kThreshold) {
    gc_performed = false;
      if (sizes[space] == 0) continue;
      bool perform_gc = false;
          if (allocation.To(&node)) {
        "failed to reserve space in the new space");
        "failed to reserve space in paged or large object space");

  while (!context->IsUndefined()) {
    Object* caches_or_undefined =
    if (!caches_or_undefined->IsUndefined()) {
      FixedArray* caches = FixedArray::cast(caches_or_undefined);
      int length = caches->length();
      for (int i = 0; i < length; i++) {
        JSFunctionResultCache::cast(caches->get(i))->Clear();

  while (!context->IsUndefined()) {
    if (!cache->IsUndefined()) {
      NormalizedMapCache::cast(cache)->Clear();

  if (start_new_space_size == 0) return;
                      static_cast<double>(start_new_space_size) * 100);
                      static_cast<double>(start_new_space_size) * 100);
  int freed_global_handles = 0;

  if (FLAG_verify_heap) {
    VerifyStringTable(this);

    freed_global_handles =
  Relocatable::PostGarbageCollectionProcessing(isolate_);

  if (FLAG_verify_heap) {
    VerifyStringTable(this);

  return freed_global_handles > 0;

      callback(gc_type, flags);
      callback(gc_type, gc_callback_flags);

  LOG(isolate_, ResourceEvent("markcompact", "begin"));
  LOG(isolate_, ResourceEvent("markcompact", "end"));
  if (FLAG_allocation_site_pretenuring) {
  if (FLAG_cleanup_code_caches_at_gc) {
    polymorphic_code_cache()->set_cache(undefined_value());

class VerifyNonPointerSpacePointersVisitor : public ObjectVisitor {
  explicit VerifyNonPointerSpacePointersVisitor(Heap* heap) : heap_(heap) {}
    for (Object** current = start; current < end; current++) {
      if ((*current)->IsHeapObject()) {
        CHECK(!heap_->InNewSpace(HeapObject::cast(*current)));

static void VerifyNonPointerSpacePointers(Heap* heap) {
  VerifyNonPointerSpacePointersVisitor v(heap);
  HeapObjectIterator code_it(heap->code_space());
  for (HeapObject* object = code_it.Next(); object != NULL;
       object = code_it.Next())
    object->Iterate(&v);

  HeapObjectIterator data_it(heap->old_data_space());
  for (HeapObject* object = data_it.Next(); object != NULL;
       object = data_it.Next())
    object->Iterate(&v);
         !HeapObject::cast(*p)->map_word().IsForwardingAddress();

  intptr_t* head_start = rear_;
  while (head_start != head_end) {
    int size = static_cast<int>(*(head_start++));

  MapWord map_word = HeapObject::cast(object)->map_word();
  if (map_word.IsForwardingAddress()) {
    return map_word.ToForwardingAddress();

  if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);

  LOG(isolate_, ResourceEvent("scavenge", "begin"));

       heap_object = cell_iterator.Next()) {
    if (heap_object->IsCell()) {
      Cell* cell = Cell::cast(heap_object);

  for (HeapObject* heap_object = js_global_property_cell_iterator.Next();
       heap_object != NULL;
       heap_object = js_global_property_cell_iterator.Next()) {
    if (heap_object->IsPropertyCell()) {
    new_space_front = DoScavenge(&scavenge_visitor, new_space_front);

  while (isolate()->global_handles()->IterateObjectGroups(
    new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
  new_space_front = DoScavenge(&scavenge_visitor, new_space_front);

  MapWord first_word = HeapObject::cast(*p)->map_word();
  if (!first_word.IsForwardingAddress()) {
  return String::cast(first_word.ToForwardingAddress());

  if (FLAG_verify_heap) {

  for (Object** p = start; p < end; ++p) {
    String* target = updater_func(this, p);
    if (target == NULL) continue;
    DCHECK(target->IsExternalString());

  for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
  Object* array_buffer_obj =
  Object* undefined = undefined_value();
    o = buffer->weak_next();

  Object* allocation_site_obj =
  bool marked = false;
  while (cur->IsAllocationSite()) {
    cur = casted->weak_next();

                                   uint64_t size_of_objects_before_gc) {
  double old_generation_survival_rate =
      (static_cast<double>(size_of_objects_after_gc) * 100) /
      static_cast<double>(size_of_objects_before_gc);

    if (FLAG_trace_pretenuring) {
          "Deopt all allocation sites dependent code due to low survival "
          "rate in the old generation %f\n",
          old_generation_survival_rate);

  class ExternalStringTableVisitorAdapter : public ObjectVisitor {
    explicit ExternalStringTableVisitorAdapter(
        : visitor_(visitor) {}
    virtual void VisitPointers(Object** start, Object** end) {
      for (Object** p = start; p < end; p++) {
        DCHECK((*p)->IsExternalString());
        visitor_->VisitExternalString(
  } external_string_table_visitor(visitor);

      NewSpaceScavenger::IterateBody(object->map(), object);

  DCHECK(!target->IsMap());

  return new_space_front;
      kVisitNativeContext,
      kVisitSharedFunctionInfo,

    bool should_record = false;
    should_record = FLAG_heap_stats;
    should_record = should_record || FLAG_log_gc;
    if (should_record) {

    source->set_map_word(MapWord::FromForwardingAddress(target));
    if (Marking::TransferColor(source, target)) {

  template <int alignment>
    int allocation_size = object_size;
        heap->new_space()->AllocateRaw(allocation_size);
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

  template <ObjectContents object_contents, int alignment>
    int allocation_size = object_size;
    if (allocation.To(&target)) {
      MigrateObject(heap, object, target, object_size);

  template <ObjectContents object_contents, int alignment>
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
    if (PromoteObject<object_contents, alignment>(map, slot, object,
    if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) return;

    MapWord map_word = object->map_word();
    DCHECK(map_word.IsForwardingAddress());
    HeapObject* target = map_word.ToForwardingAddress();
    MarkBit mark_bit = Marking::MarkBitFrom(target);
    if (Marking::IsBlack(mark_bit)) {
      map->GetHeap()->mark_compact_collector()->RecordCodeEntrySlot(
          code_entry_slot, code);

    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object,
    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object,
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,
    int object_size = reinterpret_cast<FixedFloat64Array*>(object)->size();
    EvacuateObject<DATA_OBJECT, kDoubleAlignment>(map, slot, object,

    int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,

    int object_size = SeqOneByteString::cast(object)
                          ->SeqOneByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,

    int object_size = SeqTwoByteString::cast(object)
                          ->SeqTwoByteStringSize(map->instance_type());
    EvacuateObject<DATA_OBJECT, kObjectAlignment>(map, slot, object,

        ConsString::cast(object)->unchecked_second() == heap->empty_string()) {
          HeapObject::cast(ConsString::cast(object)->unchecked_first());
      object->set_map_word(MapWord::FromForwardingAddress(first));

      MapWord first_word = first->map_word();
      if (first_word.IsForwardingAddress()) {
        HeapObject* target = first_word.ToForwardingAddress();
        object->set_map_word(MapWord::FromForwardingAddress(target));
      object->set_map_word(MapWord::FromForwardingAddress(*slot));

    EvacuateObject<POINTER_OBJECT, kObjectAlignment>(map, slot, object,

  template <ObjectContents object_contents>
    template <int object_size>
      EvacuateObject<object_contents, kObjectAlignment>(map, slot, object,

      int object_size = map->instance_size();
      EvacuateObject<object_contents, kObjectAlignment>(map, slot, object,
    bool logging_and_profiling =
    if (!logging_and_profiling) {
    if (!logging_and_profiling) {
          StaticVisitorBase::kVisitShortcutCandidate,
          StaticVisitorBase::kVisitConsString));

  MapWord first_word = object->map_word();
  Map* map = first_word.ToMap();
  map->GetHeap()->DoScavengeObject(map, p, object);

                                          int instance_size) {
  if (!allocation.To(&result)) return allocation;

  reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
  reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
  reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
  reinterpret_cast<Map*>(result)->set_visitor_id(
  reinterpret_cast<Map*>(result)->set_inobject_properties(0);
  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
  reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
  reinterpret_cast<Map*>(result)->set_bit_field(0);
  reinterpret_cast<Map*>(result)->set_bit_field2(0);
  reinterpret_cast<Map*>(result)->set_bit_field3(bit_field3);
  if (!allocation.To(&result)) return allocation;

  Map* map = Map::cast(result);
  map->set_instance_type(instance_type);
  map->set_visitor_id(
  map->set_instance_size(instance_size);
  map->set_inobject_properties(0);
  map->set_pre_allocated_property_fields(0);
  map->set_dependent_code(DependentCode::cast(empty_fixed_array()),
  map->init_back_pointer(undefined_value());
  map->set_unused_property_fields(0);
  map->set_instance_descriptors(empty_descriptor_array());
  map->set_bit_field(0);
  map->set_bit_field3(bit_field3);
  map->set_elements_kind(elements_kind);

  if (!allocation.To(&obj)) return allocation;
#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \
  { type, size, k##camel_name##MapRootIndex }             \

#undef STRING_TYPE_ELEMENT

#define CONSTANT_STRING_ELEMENT(name, contents) \
  { contents, k##name##RootIndex }              \

#undef CONSTANT_STRING_ELEMENT

#define STRUCT_TABLE_ELEMENT(NAME, Name, name)        \
  { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex } \

#undef STRUCT_TABLE_ELEMENT

  if (!allocation.To(&obj)) return false;

  Map* new_meta_map = reinterpret_cast<Map*>(obj);
  set_meta_map(new_meta_map);
  new_meta_map->set_map(new_meta_map);

#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)                \
    if (!AllocatePartialMap((instance_type), (size)).To(&map)) return false; \
    set_##field_name##_map(map);                                             \

                       constant_pool_array);
#undef ALLOCATE_PARTIAL_MAP
    if (!allocation.To(&obj)) return false;
  set_empty_fixed_array(FixedArray::cast(obj));

    if (!allocation.To(&obj)) return false;
  set_null_value(Oddball::cast(obj));

    if (!allocation.To(&obj)) return false;
  set_undefined_value(Oddball::cast(obj));
  set_exception(null_value());

    if (!allocation.To(&obj)) return false;
  set_empty_descriptor_array(DescriptorArray::cast(obj));

    if (!allocation.To(&obj)) return false;
  set_empty_constant_pool_array(ConstantPoolArray::cast(obj));

  meta_map()->set_code_cache(empty_fixed_array());
  meta_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
  meta_map()->init_back_pointer(undefined_value());
  meta_map()->set_instance_descriptors(empty_descriptor_array());

  fixed_array_map()->set_code_cache(empty_fixed_array());
  fixed_array_map()->set_dependent_code(
      DependentCode::cast(empty_fixed_array()));
  fixed_array_map()->init_back_pointer(undefined_value());
  fixed_array_map()->set_instance_descriptors(empty_descriptor_array());

  undefined_map()->set_code_cache(empty_fixed_array());
  undefined_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
  undefined_map()->init_back_pointer(undefined_value());
  undefined_map()->set_instance_descriptors(empty_descriptor_array());

  null_map()->set_code_cache(empty_fixed_array());
  null_map()->set_dependent_code(DependentCode::cast(empty_fixed_array()));
  null_map()->init_back_pointer(undefined_value());
  null_map()->set_instance_descriptors(empty_descriptor_array());

  constant_pool_array_map()->set_code_cache(empty_fixed_array());
  constant_pool_array_map()->set_dependent_code(
      DependentCode::cast(empty_fixed_array()));
  constant_pool_array_map()->init_back_pointer(undefined_value());
  constant_pool_array_map()->set_instance_descriptors(empty_descriptor_array());

  meta_map()->set_prototype(null_value());
  meta_map()->set_constructor(null_value());

  fixed_array_map()->set_prototype(null_value());
  fixed_array_map()->set_constructor(null_value());

  undefined_map()->set_prototype(null_value());
  undefined_map()->set_constructor(null_value());

  null_map()->set_prototype(null_value());
  null_map()->set_constructor(null_value());

  constant_pool_array_map()->set_prototype(null_value());
  constant_pool_array_map()->set_constructor(null_value());
#define ALLOCATE_MAP(instance_type, size, field_name)               \
    if (!AllocateMap((instance_type), size).To(&map)) return false; \
    set_##field_name##_map(map);                                    \

#define ALLOCATE_VARSIZE_MAP(instance_type, field_name) \
  ALLOCATE_MAP(instance_type, kVariableSizeSentinel, field_name)

  DCHECK(fixed_array_map() != fixed_cow_array_map());
                       mutable_heap_number)

      if (!allocation.To(&obj)) return false;

      Map* map = Map::cast(obj);
      if (StringShape(entry.type).IsCons()) map->mark_unstable();

  undetectable_string_map()->set_is_undetectable();
  undetectable_one_byte_string_map()->set_is_undetectable();

#define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size)        \
  ALLOCATE_MAP(EXTERNAL_##TYPE##_ARRAY_TYPE, ExternalArray::kAlignedSize, \
               external_##type##_array)
#undef ALLOCATE_EXTERNAL_ARRAY_MAP

#define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size) \
  ALLOCATE_VARSIZE_MAP(FIXED_##TYPE##_ARRAY_TYPE, fixed_##type##_array)
#undef ALLOCATE_FIXED_TYPED_ARRAY_MAP

  native_context_map()->set_dictionary_map(true);
  native_context_map()->set_visitor_id(
      StaticVisitorBase::kVisitNativeContext);
                       shared_function_info)

  external_map()->set_is_extensible(false);
#undef ALLOCATE_VARSIZE_MAP
  set_empty_byte_array(byte_array);

#define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size)   \
    ExternalArray* obj;                                                \
    if (!AllocateEmptyExternalArray(kExternal##Type##Array).To(&obj))  \
    set_empty_external_##type##_array(obj);                            \

#undef ALLOCATE_EMPTY_EXTERNAL_ARRAY

#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size) \
    FixedTypedArrayBase* obj;                                           \
    if (!AllocateEmptyFixedTypedArray(kExternal##Type##Array).To(&obj)) \
    set_empty_fixed_##type##_array(obj);                                \

#undef ALLOCATE_EMPTY_FIXED_TYPED_ARRAY

  if (!allocation.To(&result)) return allocation;

  HeapObject::cast(result)->set_map_no_write_barrier(map);
  HeapNumber::cast(result)->set_value(value);

  if (!allocation.To(&result)) return allocation;
  Cell::cast(result)->set_value(value);

  if (!allocation.To(&result)) return allocation;
  cell->set_dependent_code(DependentCode::cast(empty_fixed_array()),
  cell->set_value(the_hole_value());

  set_neander_map(*new_neander_map);
  listeners->set_elements(*elements);
  set_message_listeners(*listeners);
void Heap::CreateJSEntryStub() {
  set_js_entry_code(*stub.GetCode());

void Heap::CreateJSConstructEntryStub() {
  JSEntryStub stub(isolate(), StackFrame::ENTRY_CONSTRUCT);
  set_js_construct_entry_code(*stub.GetCode());

  CodeStub::GenerateStubsAheadOfTime(isolate());

  Heap::CreateJSEntryStub();
  Heap::CreateJSConstructEntryStub();

  DCHECK(std::signbit(minus_zero_value()->Number()) != 0);
  set_true_value(*factory->NewOddball(factory->boolean_map(), "true",
  set_false_value(*factory->NewOddball(factory->boolean_map(), "false",
  set_the_hole_value(*factory->NewOddball(factory->the_hole_map(), "hole",

  set_uninitialized_value(*factory->NewOddball(
      factory->uninitialized_map(), "uninitialized",

  set_arguments_marker(*factory->NewOddball(
      factory->arguments_marker_map(), "arguments_marker",

  set_no_interceptor_result_sentinel(*factory->NewOddball(
      factory->no_interceptor_result_sentinel_map(),

  set_termination_exception(*factory->NewOddball(
      factory->termination_exception_map(), "termination_exception",

  set_exception(*factory->NewOddball(factory->exception_map(), "exception",

  set_polymorphic_code_cache(PolymorphicCodeCache::cast(
  set_intrinsic_function_names(*intrinsic_names);

  set_number_string_cache(
  set_single_character_string_cache(
  set_string_split_cache(*factory->NewFixedArray(
  set_regexp_multiple_cache(*factory->NewFixedArray(
  set_natives_source_cache(
  set_undefined_cell(*factory->NewCell(factory->undefined_value()));
  set_symbol_registry(undefined_value());
  set_observation_state(*factory->NewJSObjectFromMap(
  set_microtask_queue(empty_fixed_array());

  set_detailed_stack_trace_symbol(*factory->NewPrivateOwnSymbol());
  set_elements_transition_symbol(*factory->NewPrivateOwnSymbol());
  set_frozen_symbol(*factory->NewPrivateOwnSymbol());
  set_megamorphic_symbol(*factory->NewPrivateOwnSymbol());
  set_premonomorphic_symbol(*factory->NewPrivateOwnSymbol());
  set_generic_symbol(*factory->NewPrivateOwnSymbol());
  set_nonexistent_symbol(*factory->NewPrivateOwnSymbol());
  set_normal_ic_symbol(*factory->NewPrivateOwnSymbol());
  set_observed_symbol(*factory->NewPrivateOwnSymbol());
  set_stack_trace_symbol(*factory->NewPrivateOwnSymbol());
  set_uninitialized_symbol(*factory->NewPrivateOwnSymbol());
  set_home_object_symbol(*factory->NewPrivateOwnSymbol());

  slow_element_dictionary->set_requires_slow_elements();
  set_empty_slow_element_dictionary(*slow_element_dictionary);
  set_materialized_objects(*factory->NewFixedArray(0, TENURED));

  set_allocation_sites_scratchpad(
      kStoreBufferTopRootIndex,
      kStackLimitRootIndex,
      kNumberStringCacheRootIndex,
      kInstanceofCacheFunctionRootIndex,
      kInstanceofCacheMapRootIndex,
      kInstanceofCacheAnswerRootIndex,
      kCodeStubsRootIndex,
      kNonMonomorphicCacheRootIndex,
      kPolymorphicCodeCacheRootIndex,
      kLastScriptIdRootIndex,
      kEmptyScriptRootIndex,
      kRealStackLimitRootIndex,
      kArgumentsAdaptorDeoptPCOffsetRootIndex,
      kConstructStubDeoptPCOffsetRootIndex,
      kGetterStubDeoptPCOffsetRootIndex,
      kSetterStubDeoptPCOffsetRootIndex,

  for (unsigned int i = 0; i < arraysize(writable_roots); i++) {
    if (root_index == writable_roots[i]) return true;
    if (!key_string->IsInternalizedString()) return Smi::FromInt(0);
    DCHECK(key_pattern->IsString());
    if (!key_pattern->IsInternalizedString()) return Smi::FromInt(0);
    cache = heap->string_split_cache();
    DCHECK(key_pattern->IsFixedArray());
    cache = heap->regexp_multiple_cache();

  Factory* factory = isolate->factory();
    if (!key_string->IsInternalizedString()) return;
    DCHECK(key_pattern->IsString());
    if (!key_pattern->IsInternalizedString()) return;
    cache = factory->string_split_cache();
    DCHECK(key_pattern->IsFixedArray());
    cache = factory->regexp_multiple_cache();

  uint32_t hash = key_string->Hash();

    for (int i = 0; i < value_array->length(); i++) {
      Handle<String> internalized_str = factory->InternalizeString(str);
      value_array->set(i, *internalized_str);
    value_array->set_map_no_write_barrier(*factory->fixed_cow_array_map());
               Min(0x4000, number_string_cache_size));
  return number_string_cache_size * 2;

  int len = number_string_cache()->length();
  for (int i = 0; i < len; i++) {
    number_string_cache()->set_undefined(i);

    allocation_sites_scratchpad()->set_undefined(i);

  DCHECK(allocation_sites_scratchpad()->length() ==
    allocation_sites_scratchpad()->set_undefined(i);

    Object** slot = allocation_sites_scratchpad()->RawFieldOfElementAt(
  switch (array_type) {
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
  case kExternal##Type##Array:                                  \
    return kExternal##Type##ArrayMapRootIndex;
#undef ARRAY_TYPE_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  switch (array_type) {
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
  case kExternal##Type##Array:                                  \
    return kFixed##Type##ArrayMapRootIndex;
#undef ARRAY_TYPE_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  switch (elementsKind) {
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ELEMENTS:                                \
    return kEmptyExternal##Type##ArrayRootIndex;
#undef ELEMENT_KIND_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  switch (elementsKind) {
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size) \
  case TYPE##_ELEMENTS:                                           \
    return kEmptyFixed##Type##ArrayRootIndex;
#undef ELEMENT_KIND_TO_ROOT_INDEX
      return kUndefinedValueRootIndex;

  return ExternalArray::cast(
  return FixedTypedArrayBase::cast(
  if (!allocation.To(&result)) return allocation;

  if (!allocation.To(&result)) return allocation;
  ByteArray::cast(result)->set_length(length);

  if (size == 0) return;
    FreeSpace::cast(filler)->set_size(size);

  Address address = object->address();

  return (!is_in_old_pointer_space && !is_in_old_data_space) ||
      Marking::IsBlack(Marking::MarkBitFrom(address))) {
                                         int elements_to_trim) {
  const int bytes_to_trim = elements_to_trim * element_size;
  Map* map = object->map();

  DCHECK(object->map() != fixed_cow_array_map());

  const int len = object->length();
  DCHECK(elements_to_trim <= len);

  Address new_start = object->address() + bytes_to_trim;

  int new_start_index = elements_to_trim * (element_size / kPointerSize);
  former_start[new_start_index] = map;
  former_start[new_start_index + 1] = Smi::FromInt(len - elements_to_trim);

template<Heap::InvocationMode mode>
  const int bytes_to_trim = elements_to_trim * element_size;

  DCHECK(object->map() != fixed_cow_array_map());

  const int len = object->length();
  DCHECK(elements_to_trim < len);

  Address new_end = object->address() + object->Size() - bytes_to_trim;

  object->synchronized_set_length(len - elements_to_trim);
  if (profiler->is_tracking_allocations()) {

                                                      void* external_pointer,
  if (!allocation.To(&result)) return allocation;

  ExternalArray::cast(result)->set_length(length);
  ExternalArray::cast(result)->set_external_pointer(external_pointer);

  switch (array_type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case kExternal##Type##Array:                          \
    *element_size = size;                               \
    *element_kind = TYPE##_ELEMENTS;                    \
#undef TYPED_ARRAY_CASE
#ifndef V8_HOST_ARCH_64_BIT
  if (!allocation.To(&object)) return allocation;

  if (!allocation.To(&result)) return allocation;

  if (!allocation.To(&result)) return allocation;

  Code* code = Code::cast(result);

  if (FLAG_enable_ool_constant_pool &&
    if (!allocation.To(&new_constant_pool)) return allocation;
    new_constant_pool = empty_constant_pool_array();

  int obj_size = code->Size();
  if (!allocation.To(&result)) return allocation;

  CopyBlock(new_addr, old_addr, obj_size);
  Code* new_code = Code::cast(result);

  new_code->Relocate(new_addr - old_addr);

  if (!allocation.To(&reloc_info_array)) return allocation;

  if (FLAG_enable_ool_constant_pool &&
    if (!allocation.To(&new_constant_pool)) return allocation;
    new_constant_pool = empty_constant_pool_array();

  size_t relocation_offset =
  if (!allocation.To(&result)) return allocation;

  CopyBytes(new_addr, old_addr, relocation_offset);

  Code* new_code = Code::cast(result);
  new_code->set_relocation_info(reloc_info_array);

            static_cast<size_t>(reloc_info.length()));

  new_code->Relocate(new_addr - old_addr);

  if (FLAG_verify_heap) code->ObjectVerify();
  DCHECK(allocation_site->map() == allocation_site_map());
  if (FLAG_allocation_site_pretenuring) {

  int size = map->instance_size();
  if (allocation_site != NULL) {
  if (!allocation.To(&result)) return allocation;
  if (allocation_site != NULL) {
        reinterpret_cast<Address>(result) + map->instance_size());

  obj->set_properties(properties);

  if (map->constructor()->IsJSFunction() &&
      JSFunction::cast(map->constructor())
          ->IsInobjectSlackTrackingInProgress()) {
    filler = Heap::one_pointer_filler_map();
    filler = Heap::undefined_value();

  if (allocate_properties) {
    int prop_size = map->InitialPropertiesLength();
    if (!allocation.To(&properties)) return allocation;
    properties = empty_fixed_array();

  int size = map->instance_size();
  if (!allocation.To(&js_obj)) return allocation;

      constructor->initial_map(), pretenure, true, allocation_site);
  DCHECK(!allocation.To(&obj) || !obj->IsGlobalObject());
  int object_size = map->instance_size();

  if (!allocation.To(&clone)) return allocation;

    int adjusted_object_size =
    if (!allocation.To(&clone)) return allocation;
          reinterpret_cast<Address>(clone) + object_size);

  SLOW_DCHECK(JSObject::cast(clone)->GetElementsKind() ==
  FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
  FixedArray* properties = FixedArray::cast(source->properties());

  if (elements->length() > 0) {
      if (elements->map() == fixed_cow_array_map()) {
        allocation = FixedArray::cast(elements);
      if (!allocation.To(&elem)) return allocation;
    JSObject::cast(clone)->set_elements(elem, wb_mode);

  if (properties->length() > 0) {
    if (!allocation.To(&prop)) return allocation;
    JSObject::cast(clone)->set_properties(prop, wb_mode);
  const uint8_t* stream = reinterpret_cast<const uint8_t*>(vector.start());
  unsigned stream_length = vector.length();
  while (stream_length != 0) {
    unsigned consumed = 0;
    DCHECK(consumed <= stream_length);
    stream_length -= consumed;
  DCHECK(stream_length == 0);

template <bool is_one_byte, typename T>
    map = one_byte_internalized_string_map();
    map = internalized_string_map();

  if (!allocation.To(&result)) return allocation;

  String* answer = String::cast(result);
  if (!allocation.To(&result)) return allocation;
  String::cast(result)->set_length(length);

  if (!allocation.To(&result)) return allocation;
  String::cast(result)->set_length(length);

  if (!allocation.To(&result)) return allocation;
  FixedArray::cast(result)->set_length(0);

  if (!allocation.To(&obj)) return allocation;
  HeapObject::cast(obj)->set_map_no_write_barrier(fixed_cow_array_map());

  if (!allocation.To(&obj)) return allocation;
  if (!allocation.To(&obj)) return allocation;

  ConstantPoolArray::NumberOfEntries small(src,
  ConstantPoolArray::NumberOfEntries extended(
  if (!allocation.To(&obj)) return allocation;

  ConstantPoolArray::NumberOfEntries small(src,
  if (!allocation.To(&obj)) return allocation;

  DCHECK(empty_fixed_array()->IsFixedArray());
  if (length == 0) return empty_fixed_array();

  if (!allocation.To(&result)) return allocation;
  FixedArray* array = FixedArray::cast(result);

  if (length == 0) return empty_fixed_array();
  if (!allocation.To(&obj)) return allocation;
  FixedArray::cast(obj)->set_length(length);

  if (length == 0) return empty_fixed_array();
  if (!allocation.To(&elements)) return allocation;
  FixedDoubleArray::cast(elements)->set_length(length);
#ifndef V8_HOST_ARCH_64_BIT
  if (!allocation.To(&object)) return allocation;

    const ConstantPoolArray::NumberOfEntries& small) {
#ifndef V8_HOST_ARCH_64_BIT
  if (!allocation.To(&object)) return allocation;
  object->set_map_no_write_barrier(constant_pool_array_map());

  constant_pool->Init(small);
  return constant_pool;

    const ConstantPoolArray::NumberOfEntries& small,
    const ConstantPoolArray::NumberOfEntries& extended) {
#ifndef V8_HOST_ARCH_64_BIT
  if (!allocation.To(&object)) return allocation;
  object->set_map_no_write_barrier(constant_pool_array_map());
  return constant_pool;

  ConstantPoolArray::NumberOfEntries small(0, 0, 0, 0);
  if (!allocation.To(&result)) return allocation;
  ConstantPoolArray::cast(result)->Init(small);

  if (!allocation.To(&result)) return allocation;

  } while (hash == 0 && attempts < 30);
  if (hash == 0) hash = 1;

  Symbol::cast(result)
  Symbol::cast(result)->set_name(undefined_value());
  DCHECK(!Symbol::cast(result)->is_private());
#define MAKE_CASE(NAME, Name, name) \
    map = name##_map();             \

  int size = map->instance_size();
  if (!allocation.To(&result)) return allocation;

  DCHECK(AllowHeapAllocation::IsAllowed());

  bool uncommit = false;
                  "idle notification: finalize incremental");
  if (!FLAG_incremental_marking) return true;
  base::ElapsedTimer timer;
  HistogramTimerScope idle_notification_scope(
      static_cast<size_t>(

  bool result = false;
  switch (action.type) {
              ? "idle notification: contexts disposed"
              : "idle notification: finalize idle round";

  int actual_time_ms = static_cast<int>(timer.Elapsed().InMilliseconds());
  if (actual_time_ms <= idle_time_in_ms) {
        idle_time_in_ms - actual_time_ms);
        actual_time_ms - idle_time_in_ms);

  if (FLAG_trace_idle_notification) {
    PrintF("Idle notification: requested idle time %d ms, actual time %d ms [",
           idle_time_in_ms, actual_time_ms);
void Heap::Print() {
  AllSpaces spaces(this);

void Heap::ReportCodeStatistics(const char* title) {
  PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
  PagedSpace::ResetCodeStatistics(isolate());
  PagedSpace::ReportCodeStatistics(isolate());

void Heap::ReportHeapStatistics(const char* title) {
  PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n", title,

  PrintF("Heap statistics : ");
  PrintF("Old pointer space : ");
  PrintF("Old data space : ");
  PrintF("PropertyCell space : ");
  PrintF("Large object space : ");
  PrintF(">>>>>> ========================================= >>>>>>\n");
void Heap::Verify() {
  VerifyPointersVisitor visitor;
  VerifySmisVisitor smis_visitor;
  VerifyPointersVisitor no_dirty_regions_visitor;

  while (it.has_next()) {

  bool record_slots = false;
    record_slots = Marking::IsBlack(mark_bit);

  while (slot_address < end) {
    Object** slot = reinterpret_cast<Object**>(slot_address);
      if (object->IsHeapObject()) {
        callback(reinterpret_cast<HeapObject**>(slot),
                 HeapObject::cast(object));
        Object* new_object = *slot;
                                  reinterpret_cast<Address>(slot));
        SLOW_DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
      } else if (record_slots &&
                 MarkCompactCollector::IsOnEvacuationCandidate(object)) {
typedef bool (*CheckStoreBufferFilter)(Object** addr);

bool IsAMapPointerAddress(Object** addr) {

bool EverythingsAPointer(Object** addr) { return true; }

static void CheckStoreBuffer(Heap* heap, Object** current, Object** limit,
                             Object**** store_buffer_position,
                             Object*** store_buffer_top,
                             CheckStoreBufferFilter filter,
                             Address special_garbage_start,
                             Address special_garbage_end) {
  Map* free_space_map = heap->free_space_map();
  for (; current < limit; current++) {
    if (o == free_space_map) {
      FreeSpace* free_space =
      int skip = free_space->Size();
      DCHECK(current_address + skip <= reinterpret_cast<Address>(limit));
      current = reinterpret_cast<Object**>(current_address);
    if (current_address == special_garbage_start &&
        special_garbage_end != special_garbage_start) {
      current = reinterpret_cast<Object**>(current_address);
    if (!(*filter)(current)) continue;
    DCHECK(current_address < special_garbage_start ||
           current_address >= special_garbage_end);
    if (!heap->InNewSpace(o)) continue;
    while (**store_buffer_position < current &&
           *store_buffer_position < store_buffer_top) {
      (*store_buffer_position)++;
    if (**store_buffer_position != current ||
        *store_buffer_position == store_buffer_top) {
      Object** obj_start = current;
      while (!(*obj_start)->IsMap()) obj_start--;
void Heap::OldPointerSpaceCheckStoreBuffer() {
  PageIterator pages(space);

  while (pages.has_next()) {
    Page* page = pages.next();
    Object** current = reinterpret_cast<Object**>(page->area_start());

    Address end = page->area_end();

    CheckStoreBuffer(this, current, limit, &store_buffer_position,
                     store_buffer_top, &EverythingsAPointer, space->top(),

void Heap::MapSpaceCheckStoreBuffer() {
  PageIterator pages(space);

  while (pages.has_next()) {
    Page* page = pages.next();
    Object** current = reinterpret_cast<Object**>(page->area_start());

    Address end = page->area_end();

    CheckStoreBuffer(this, current, limit, &store_buffer_position,
                     store_buffer_top, &IsAMapPointerAddress, space->top(),

void Heap::LargeObjectSpaceCheckStoreBuffer() {
  LargeObjectIterator it(lo_space());
  for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
    if (object->IsFixedArray()) {
      Object** current = reinterpret_cast<Object**>(object->address());
          reinterpret_cast<Object**>(object->address() + object->Size());
      CheckStoreBuffer(this, current, limit, &store_buffer_position,
                       store_buffer_top, &EverythingsAPointer, NULL, NULL);
  v->Synchronize(VisitorSynchronization::kStringTable);
  v->Synchronize(VisitorSynchronization::kExternalStringsTable);

    ExecutionAccess access(isolate());
  v->Synchronize(VisitorSynchronization::kSmiRootList);
  v->Synchronize(VisitorSynchronization::kStrongRootList);
  v->Synchronize(VisitorSynchronization::kInternalizedString);
  v->Synchronize(VisitorSynchronization::kBootstrapper);
  v->Synchronize(VisitorSynchronization::kTop);
  v->Synchronize(VisitorSynchronization::kRelocatable);
  v->Synchronize(VisitorSynchronization::kDebug);
  v->Synchronize(VisitorSynchronization::kCompilationCache);
  v->Synchronize(VisitorSynchronization::kHandleScope);
  v->Synchronize(VisitorSynchronization::kBuiltins);
  v->Synchronize(VisitorSynchronization::kGlobalHandles);
  v->Synchronize(VisitorSynchronization::kEternalHandles);
  v->Synchronize(VisitorSynchronization::kThreadManager);
                         int max_executable_size, size_t code_range_size) {
  if (max_semi_space_size > 0) {
  if (max_old_space_size > 0) {
  if (max_executable_size > 0) {

  if (FLAG_max_semi_space_size > 0) {
  if (FLAG_max_old_space_size > 0) {
  if (FLAG_max_executable_size > 0) {

  if (FLAG_stress_compaction) {

    if (FLAG_trace_gc) {
      PrintPID("Max semi-space size cannot be more than %d kbytes\n",

  if (FLAG_min_semi_space_size > 0) {
    int initial_semispace_size = FLAG_min_semi_space_size * MB;
      if (FLAG_trace_gc) {
            "Min semi-space size cannot be more than the maximum "
            "semi-space size of %d MB\n",
  if (take_snapshot) {
         obj = iterator.next()) {

                                                     int freed_global_handles) {
  const int kMaxHandles = 1000;
  const int kMinHandles = 100;
  double min_factor = 1.1;
  double max_factor = 4;

  if (freed_global_handles <= kMinHandles) {
    factor = max_factor;
  } else if (freed_global_handles >= kMaxHandles) {
    factor = min_factor;
    factor = max_factor -
             (freed_global_handles - kMinHandles) * (max_factor - min_factor) /
                 (kMaxHandles - kMinHandles);

  if (FLAG_stress_compaction ||
    factor = min_factor;

  intptr_t limit = static_cast<intptr_t>(old_gen_size * factor);
  return Min(limit, halfway_to_the_max);

  PagedSpaces spaces(this);
       space = spaces.next()) {
    space->EmptyAllocationInfo();

  allocation_timeout_ = FLAG_gc_interval;
  DCHECK(hash_seed() == 0);
  if (FLAG_randomize_hashes) {
    if (FLAG_hash_seed == 0) {

  roots_[kStackLimitRootIndex] = reinterpret_cast<Object*>(
  roots_[kRealStackLimitRootIndex] = reinterpret_cast<Object*>(

  if (FLAG_verify_heap) {
  if (FLAG_print_cumulative_gc_stat) {
  if (FLAG_print_max_heap_committed) {
  if (FLAG_verify_predictable) {

                                 GCType gc_type, bool pass_isolate) {
                                 GCType gc_type, bool pass_isolate) {

  if (dep->IsDependentCode()) return DependentCode::cast(dep);
  return DependentCode::cast(empty_fixed_array());
    for (Object** p = start; p < end; p++)
      PrintF(" handle %p to %p\n", reinterpret_cast<void*>(p),
             reinterpret_cast<void*>(*p));

void Heap::PrintHandles() {
  PrintHandleVisitor v;
Space* AllSpaces::next() {
  switch (counter_++) {
      return heap_->new_space();
      return heap_->old_pointer_space();
      return heap_->old_data_space();
      return heap_->code_space();
      return heap_->map_space();
      return heap_->cell_space();
      return heap_->property_cell_space();
      return heap_->lo_space();

PagedSpace* PagedSpaces::next() {
  switch (counter_++) {
      return heap_->old_pointer_space();
      return heap_->old_data_space();
      return heap_->code_space();
      return heap_->map_space();
      return heap_->cell_space();
      return heap_->property_cell_space();

OldSpace* OldSpaces::next() {
  switch (counter_++) {
      return heap_->old_pointer_space();
      return heap_->old_data_space();
      return heap_->code_space();

      size_func_(size_func) {}
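// UnreachableObjectsFilter marks everything reachable from the roots using
// the mark-compact mark bits and then skips any object whose mark bit was
// never set.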
    MarkReachableObjects();
    heap_->mark_compact_collector()->ClearMarkbits();

    MarkBit mark_bit = Marking::MarkBitFrom(object);
    return !mark_bit.Get();

      for (Object** p = start; p < end; p++) {
        if (!(*p)->IsHeapObject()) continue;
        MarkBit mark_bit = Marking::MarkBitFrom(obj);
        if (!mark_bit.Get()) {
          marking_stack_.Add(obj);

      while (!marking_stack_.is_empty()) {
        HeapObject* obj = marking_stack_.RemoveLast();

    heap_->IterateRoots(&visitor, VISIT_ALL);
HeapIterator::HeapIterator(Heap* heap)
    : make_heap_iterable_helper_(heap),
      no_heap_allocation_(),
      filtering_(HeapIterator::kNoFiltering),

HeapIterator::HeapIterator(Heap* heap,
                           HeapIterator::HeapObjectsFiltering filtering)
    : make_heap_iterable_helper_(heap),
      no_heap_allocation_(),
      filtering_(filtering),

HeapIterator::~HeapIterator() { Shutdown(); }

void HeapIterator::Init() {
  space_iterator_ = new SpaceIterator(heap_);
  switch (filtering_) {
    case kFilterUnreachable:
      filter_ = new UnreachableObjectsFilter(heap_);
  object_iterator_ = space_iterator_->next();

void HeapIterator::Shutdown() {
  if (filtering_ != kNoFiltering) {
  delete space_iterator_;
  space_iterator_ = NULL;
  object_iterator_ = NULL;

HeapObject* HeapIterator::next() {
  if (filter_ == NULL) return NextObject();

  HeapObject* obj = NextObject();
  while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();

HeapObject* HeapIterator::NextObject() {
  if (object_iterator_ == NULL) return NULL;

  if (HeapObject* obj = object_iterator_->next_object()) {
  while (space_iterator_->has_next()) {
    object_iterator_ = space_iterator_->next();
    if (HeapObject* obj = object_iterator_->next_object()) {
  object_iterator_ = NULL;

void HeapIterator::reset() {
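// PathTracer walks the object graph from a root, temporarily tagging map
// words with kMarkTag to avoid revisiting objects, and records the chain of
// objects leading to the search target so it can be printed.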
Object* const PathTracer::kAnyGlobalObject = NULL;

class PathTracer::MarkVisitor : public ObjectVisitor {
  explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
    for (Object** p = start; !tracer_->found() && (p < end); p++) {
      if ((*p)->IsHeapObject()) tracer_->MarkRecursively(p, this);

  PathTracer* tracer_;

class PathTracer::UnmarkVisitor : public ObjectVisitor {
  explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsHeapObject()) tracer_->UnmarkRecursively(p, this);

  PathTracer* tracer_;

void PathTracer::VisitPointers(Object** start, Object** end) {
  bool done = ((what_to_find_ == FIND_FIRST) && found_target_);
  for (Object** p = start; !done && (p < end); p++) {
    if ((*p)->IsHeapObject()) {
      done = ((what_to_find_ == FIND_FIRST) && found_target_);

void PathTracer::Reset() {
  found_target_ = false;
  object_stack_.Clear();

void PathTracer::TracePathFrom(Object** root) {
  DCHECK((search_target_ == kAnyGlobalObject) ||
         search_target_->IsHeapObject());
  found_target_in_trace_ = false;

  MarkVisitor mark_visitor(this);
  MarkRecursively(root, &mark_visitor);

  UnmarkVisitor unmark_visitor(this);
  UnmarkRecursively(root, &unmark_visitor);

static bool SafeIsNativeContext(HeapObject* obj) {
  return obj->map() == obj->GetHeap()->raw_unchecked_native_context_map();
void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  MapWord map_word = obj->map_word();
  if (!map_word.ToMap()->IsHeapObject()) return;

  if (found_target_in_trace_) return;
  object_stack_.Add(obj);
  if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
      (obj == search_target_)) {
    found_target_in_trace_ = true;
    found_target_ = true;

  bool is_native_context = SafeIsNativeContext(obj);

  Map* map = Map::cast(map_word.ToMap());

  MapWord marked_map_word =
      MapWord::FromRawValue(obj->map_word().ToRawValue() + kMarkTag);
  obj->set_map_word(marked_map_word);

    mark_visitor->VisitPointers(start, end);

    obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), mark_visitor);

  MarkRecursively(reinterpret_cast<Object**>(&map), mark_visitor);

  if (!found_target_in_trace_) {
    object_stack_.RemoveLast();

void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
  if (!(*p)->IsHeapObject()) return;

  HeapObject* obj = HeapObject::cast(*p);

  MapWord map_word = obj->map_word();
  if (map_word.ToMap()->IsHeapObject()) return;

  MapWord unmarked_map_word =
      MapWord::FromRawValue(map_word.ToRawValue() - kMarkTag);
  obj->set_map_word(unmarked_map_word);

  Map* map = Map::cast(unmarked_map_word.ToMap());

  UnmarkRecursively(reinterpret_cast<Object**>(&map), unmark_visitor);

  obj->IterateBody(map->instance_type(), obj->SizeFromMap(map), unmark_visitor);
void PathTracer::ProcessResults() {
  if (found_target_) {
    OFStream os(stdout);
    os << "=====================================\n"
       << "==== Path to object ====\n"
       << "=====================================\n\n";

    DCHECK(!object_stack_.is_empty());
    for (int i = 0; i < object_stack_.length(); i++) {
      if (i > 0) os << "\n |\n |\n V\n\n";
      object_stack_[i]->Print(os);
    os << "=====================================\n";

void Heap::TracePathToObjectFrom(Object* target, Object* root) {
  tracer.VisitPointer(&root);

void Heap::TracePathToObject(Object* target) {

void Heap::TracePathToGlobal() {
  PathTracer tracer(PathTracer::kAnyGlobalObject, PathTracer::FIND_ALL,
                                     double spent_in_mutator,
                                     double marking_time) {
  if (FLAG_print_cumulative_gc_stat) {
  } else if (FLAG_trace_gc_verbose) {

  if (!name->IsUniqueName()) {
  if (key.map == free_entry_indicator) {
  if (FLAG_verify_heap) {

  while (inner <= inner_last) {
    if (area_end < inner->address()) area_end = chunk_end;

  if (clear_last_time_stats) {

  base::LockGuard<base::Mutex> lock_guard(

#define ADJUST_LAST_TIME_OBJECT_COUNT(name)              \
  counters->count_of_##name()->Increment(                \
      static_cast<int>(object_counts_[name]));           \
  counters->count_of_##name()->Decrement(                \
      static_cast<int>(object_counts_last_time_[name])); \
  counters->size_of_##name()->Increment(                 \
      static_cast<int>(object_sizes_[name]));            \
  counters->size_of_##name()->Decrement(                 \
      static_cast<int>(object_sizes_last_time_[name]));
#undef ADJUST_LAST_TIME_OBJECT_COUNT

#define ADJUST_LAST_TIME_OBJECT_COUNT(name)               \
  index = FIRST_CODE_KIND_SUB_TYPE + Code::name;          \
  counters->count_of_CODE_TYPE_##name()->Increment(       \
      static_cast<int>(object_counts_[index]));           \
  counters->count_of_CODE_TYPE_##name()->Decrement(       \
      static_cast<int>(object_counts_last_time_[index])); \
  counters->size_of_CODE_TYPE_##name()->Increment(        \
      static_cast<int>(object_sizes_[index]));            \
  counters->size_of_CODE_TYPE_##name()->Decrement(        \
      static_cast<int>(object_sizes_last_time_[index]));
#undef ADJUST_LAST_TIME_OBJECT_COUNT

#define ADJUST_LAST_TIME_OBJECT_COUNT(name)               \
  index = FIRST_FIXED_ARRAY_SUB_TYPE + name;              \
  counters->count_of_FIXED_ARRAY_##name()->Increment(     \
      static_cast<int>(object_counts_[index]));           \
  counters->count_of_FIXED_ARRAY_##name()->Decrement(     \
      static_cast<int>(object_counts_last_time_[index])); \
  counters->size_of_FIXED_ARRAY_##name()->Increment(      \
      static_cast<int>(object_sizes_[index]));            \
  counters->size_of_FIXED_ARRAY_##name()->Decrement(      \
      static_cast<int>(object_sizes_last_time_[index]));
#undef ADJUST_LAST_TIME_OBJECT_COUNT

#define ADJUST_LAST_TIME_OBJECT_COUNT(name)                                   \
      FIRST_CODE_AGE_SUB_TYPE + Code::k##name##CodeAge - Code::kFirstCodeAge; \
  counters->count_of_CODE_AGE_##name()->Increment(                            \
      static_cast<int>(object_counts_[index]));                               \
  counters->count_of_CODE_AGE_##name()->Decrement(                            \
      static_cast<int>(object_counts_last_time_[index]));                     \
  counters->size_of_CODE_AGE_##name()->Increment(                             \
      static_cast<int>(object_sizes_[index]));                                \
  counters->size_of_CODE_AGE_##name()->Decrement(                             \
      static_cast<int>(object_sizes_last_time_[index]));
#undef ADJUST_LAST_TIME_OBJECT_COUNT
static int GcSafeSizeOfOldObject(HeapObject *object)
void set_array_buffers_list(Object *object)
OldSpace * old_pointer_space()
StoreBuffer store_buffer_
MUST_USE_RESULT AllocationResult AllocateEmptyFixedTypedArray(ExternalArrayType array_type)
FixedTypedArrayBase * EmptyFixedTypedArrayForMap(Map *map)
void VisitExternalResources(v8::ExternalResourceVisitor *visitor)
static void ScavengeObject(HeapObject **p, HeapObject *object)
void AddWeakObjectToCodeDependency(Handle< Object > obj, Handle< DependentCode > dep)
void DeoptMarkedAllocationSites()
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind)
friend class HeapIterator
static const int kReduceMemoryFootprintMask
MarkCompactCollector mark_compact_collector_
MUST_USE_RESULT AllocationResult CopyConstantPoolArrayWithMap(ConstantPoolArray *src, Map *map)
static const ConstantStringTable constant_string_table[]
bool OldGenerationAllocationLimitReached()
int initial_semispace_size_
static const int kInitialStringTableSize
void ReserveSpace(int *sizes, Address *addresses)
StoreBufferRebuilder store_buffer_rebuilder_
unsigned int maximum_size_scavenges_
MUST_USE_RESULT AllocationResult AllocateEmptyConstantPoolArray()
PropertyCellSpace * property_cell_space()
intptr_t CommittedMemoryExecutable()
void MoveElements(FixedArray *array, int dst_index, int src_index, int len)
RootListIndex RootIndexForExternalArrayType(ExternalArrayType array_type)
MUST_USE_RESULT AllocationResult AllocateForeign(Address address, PretenureFlag pretenure=NOT_TENURED)
void MarkCompactPrologue()
MUST_USE_RESULT AllocationResult AllocateCell(Object *value)
MUST_USE_RESULT AllocationResult AllocatePartialMap(InstanceType instance_type, int instance_size)
void ClearObjectStats(bool clear_last_time_stats=false)
PagedSpace * paged_space(int idx)
GCIdleTimeHandler gc_idle_time_handler_
MUST_USE_RESULT AllocationResult CopyFixedArrayWithMap(FixedArray *src, Map *map)
void ProcessPretenuringFeedback()
static const int kRememberedUnmappedPages
Address DoScavenge(ObjectVisitor *scavenge_visitor, Address new_space_front)
void InitializeJSObjectFromMap(JSObject *obj, FixedArray *properties, Map *map)
void RemoveGCPrologueCallback(v8::Isolate::GCPrologueCallback callback)
intptr_t MaximumCommittedMemory()
intptr_t max_executable_size_
MUST_USE_RESULT AllocationResult AllocateEmptyExternalArray(ExternalArrayType array_type)
void GarbageCollectionEpilogue()
List< GCEpilogueCallbackPair > gc_epilogue_callbacks_
MUST_USE_RESULT AllocationResult AllocateCode(int object_size, bool immovable)
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length)
Object * native_contexts_list() const
void CheckNewSpaceExpansionCriteria()
LargeObjectSpace * lo_space()
void SelectScavengingVisitorsTable()
void QueueMemoryChunkForFree(MemoryChunk *chunk)
int gcs_since_last_deopt_
Object * allocation_sites_list()
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(int length, PretenureFlag pretenure=NOT_TENURED)
void CheckpointObjectStats()
void ResetAllAllocationSitesDependentCode(PretenureFlag flag)
bool InOldDataSpace(Address address)
static const int kInitialNumberStringCacheSize
void DoScavengeObject(Map *map, HeapObject **slot, HeapObject *obj)
static String * UpdateNewSpaceReferenceInExternalStringTableEntry(Heap *heap, Object **pointer)
int survived_since_last_expansion_
size_t object_counts_[OBJECT_STATS_COUNT]
void OnAllocationEvent(HeapObject *object, int size_in_bytes)
void AddGCPrologueCallback(v8::Isolate::GCPrologueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
void CreateInitialObjects()
PromotionQueue * promotion_queue()
bool WorthActivatingIncrementalMarking()
void ProcessWeakReferences(WeakObjectRetainer *retainer)
MUST_USE_RESULT AllocationResult AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field)
void RememberUnmappedPage(Address page, bool compacted)
void ReportStatisticsAfterGC()
void ProcessNativeContexts(WeakObjectRetainer *retainer)
static const int kMaxOldSpaceSizeMediumMemoryDevice
intptr_t old_generation_allocation_limit_
void FinalizeExternalString(String *string)
double get_max_gc_pause()
intptr_t MaxExecutableSize()
bool InNewSpace(Object *object)
int unflattened_strings_length_
void IterateSmiRoots(ObjectVisitor *v)
bool PerformGarbageCollection(GarbageCollector collector, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
MUST_USE_RESULT AllocationResult AllocateByteArray(int length, PretenureFlag pretenure=NOT_TENURED)
void GarbageCollectionPrologue()
void TearDownArrayBuffers()
ExternalArray * EmptyExternalArrayForMap(Map *map)
static const int kMakeHeapIterableMask
size_t crankshaft_codegen_bytes_generated_
void CreateFillerObjectAt(Address addr, int size)
void InitializeAllocationMemento(AllocationMemento *memento, AllocationSite *allocation_site)
bool AllowedToBeMigrated(HeapObject *object, AllocationSpace dest)
size_t object_sizes_last_time_[OBJECT_STATS_COUNT]
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags)
MUST_USE_RESULT AllocationResult AllocateFixedArray(int length, PretenureFlag pretenure=NOT_TENURED)
static const int kAbortIncrementalMarkingMask
MUST_USE_RESULT AllocationResult AllocateConstantPoolArray(const ConstantPoolArray::NumberOfEntries &small)
Object ** roots_array_start()
MUST_USE_RESULT AllocationResult AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure)
Object * weak_object_to_code_table()
static const intptr_t kMinimumOldGenerationAllocationLimit
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes, AllocationSpace space, AllocationSpace retry_space)
void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator, double marking_time)
bool IsHighSurvivalRate()
StoreBuffer * store_buffer()
static const StringTypeTable string_type_table[]
Object * roots_[kRootListLength]
int allocation_sites_scratchpad_length_
int FullSizeNumberStringCacheLength()
void set_allocation_sites_list(Object *object)
double get_min_in_mutator()
void AddGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback, GCType gc_type_filter, bool pass_isolate=true)
Map * MapForExternalArrayType(ExternalArrayType array_type)
MUST_USE_RESULT AllocationResult CopyConstantPoolArray(ConstantPoolArray *src)
int64_t amount_of_external_allocated_memory_at_last_global_gc_
static const StructTable struct_table[]
bool ConfigureHeap(int max_semi_space_size, int max_old_space_size, int max_executable_size, size_t code_range_size)
bool RootCanBeTreatedAsConstant(RootListIndex root_index)
void ClearAllICsByKind(Code::Kind kind)
MUST_USE_RESULT AllocationResult CopyFixedDoubleArray(FixedDoubleArray *src)
MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray *src)
void ClearJSFunctionResultCaches()
MUST_USE_RESULT AllocationResult Allocate(Map *map, AllocationSpace space, AllocationSite *allocation_site=NULL)
MUST_USE_RESULT AllocationResult AllocateMap(InstanceType instance_type, int instance_size, ElementsKind elements_kind=TERMINAL_FAST_ELEMENTS_KIND)
void IterateWeakRoots(ObjectVisitor *v, VisitMode mode)
MUST_USE_RESULT AllocationResult AllocateJSObjectFromMap(Map *map, PretenureFlag pretenure=NOT_TENURED, bool alloc_props=true, AllocationSite *allocation_site=NULL)
int nodes_died_in_new_space_
MUST_USE_RESULT AllocationResult AllocateExtendedConstantPoolArray(const ConstantPoolArray::NumberOfEntries &small, const ConstantPoolArray::NumberOfEntries &extended)
int gc_post_processing_depth_
PropertyCellSpace * property_cell_space_
Object * encountered_weak_collections_
void UpdateNewSpaceReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
void DisableInlineAllocation()
static const int kNoGCFlags
static void ScavengeStoreBufferCallback(Heap *heap, MemoryChunk *page, StoreBufferEvent event)
intptr_t maximum_committed_
OldSpace * old_pointer_space_
void AddAllocationSiteToScratchpad(AllocationSite *site, ScratchpadSlotMode mode)
static const int kYoungSurvivalRateHighThreshold
bool InFromSpace(Object *object)
void IncrementPromotedObjectsSize(int object_size)
int remembered_unmapped_pages_index_
MemoryChunk * chunks_queued_for_free_
Object * weak_object_to_code_table_
bool CollectGarbage(AllocationSpace space, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
size_t object_counts_last_time_[OBJECT_STATS_COUNT]
bool inline_allocation_disabled_
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags)
void ProcessAllocationSites(WeakObjectRetainer *retainer)
void FlushAllocationSitesScratchpad()
bool NextGCIsLikelyToBeFull()
void set_weak_object_to_code_table(Object *value)
void EnsureWeakObjectToCodeTable()
OldSpace * old_data_space()
size_t object_sizes_[OBJECT_STATS_COUNT]
MUST_USE_RESULT AllocationResult AllocateHeapNumber(double value, MutableMode mode=IMMUTABLE, PretenureFlag pretenure=NOT_TENURED)
void RecordStats(HeapStats *stats, bool take_snapshot=false)
PromotionQueue promotion_queue_
bool CanMoveObjectStart(HeapObject *object)
static AllocationSpace TargetSpaceId(InstanceType type)
void IterateStrongRoots(ObjectVisitor *v, VisitMode mode)
MUST_USE_RESULT AllocationResult AllocateRawTwoByteString(int length, PretenureFlag pretenure)
void IterateAndMarkPointersToFromSpace(Address start, Address end, ObjectSlotCallback callback)
bool IdleNotification(int idle_time_in_ms)
STATIC_ASSERT(kUndefinedValueRootIndex==Internals::kUndefinedValueRootIndex)
void FlushNumberStringCache()
int reserved_semispace_size_
void AdjustLiveBytes(Address address, int by, InvocationMode mode)
void EnableInlineAllocation()
IncrementalMarking * incremental_marking()
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Address new_space_top_after_last_gc_
void UpdateMaximumCommitted()
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type)
void RepairFreeListsAfterBoot()
void PrintShortHeapStatistics()
GarbageCollector SelectGarbageCollector(AllocationSpace space, const char **reason)
VisitorDispatchTable< ScavengingCallback > scavenging_visitors_table_
MUST_USE_RESULT AllocationResult AllocateRawFixedArray(int length, PretenureFlag pretenure)
static AllocationSpace SelectSpace(int object_size, AllocationSpace preferred_old_space, PretenureFlag pretenure)
MUST_USE_RESULT AllocationResult AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure, Object *filler)
static bool ShouldZapGarbage()
bool ShouldBePromoted(Address old_address, int object_size)
static void CopyBlock(Address dst, Address src, int byte_size)
intptr_t CommittedMemory()
void OnMoveEvent(HeapObject *target, HeapObject *source, int size_in_bytes)
MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type)
unsigned int gc_count_at_last_idle_gc_
void RightTrimFixedArray(FixedArrayBase *obj, int elements_to_trim)
MUST_USE_RESULT AllocationResult AllocateSymbol()
bool DeoptMaybeTenuredAllocationSites()
FixedArrayBase * LeftTrimFixedArray(FixedArrayBase *obj, int elements_to_trim)
void CollectAllGarbage(int flags, const char *gc_reason=NULL, const GCCallbackFlags gc_callback_flags=kNoGCCallbackFlags)
void IncrementYoungSurvivorsCounter(int survived)
Object * array_buffers_list() const
MUST_USE_RESULT AllocationResult AllocateFixedTypedArray(int length, ExternalArrayType array_type, PretenureFlag pretenure)
MUST_USE_RESULT AllocationResult AllocateJSObject(JSFunction *constructor, PretenureFlag pretenure=NOT_TENURED, AllocationSite *allocation_site=NULL)
MUST_USE_RESULT AllocationResult CopyCode(Code *code, Vector< byte > reloc_info)
MUST_USE_RESULT AllocationResult AllocateFillerObject(int size, bool double_align, AllocationSpace space)
static void ScavengeObjectSlow(HeapObject **p, HeapObject *object)
void EnsureFromSpaceIsCommitted()
MUST_USE_RESULT AllocationResult AllocatePropertyCell()
RootListIndex RootIndexForEmptyExternalArray(ElementsKind kind)
void ClearNormalizedMapCaches()
static const int kOldSurvivalRateLowThreshold
MUST_USE_RESULT AllocationResult AllocateExternalArray(int length, ExternalArrayType array_type, void *external_pointer, PretenureFlag pretenure)
intptr_t OldGenerationAllocationLimit(intptr_t old_gen_size, int freed_global_handles)
void UpdateReferencesInExternalStringTable(ExternalStringTableUpdaterCallback updater_func)
intptr_t get_max_alive_after_gc()
void CompletelyClearInstanceofCache()
void RemoveGCEpilogueCallback(v8::Isolate::GCEpilogueCallback callback)
intptr_t max_alive_after_gc_
double semi_space_copied_rate_
bool ConfigureHeapDefault()
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc)
void CollectAllAvailableGarbage(const char *gc_reason=NULL)
void set_native_contexts_list(Object *object)
intptr_t promoted_objects_size_
MUST_USE_RESULT AllocationResult CopyFixedDoubleArrayWithMap(FixedDoubleArray *src, Map *map)
size_t full_codegen_bytes_generated_
int64_t PromotedExternalMemorySize()
OldSpace * old_data_space_
void InitializeAllocationSitesScratchpad()
bool InToSpace(Object *object)
void PrintAlloctionsHash()
void ReportStatisticsBeforeGC()
void set_encountered_weak_collections(Object *weak_collection)
bool InOldPointerSpace(Address address)
MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray()
LargeObjectSpace * lo_space_
DependentCode * LookupWeakObjectToCodeDependency(Handle< Object > obj)
Map * MapForFixedTypedArray(ExternalArrayType array_type)
intptr_t PromotedSpaceSizeOfObjects()
int high_survival_rate_period_length_
int NotifyContextDisposed()
MarkCompactCollector * mark_compact_collector()
void UpdateSurvivalStatistics(int start_new_space_size)
void ProcessArrayBuffers(WeakObjectRetainer *retainer)
intptr_t semi_space_copied_object_size_
bool InSpace(Address addr, AllocationSpace space)
void MarkMapPointersAsEncoded(bool encoded)
bool MaximumSizeScavenge()
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index)
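Most of the Heap::Allocate*/Copy* entries listed above return a MUST_USE_RESULT AllocationResult rather than a raw pointer. A plausible reading of that contract is sketched below with stand-in types: an allocation either yields an object or signals "retry", and the caller is expected to collect garbage in the failing space before trying again. The names and the retry loop are illustrative assumptions, not the real implementation.

#include <cstdio>

struct FakeHeapObject { int size_in_bytes; };

// Stand-in for AllocationResult: either an object or a retry marker.
struct Result {
  FakeHeapObject* object;
  bool retry;
  bool To(FakeHeapObject** out) const {
    if (retry) return false;
    *out = object;
    return true;
  }
};

// Pretend allocator: fails when asked to, to model an exhausted space.
Result AllocateRaw(int size_in_bytes, bool space_exhausted) {
  static FakeHeapObject arena[16];
  static int next = 0;
  if (space_exhausted || next == 16) return Result{nullptr, true};
  arena[next].size_in_bytes = size_in_bytes;
  return Result{&arena[next++], false};
}

FakeHeapObject* AllocateWithGCRetry(int size_in_bytes) {
  for (int attempt = 0; attempt < 2; ++attempt) {
    Result r = AllocateRaw(size_in_bytes, /*space_exhausted=*/attempt == 0);
    FakeHeapObject* obj;
    if (r.To(&obj)) return obj;
    // The real heap would run a GC for the failing space here before retrying.
  }
  return nullptr;  // give up; the real heap reports out-of-memory instead
}

int main() {
  FakeHeapObject* obj = AllocateWithGCRetry(32);
  std::printf("allocated: %s\n", obj ? "yes" : "no");
  return 0;
}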
void Step(intptr_t allocated, CompletionAction action, bool force_marking=false)
void RecordWrites(HeapObject *obj)
void NotifyOfHighPromotionRate()
void Start(CompactionFlag flag=ALLOW_COMPACTION)
void PrepareForScavenge()
void UpdateMarkingDequeAfterScavenge()
void UncommitMarkingDeque()
static int SizeOfMarkedObject(HeapObject *object)
static bool IsMarked(HeapObject *object)
HandleScopeImplementer * handle_scope_implementer()
StackGuard * stack_guard()
void PrintStack(StringStream *accumulator)
DeoptimizerData * deoptimizer_data()
HeapProfiler * heap_profiler() const
MemoryAllocator * memory_allocator()
KeyedLookupCache * keyed_lookup_cache()
void Iterate(ObjectVisitor *v)
DescriptorLookupCache * descriptor_lookup_cache()
ContextSlotCache * context_slot_cache()
void IterateDeferredHandles(ObjectVisitor *visitor)
OptimizingCompilerThread * optimizing_compiler_thread()
ThreadManager * thread_manager()
base::RandomNumberGenerator * random_number_generator()
CompilationCache * compilation_cache()
CpuProfiler * cpu_profiler() const
EternalHandles * eternal_handles()
GlobalHandles * global_handles()
Bootstrapper * bootstrapper()
static const int kNonWeakFieldsEndOffset
static const int kCodeEntryOffset
bool HasFixedTypedArrayElements()
void InitializeBody(Map *map, Object *pre_allocated_value, Object *filler_value)
static const int kHeaderSize
static const int kInitialMaxFastElementArray
void initialize_elements()
int GetInternalFieldCount()
ElementsKind GetElementsKind()
bool HasFastDoubleElements()
bool HasExternalArrayElements()
static const int kHashMask
static const int kCapacityMask
int Lookup(Handle< Map > map, Handle< Name > name)
static const int kMapHashShift
static const int kEntriesPerBucket
static const int kNotFound
static int Hash(Handle< Map > map, Handle< Name > name)
void Update(Handle< Map > map, Handle< Name > name, int field_offset)
int field_offsets_[kLength]
virtual intptr_t SizeOfObjects()
intptr_t MaximumCommittedMemory()
MUST_USE_RESULT AllocationResult AllocateRaw(int object_size, Executability executable)
bool CanAllocateSize(int size)
intptr_t CommittedMemory()
bool SlowContains(Address addr)
size_t CommittedPhysicalMemory()
static const int kIsExtensible
static const int kPointerFieldsEndOffset
static const int kPointerFieldsBeginOffset
CodeFlusher * code_flusher()
bool sweeping_in_progress()
bool reduce_memory_footprint_
void EnableCodeFlushing(bool enable)
void EnsureSweepingCompleted()
bool is_code_flushing_enabled() const
void TransferMark(Address old_start, Address new_start)
bool SetUp(intptr_t max_capacity, intptr_t capacity_executable)
void Free(MemoryChunk *chunk)
intptr_t SizeExecutable()
bool IsOutsideAllocatedSpace(const void *address) const
void set_owner(Space *space)
bool Contains(Address addr)
static void IncrementLiveBytesFromMutator(Address address, int by)
void set_next_chunk(MemoryChunk *next)
static void IncrementLiveBytesFromGC(Address address, int by)
void set_scan_on_scavenge(bool scan)
static const int kBodyOffset
static MemoryChunk * FromAddress(Address a)
MemoryChunk * next_chunk() const
void set_size(size_t size)
void SetArea(Address area_start, Address area_end)
static Address & Address_at(Address addr)
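MemoryChunk::FromAddress and NewSpacePage::FromLimit in the list above both depend on chunks being allocated at a power-of-two alignment, so the owning chunk header can be recovered by masking the low bits of any interior address. A minimal sketch of that masking trick follows; the 1 MB page size is an assumption for illustration only.

#include <cstdint>

constexpr uintptr_t kAssumedPageSize = uintptr_t{1} << 20;  // assumed 1 MB
constexpr uintptr_t kPageAlignmentMask = kAssumedPageSize - 1;

struct FakeChunkHeader { /* flags, owner space, live bytes, ... */ };

// Any address inside a page maps back to the page's header by clearing the
// low bits; this only works because chunks are aligned to kAssumedPageSize.
inline FakeChunkHeader* ChunkFromAddress(uintptr_t interior_address) {
  return reinterpret_cast<FakeChunkHeader*>(interior_address &
                                            ~kPageAlignmentMask);
}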
static const int kHashShift
static const int kEmptyHashField
void set_hash_field(uint32_t value)
static const uint32_t kHashBitMask
static const int kIsNotArrayIndexMask
static int GetBuiltinsCount()
static bool IsAtEnd(Address addr)
static NewSpacePage * FromLimit(Address address_limit)
NewSpacePage * next_page() const
static void VisitPointer(Heap *heap, Object **p)
bool IsAtMaximumCapacity()
void LowerInlineAllocationLimit(intptr_t step)
void RecordPromotion(HeapObject *obj)
intptr_t CommittedMemory()
size_t CommittedPhysicalMemory()
void set_age_mark(Address mark)
void ResetAllocationInfo()
intptr_t MaximumCommittedMemory()
bool SetUp(int reserved_semispace_size_, int max_semi_space_size)
void UpdateInlineAllocationLimit(int size_in_bytes)
bool CommitFromSpaceIfNeeded()
void RecordAllocation(HeapObject *obj)
bool ToSpaceContains(Address address)
intptr_t inline_allocation_limit_step()
static void Initialize(Isolate *isolate, Handle< Oddball > oddball, const char *to_string, Handle< Object > to_number, byte kind)
static const byte kUndefined
static const byte kArgumentMarker
static const byte kException
static const byte kTheHole
static const byte kUninitialized
void AgeBufferedOsrJobs()
static const int kPageSize
static const int kMaxRegularHeapObjectSize
intptr_t CommittedMemory()
size_t CommittedPhysicalMemory()
intptr_t MaximumCommittedMemory()
virtual intptr_t SizeOfObjects()
MUST_USE_RESULT AllocationResult AllocateRaw(int size_in_bytes)
void set_type(HeapType *value, WriteBarrierMode mode=UPDATE_WRITE_BARRIER)
static const int kStringOffset
static const int kPatternOffset
static const int kArrayEntriesPerCacheEntry
static Object * Lookup(Heap *heap, String *key_string, Object *key_pattern, ResultsCacheType type)
@ REGEXP_MULTIPLE_INDICES
@ STRING_SPLIT_SUBSTRINGS
static void Enter(Isolate *isolate, Handle< String > key_string, Handle< Object > key_pattern, Handle< FixedArray > value_array, ResultsCacheType type)
static const int kRegExpResultsCacheSize
static const int kArrayOffset
static void Clear(FixedArray *cache)
static void InitializeIntrinsicFunctionNames(Isolate *isolate, Handle< NameDictionary > dict)
static void FreeArrayBuffer(Isolate *isolate, JSArrayBuffer *phantom_array_buffer)
void VisitPointers(Object **start, Object **end)
void VisitPointer(Object **p)
ScavengeVisitor(Heap *heap)
void ScavengePointer(Object **p)
ScavengeWeakObjectRetainer(Heap *heap)
virtual Object * RetainAs(Object *object)
static void Visit(Map *map, HeapObject **slot, HeapObject *object)
static void VisitSpecialized(Map *map, HeapObject **slot, HeapObject *object)
static VisitorDispatchTable< ScavengingCallback > * GetTable()
static void EvacuateObject(Map *map, HeapObject **slot, HeapObject *object, int object_size)
static void EvacuateFixedFloat64Array(Map *map, HeapObject **slot, HeapObject *object)
static void EvacuateSeqOneByteString(Map *map, HeapObject **slot, HeapObject *object)
static void RecordCopiedObject(Heap *heap, HeapObject *obj)
static VisitorDispatchTable< ScavengingCallback > table_
INLINE(static void MigrateObject(Heap *heap, HeapObject *source, HeapObject *target, int size))
static bool PromoteObject(Map *map, HeapObject **slot, HeapObject *object, int object_size)
static void EvacuateSeqTwoByteString(Map *map, HeapObject **slot, HeapObject *object)
static void EvacuateByteArray(Map *map, HeapObject **slot, HeapObject *object)
static void EvacuateShortcutCandidate(Map *map, HeapObject **slot, HeapObject *object)
static bool SemiSpaceCopyObject(Map *map, HeapObject **slot, HeapObject *object, int object_size)
static void EvacuateFixedDoubleArray(Map *map, HeapObject **slot, HeapObject *object)
static void EvacuateFixedArray(Map *map, HeapObject **slot, HeapObject *object)
static void EvacuateJSFunction(Map *map, HeapObject **slot, HeapObject *object)
static void EvacuateFixedTypedArray(Map *map, HeapObject **slot, HeapObject *object)
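The ScavengingVisitor entries above (SemiSpaceCopyObject, PromoteObject, EvacuateObject) capture the core scavenge decision: keep a young object in new space if possible, and promote it to old space only once it has already survived or the semispace copy cannot succeed. In V8 the survival test is address-based (ShouldBePromoted compares against the semispace age mark); the sketch below substitutes an explicit survival counter purely for illustration.

enum class Destination { kNewSpace, kOldSpace };

struct FakeObject {
  int scavenges_survived;  // stand-in for the address-vs-age-mark test
  int size_in_bytes;
};

// Promote objects that already survived a scavenge (or that are too large to
// copy); everything else is copied to the other semispace and gets another
// chance to die young.
Destination ChooseEvacuationTarget(const FakeObject& o,
                                   int max_semispace_object_size) {
  if (o.scavenges_survived >= 1) return Destination::kOldSpace;
  if (o.size_in_bytes > max_semispace_object_size) return Destination::kOldSpace;
  return Destination::kNewSpace;
}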
static void AssertValidRange(Address from, Address to)
static int SizeFor(int length)
static const int kMaxSize
static int SizeFor(int length)
static const int kMaxSize
static const int kNumberOfSpaces
static void Iterate(Isolate *isolate, ObjectVisitor *visitor)
static const int kNumberOfPreallocatedSpaces
static const int kAlignedSize
static Smi * FromInt(int value)
static bool HaveASnapshotToStartFrom()
ObjectIterator * CreateIterator()
ObjectIterator * iterator_
HeapObjectCallback size_func_
SpaceIterator(Heap *heap)
AllocationSpace identity()
static VisitorId GetVisitorId(int instance_type, int instance_size)
Object *** start_of_current_page_
void Callback(MemoryChunk *page, StoreBufferEvent event)
MemoryChunk * current_page_
StoreBuffer * store_buffer_
void EnterDirectlyIntoStoreBuffer(Address addr)
void IteratePointersToNewSpace(ObjectSlotCallback callback)
void EnsureSpace(intptr_t space_needed)
static const int kStoreBufferSize
void SetTop(Object ***top)
static MUST_USE_RESULT MaybeHandle< String > InternalizeStringIfExists(Isolate *isolate, Handle< String > string)
static void WriteToFlat(String *source, sinkchar *sink, int from, int to)
static const int kEmptyStringHash
static const int32_t kMaxOneByteCharCode
static const int kMaxLength
void set_length(int value)
void InitializeBody(int object_size)
void Iterate(ObjectVisitor *v)
List< HeapObject * > marking_stack_
void VisitPointers(Object **start, Object **end)
void MarkReachableObjects()
UnreachableObjectsFilter(Heap *heap)
bool SkipObject(HeapObject *object)
DisallowHeapAllocation no_allocation_
~UnreachableObjectsFilter()
static void FatalProcessOutOfMemory(const char *location, bool take_snapshot=false)
Callback GetVisitorById(StaticVisitorBase::VisitorId id)
void Register(StaticVisitorBase::VisitorId id, Callback callback)
void CopyFrom(VisitorDispatchTable *other)
void RegisterSpecializations()
static MUST_USE_RESULT Handle< WeakHashTable > Put(Handle< WeakHashTable > table, Handle< Object > key, Handle< Object > value)
#define PROFILE(IsolateGetter, Call)
#define OBJECT_POINTER_ALIGN(value)
#define ALLOCATE_PARTIAL_MAP(instance_type, size, field_name)
#define STRING_TYPE_ELEMENT(type, size, name, camel_name)
#define MAKE_CASE(NAME, Name, name)
#define ADJUST_LAST_TIME_OBJECT_COUNT(name)
#define ALLOCATE_EXTERNAL_ARRAY_MAP(Type, type, TYPE, ctype, size)
#define ARRAY_TYPE_TO_ROOT_INDEX(Type, type, TYPE, ctype, size)
#define ALLOCATE_EMPTY_FIXED_TYPED_ARRAY(Type, type, TYPE, ctype, size)
#define STRUCT_TABLE_ELEMENT(NAME, Name, name)
#define ELEMENT_KIND_TO_ROOT_INDEX(Type, type, TYPE, ctype, size)
#define ALLOCATE_EMPTY_EXTERNAL_ARRAY(Type, type, TYPE, ctype, size)
#define ALLOCATE_FIXED_TYPED_ARRAY_MAP(Type, type, TYPE, ctype, size)
#define ALLOCATE_MAP(instance_type, size, field_name)
#define UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(space)
#define CONSTANT_STRING_ELEMENT(name, contents)
#define UPDATE_COUNTERS_FOR_SPACE(space)
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)
#define ALLOCATE_VARSIZE_MAP(instance_type, field_name)
#define INTERNALIZED_STRING_LIST(V)
#define LOG(isolate, Call)
#define CHECK_EQ(expected, value)
#define DCHECK_LE(v1, v2)
#define DCHECK_GE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
#define LAZY_MUTEX_INITIALIZER
uint32_t RoundUpToPowerOfTwo32(uint32_t value)
void CallOnce(OnceType *once, NoArgFunction init_func)
void PrintPID(const char *format,...)
template Object * VisitWeakList< JSArrayBuffer >(Heap *heap, Object *list, WeakObjectRetainer *retainer)
static HeapObject * EnsureDoubleAligned(Heap *heap, HeapObject *object, int size)
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
@ VISIT_ALL_IN_SWEEP_NEWSPACE
const intptr_t kCodeAlignment
@ USE_DEFAULT_MINIMUM_CAPACITY
void(* ObjectSlotCallback)(HeapObject **from, HeapObject *to)
static base::LazyMutex checkpoint_object_stats_mutex
void MemsetPointer(T **dest, U *value, int counter)
static void InitializeScavengingVisitorsTables()
void MemMove(void *dest, const void *src, size_t size)
kSerializedDataOffset Object
@ FIXED_DOUBLE_ARRAY_TYPE
@ MUTABLE_HEAP_NUMBER_TYPE
@ SHARED_FUNCTION_INFO_TYPE
@ JS_BUILTINS_OBJECT_TYPE
@ CONSTANT_POOL_ARRAY_TYPE
@ POLYMORPHIC_CODE_CACHE_TYPE
@ TERMINAL_FAST_ELEMENTS_KIND
static void WriteTwoByteData(Vector< const char > vector, uint16_t *chars, int len)
@ kStoreBufferScanningPageEvent
@ kStoreBufferStartScanningPagesEvent
Handle< T > handle(T *t, Isolate *isolate)
const int kVariableSizeSentinel
V8_DECLARE_ONCE(initialize_gc_once)
String *(* ExternalStringTableUpdaterCallback)(Heap *heap, Object **pointer)
const intptr_t kObjectAlignment
const Address kFromSpaceZapValue
static bool IsShortcutCandidate(int type)
static const int kInvalidEnumCacheSentinel
const uint32_t kFreeListZapValue
static LifetimePosition Max(LifetimePosition a, LifetimePosition b)
INLINE(static HeapObject *EnsureDoubleAligned(Heap *heap, HeapObject *object, int size))
void PrintF(const char *format,...)
static bool IsUnscavengedHeapObject(Heap *heap, Object **p)
@ LOGGING_AND_PROFILING_ENABLED
@ LOGGING_AND_PROFILING_DISABLED
int(* HeapObjectCallback)(HeapObject *obj)
static void ForFixedTypedArray(ExternalArrayType array_type, int *element_size, ElementsKind *element_kind)
kFeedbackVectorOffset flag
Vector< const uint8_t > OneByteVector(const char *data, int length)
static void RoundUp(Vector< char > buffer, int *length, int *decimal_point)
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
template Object * VisitWeakList< Context >(Heap *heap, Object *list, WeakObjectRetainer *retainer)
static void InitializeGCOnce()
bool IsAligned(T value, U alignment)
template Object * VisitWeakList< AllocationSite >(Heap *heap, Object *list, WeakObjectRetainer *retainer)
void MemCopy(void *dest, const void *src, size_t size)
static void WriteOneByteData(Vector< const char > vector, uint8_t *chars, int len)
const intptr_t kDoubleAlignment
void CopyBytes(uint8_t *target, uint8_t *source)
const intptr_t kDoubleAlignmentMask
static bool AbortIncrementalMarkingAndCollectGarbage(Heap *heap, AllocationSpace space, const char *gc_reason=NULL)
Debugger support for the V8 JavaScript engine.
void(* GCPrologueCallback)(GCType type, GCCallbackFlags flags)
GCType
Applications can register callback functions which will be called before and after a garbage collection (see the sketch below).
@ kGCTypeMarkSweepCompact
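The GCPrologueCallback/GCEpilogueCallback typedefs and the GCType values above are the embedder-facing hooks. A hedged usage example follows; it assumes a v8::Isolate* already exists, that <v8.h> is on the include path, and it filters for full mark-sweep/compact cycles only.

#include <cstdio>
#include <v8.h>

static void OnGCPrologue(v8::Isolate* isolate, v8::GCType type,
                         v8::GCCallbackFlags flags) {
  std::fprintf(stderr, "GC starting (type=%d)\n", static_cast<int>(type));
}

static void OnGCEpilogue(v8::Isolate* isolate, v8::GCType type,
                         v8::GCCallbackFlags flags) {
  std::fprintf(stderr, "GC finished (type=%d)\n", static_cast<int>(type));
}

// Register the pair; scavenges are excluded via the GCType filter argument.
void InstallGCCallbacks(v8::Isolate* isolate) {
  isolate->AddGCPrologueCallback(OnGCPrologue, v8::kGCTypeMarkSweepCompact);
  isolate->AddGCEpilogueCallback(OnGCEpilogue, v8::kGCTypeMarkSweepCompact);
}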
#define INSTANCE_TYPE_LIST(V)
#define STRING_TYPE_LIST(V)
#define CODE_KIND_LIST(V)
#define FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(V)
size_t incremental_marking_speed_in_bytes_per_ms
size_t available_new_space_memory
bool sweeping_in_progress
bool incremental_marking_stopped
bool can_start_incremental_marking
size_t new_space_capacity
size_t scavenge_speed_in_bytes_per_ms
size_t mark_compact_speed_in_bytes_per_ms
size_t new_space_allocation_throughput_in_bytes_per_ms
#define T(name, string, precedence)