HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}


const int HeapEntry::kNoEntry = -1;
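// While the snapshot is being built, an edge records only the index of its
// target entry (to_index_); once all entries exist, FillChildren() below
// rewrites each edge to hold a direct HeapEntry pointer instead.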
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }
void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}
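// New edges are appended to one flat list owned by the snapshot and only
// counted here; the per-entry children vectors are carved out of that list
// later, in HeapSnapshot::FillChildren().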
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
                  ' ', prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    default: return "???";
  }
}
namespace {

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
    : profiler_(profiler),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  // ...
}
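// The STATIC_ASSERTs pin the in-memory size of HeapGraphEdge and HeapEntry
// for 32- and 64-bit builds; both structs are stored by value in big flat
// Lists, so an accidental size change would show up directly in the memory
// cost of taking a snapshot.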
HeapEntry* HeapSnapshot::AddRootEntry() {
  DCHECK(root_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
  DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              VisitorSynchronization::kTagNames[tag],
                              id, 0, 0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}
void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  DCHECK(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
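// Two-pass wiring: the first loop hands every entry a contiguous slice of
// the shared children() vector sized by its edge count, and the second loop
// drops each edge into its owner's slice while also swapping the stored
// target index for a real HeapEntry pointer.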
class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1) return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr, const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // A dummy first entry guarantees that every real record in entries_map_
  // has a non-zero value, so MoveObject() can tell "absent" from "index 0".
  entries_.Add(EntryInfo(0, NULL, 0));
}
bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // An untracked object moved over a tracked one: the object at the target
    // address has died, so drop its stale entry.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry =
        entries_map_.Lookup(to, ComputePointerHash(to), true);
    if (to_entry->value != NULL) {
      // The target address was already tracked; invalidate the old record so
      // RemoveDeadEntries() never sees two EntryInfos with the same address.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    // Size can change during an object's life, so keep entries_ consistent.
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry =
      entries_map_.Lookup(addr, ComputePointerHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index =
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  return entry_info.id;
}
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  HashMap::Entry* entry =
      entries_map_.Lookup(addr, ComputePointerHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr, entry_info.size, size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  return id;
}
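// Ids are handed out in increments of kObjectIdStep and identify an object
// across snapshots: a moved object keeps its EntryInfo (and therefore its
// id), and only genuinely new objects consume a fresh id.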
void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n",
             obj->address(), obj->Size(), obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}
struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
      : obj(obj),
        expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object   : %p %6d. Next address is %p\n",
             obj->address(), obj->Size(), obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size, obj->address(), obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object      : %p %6d. Next address is %p\n",
             obj->address(), expected_size, obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}
int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects;
  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry = entries_map_.Lookup(
        obj->address(), ComputePointerHash(obj->address()), false);
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
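// Stats are streamed as (time_interval_index, count, size) triples, one per
// interval whose totals changed since the previous push, flushed to the
// embedder in chunks of the stream's preferred size.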
void HeapObjectsMap::RemoveDeadEntries() {
  // Compact entries_ in place, keeping only records touched since the last
  // update (slot 0 is the reserved dummy entry).
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
}
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
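// HeapEntriesMap keys arbitrary "heap things" (HeapObject pointers, native
// RetainedObjectInfo pointers, backing stores) by address and stores the
// index of the corresponding snapshot entry, letting the V8 and native
// explorers share a single thing-to-entry table.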
HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}
class SnapshotFiller {
 public:
  // ...
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }

  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }

  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }

  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }

  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(type, names_->GetName(index), child_entry);
  }
  // ...
};
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    // ... Cell, PropertyCell, Foreign, Oddball instance types elided.
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / " #Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  // ...
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if ((p & kTag) == kTag) {
      intptr_t p_untagged = (p & ~kTag) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  // ...
};
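// Fields already reported through a named, internal, or weak edge are
// temporarily pointer-tagged via MarkVisitedField(); the indexed sweep above
// unmarks them and skips emitting a duplicate hidden edge for the same slot.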
bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2.

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}


bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;

  if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else {
    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
  }
  return true;
}


void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}
void V8HeapExplorer::ExtractJSObjectReferences(int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(obj, entry,
                             heap_->prototype_string(), proto_or_map,
                             NULL,
                             JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(obj, entry,
                             heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(obj, entry,
                             "initial_map", proto_or_map,
                             JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_proxy", global_obj->global_proxy(),
                         GlobalObject::kGlobalProxyOffset);
  } else if (obj->IsJSArrayBufferView()) {
    // ... buffer and weak-list references of the view.
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}


void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
                                                   JSCollection* collection) {
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSCollection::kTableOffset);
}
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context-allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  // ... closure, previous, extension and global slots.
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    // ... optimized/deoptimized code lists and next context link.
  }
#undef EXTRACT_CONTEXT_FIELD
}
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);
    if (FLAG_collect_maps && map->CanTransition()) {
      if (!transitions->IsSimpleTransition()) {
        if (transitions->HasPrototypeTransitions()) {
          FixedArray* prototype_transitions =
              transitions->GetPrototypeTransitions();
          MarkAsWeakContainer(prototype_transitions);
          TagObject(prototype_transitions, "(prototype transitions)");
          SetInternalReference(transitions, transitions_entry,
                               "prototype_transitions",
                               prototype_transitions);
        }
        MarkAsWeakContainer(transitions);
      }
    }
    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  TagObject(map->dependent_code(), "(dependent code)");
  MarkAsWeakContainer(map->dependent_code());
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}
1365 "name", shared->name(),
1368 "code", shared->code(),
1370 TagObject(shared->scope_info(),
"(function scope info)");
1372 "scope_info", shared->scope_info(),
1375 "instance_class_name", shared->instance_class_name(),
1378 "script", shared->script(),
1380 const char* construct_stub_name =
name ?
1382 "(construct stub code)";
1383 TagObject(shared->construct_stub(), construct_stub_name);
1385 "construct_stub", shared->construct_stub(),
1388 "function_data", shared->function_data(),
1391 "debug_info", shared->debug_info(),
1394 "inferred_name", shared->inferred_name(),
1397 "optimized_code_map", shared->optimized_code_map(),
1400 "feedback_vector", shared->feedback_vector(),
1408 "source", script->source(),
1411 "name", script->name(),
1414 "context_data", script->context_data(),
1416 TagObject(script->line_ends(),
"(script line ends)");
1418 "line_ends", script->line_ends(),
void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  if (accessor_info->IsDeclaredAccessorInfo()) {
    DeclaredAccessorInfo* declared_accessor_info =
        DeclaredAccessorInfo::cast(accessor_info);
    SetInternalReference(declared_accessor_info, entry, "descriptor",
                         declared_accessor_info->descriptor(),
                         DeclaredAccessorInfo::kDescriptorOffset);
  } else if (accessor_info->IsExecutableAccessorInfo()) {
    ExecutableAccessorInfo* executable_accessor_info =
        ExecutableAccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         ExecutableAccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         ExecutableAccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         ExecutableAccessorInfo::kDataOffset);
  }
}
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}
1482 "(%s code)", CodeStub::MajorName(
1483 CodeStub::GetMajorKey(code),
true)));
1490 TagObject(code->relocation_info(),
"(code relocation info)");
1492 "relocation_info", code->relocation_info(),
1495 "handler_table", code->handler_table(),
1497 TagObject(code->deoptimization_data(),
"(code deopt data)");
1499 "deoptimization_data", code->deoptimization_data(),
1501 if (code->
kind() == Code::FUNCTION) {
1507 "gc_metadata", code->gc_metadata(),
1512 if (code->
kind() == Code::OPTIMIZED_FUNCTION) {
1514 "next_code_link", code->next_code_link(),
HeapEntry* JSArrayBufferDataEntryAllocator::AllocateEntry(HeapThing ptr) {
  return explorer_->AddEntry(static_cast<Address>(ptr),
                             HeapEntry::kNative,
                             "system / JSArrayBufferData",
                             size_);
}


void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view(),
                   JSArrayBuffer::kWeakFirstViewOffset);
  // Set up a reference to the native backing-store memory.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}
void V8HeapExplorer::ExtractFixedArrayReferences(int entry,
                                                 FixedArray* array) {
  bool is_weak = weak_containers_.Contains(array);
  for (int i = 0, l = array->length(); i < l; ++i) {
    if (is_weak) {
      SetWeakReference(array, entry, i, array->get(i),
                       array->OffsetOfElementAt(i));
    } else {
      SetInternalReference(array, entry, i, array->get(i),
                           array->OffsetOfElementAt(i));
    }
  }
}
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d", i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i));
    }
  }
}


void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);
          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value, NULL,
                                   js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry,
                                   "hidden_properties", value,
                                   js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            FieldIndex field_index =
                FieldIndex::ForDescriptor(js_obj->map(), i);
            Object* value = js_obj->RawFastPropertyAt(field_index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        // ... CONSTANT and CALLBACKS descriptors handled similarly.
        default:
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // A global object's slow property may live in a PropertyCell.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        } else {
          SetPropertyReference(js_obj, entry, Name::cast(k), value);
        }
      }
    }
  }
}
bool V8HeapExplorer::ExtractAccessorPairProperty(
    JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
  if (!callback_obj->IsAccessorPair()) return false;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, Name::cast(key), getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, Name::cast(key), setter, "set %s");
  }
  return true;
}
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        DCHECK(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}


void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(js_obj, entry, i, o,
                         js_obj->GetInternalFieldOffset(i));
  }
}
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // ... fall through; "Object" is kept as the generic constructor name.
  }
  return object->constructor_name();
}


HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}
  void FillReferences(V8HeapExplorer* explorer) {
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }
bool V8HeapExplorer::IterateAndExtractReferences(SnapshotFiller* filler) {
  filler_ = filler;
  // ... synthetic root references are created first.
  bool interrupted =
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
  filler_ = NULL;
  return interrupted ? false : progress_->ProgressReport(true);
}


template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (interrupted) continue;
    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }
    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}
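// References are extracted in two passes: pass 1 covers every object except
// FixedArrays and, as a side effect, tags arrays (e.g. via
// MarkAsWeakContainer); pass 2 then processes only FixedArrays, once it is
// already known whether each array should emit weak or internal edges.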
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->empty_byte_array()
      && object != heap_->empty_fixed_array()
      && object != heap_->empty_descriptor_array()
      && object != heap_->fixed_array_map()
      && object != heap_->cell_map()
      && object != heap_->global_property_cell_map()
      && object != heap_->shared_function_info_map()
      && object != heap_->free_space_map()
      && object != heap_->one_pointer_filler_map()
      && object != heap_->two_pointer_filler_map();
}
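// Filters out objects that would only add noise to the graph: oddballs and
// the handful of canonical singleton maps and empty arrays that nearly every
// heap object references.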
void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable, parent_entry,
                               names_->GetName(reference_name), child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut, parent_entry,
                               reference_name, child_entry);
  }
}


void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement, parent_entry,
                                 index, child_entry);
  }
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry,
                               reference_name, child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal, parent_entry,
                               names_->GetName(index), child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden, parent_entry,
                                 index, child_entry);
  }
}


void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      const char* reference_name,
                                      Object* child_obj,
                                      int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak, parent_entry,
                               reference_name, child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name =
        name_format_string != NULL && reference_name->IsString()
            ? names_->GetFormatted(
                  name_format_string,
                  String::cast(reference_name)->ToCString(
                      DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get())
            : names_->GetName(reference_name);
    filler_->SetNamedReference(type, parent_entry, name, child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  DCHECK(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(HeapGraphEdge::kShortcut,
                                      snapshot_->root()->index(), child_entry);
}


void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    // ... named, indexed, or weak edge from the gc subroot, depending on
    // whether the child is a known strong root.
    // Add a shortcut to the JS global object reference at snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object = false;
        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}


void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  // ...
};


// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
2234 for (
int i = 0, l = enumerator.
count();
i < l; ++
i) {
2281 const char*
name = elements != -1
2289 size != -1 ?
static_cast<int>(
size) : 0,
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  // Record objects that are joined into ObjectGroups.
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  // Record objects that are not in ObjectGroups, but have a class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}


void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = isolate_;
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    DCHECK(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(HeapGraphEdge::kInternal,
                                 parent_entry, "native", child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}


List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>();
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}


bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }
  // ...
};


NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = names_->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      isolate_->heap()->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}


void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(HeapGraphEdge::kInternal,
                                      group_entry->index(), child_entry);
}


void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(), "native", info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(), wrapper_entry);
}


void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p,
                                                uint16_t class_id) {
  if (class_id == v8::HeapProfiler::kPersistentHandleNoClassId) return;
  v8::RetainedObjectInfo* info =
      isolate_->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}
HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}


bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // The profiler assumes objects that survive a full GC are reachable from
  // the roots; weakly reachable objects break that assumption, so GC is run
  // twice as a stop-gap.
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  debug_heap->Verify();
#endif

  SetProgressTotal(2);  // 2 passes.

#ifdef VERIFY_HEAP
  debug_heap->Verify();
#endif

  snapshot_->AddSyntheticRootEntries();
  if (!FillReferences()) return false;
  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}


bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}


void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}
class OutputStreamWriter {
 public:
  // ...
  void AddSubstring(const char* s, int n) {
    if (n > 0) {
      DCHECK(static_cast<size_t>(n) <= strlen(s));
      const char* s_end = s + n;
      while (s < s_end) {
        int s_chunk_size =
            Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
        DCHECK(s_chunk_size > 0);
        MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
        s += s_chunk_size;
        chunk_pos_ += s_chunk_size;
        MaybeWriteChunk();
      }
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // ...

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0.
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  // ...
};
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  DCHECK(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}


int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), StringHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
template<typename T>
static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
  STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned.
  int number_of_digits = 0;
  T t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = static_cast<int>(value % 10);
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}


template<typename T>
static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
  return utoa_impl(unsigned_value, buffer, buffer_pos);
}
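// utoa() writes the decimal digits of an unsigned value directly into the
// serializer's scratch buffer (no allocation, no locale handling) and
// returns the new write position; it is the workhorse behind every number
// in the JSON output below.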
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0.
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    DCHECK(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}


void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0.
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned
      + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}


void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}
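// Each node row is the flat tuple (type, name string id, object id,
// self size, edge count, trace node id) and each edge row is
// (type, name-or-index, target node offset). For instance, a kObject node
// might serialize as "3,17,42,24,2,0\n" under the field order spelled out
// in the "meta" section below (values illustrative).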
2902 #define JSON_A(s) "[" s "]"
2903 #define JSON_O(s) "{" s "}"
2904 #define JSON_S(s) "\"" s "\""
2912 JSON_S(
"trace_node_id"))
","
2925 JSON_S(
"concatenated string")
","
2926 JSON_S(
"sliced string"))
","
2935 JSON_S(
"name_or_index")
","
2946 JSON_S(
"string_or_number")
","
2949 JSON_S(
"function_id")
","
2951 JSON_S(
"script_name")
","
2957 JSON_S(
"function_info_index")
","
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}


void HeapSnapshotJSONSerializer::SerializeTraceTree() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  AllocationTraceTree* traces = tracker->trace_tree();
  SerializeTraceNode(traces->root());
}
void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0.
  const int kBufferSize =
      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned + 4 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  buffer_pos = utoa(node->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer[buffer_pos++] = '[';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());

  Vector<AllocationTraceNode*> children = node->children();
  for (int i = 0; i < children.length(); i++) {
    if (i > 0) {
      writer_->AddCharacter(',');
    }
    SerializeTraceNode(children[i]);
  }
  writer_->AddCharacter(']');
}


// 0-based positions are converted to 1-based; a missing position becomes 0.
static int SerializePosition(int position, const Vector<char>& buffer,
                             int buffer_pos) {
  if (position == -1) {
    buffer[buffer_pos++] = '0';
  } else {
    DCHECK(position >= 0);
    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
  }
  return buffer_pos;
}
void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0.
  const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  const List<AllocationTracker::FunctionInfo*>& list =
      tracker->function_info_list();
  bool first_entry = true;
  for (int i = 0; i < list.length(); i++) {
    AllocationTracker::FunctionInfo* info = list[i];
    int buffer_pos = 0;
    if (first_entry) {
      first_entry = false;
    } else {
      buffer[buffer_pos++] = ',';
    }
    buffer_pos = utoa(info->function_id, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // The cast is safe because the script id is a non-negative Smi.
    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
                      buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      // ... \b, \f, \n, \r, \t, quote, and backslash get two-character
      // escape sequences.
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into a \uXXXX UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}
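// String ids start at 1 (index 0 is the "<dummy>" placeholder), so the
// table can be emitted positionally: a string's id doubles as its index
// into the serialized "strings" array.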