heap-snapshot-generator.cc
// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap-snapshot-generator.h"

#include "src/allocation-tracker.h"
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug.h"
#include "src/heap-profiler.h"
#include "src/types.h"

namespace v8 {
namespace internal {


HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
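// Note on the two-phase design (inferred from the code above and
// FillChildren() below): edges are first recorded with entry *indices*,
// because entries_ may still grow and reallocate while the snapshot is
// being filled. Sketch of the protocol:
//
//   HeapGraphEdge edge(HeapGraphEdge::kInternal, "map", from_index, to_index);
//   snapshot->edges().Add(edge);              // phase 1: indices only
//   ...all entries allocated...
//   edge.ReplaceToIndexWithEntry(snapshot);   // phase 2: stable pointer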


const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
                  ' ', prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}


const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    default: return "???";
  }
}


// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace
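// Why this matters: a snapshot of a large heap holds millions of
// HeapGraphEdge/HeapEntry values, so their sizes are pinned per pointer
// width (12/28 bytes on 32-bit, 24/40 bytes on 64-bit). The HeapSnapshot
// constructor below STATIC_ASSERTs against these constants, so a field
// added by accident becomes a compile error instead of silent memory bloat.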


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
                           const char* title,
                           unsigned uid)
    : profiler_(profiler),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}


void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}


void HeapSnapshot::AddSyntheticRootEntries() {
  AddRootEntry();
  AddGcRootsEntry();
  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    AddGcSubrootEntry(tag, id);
    id += HeapObjectsMap::kObjectIdStep;
  }
  DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
}


HeapEntry* HeapSnapshot::AddRootEntry() {
  DCHECK(root_index_ == HeapEntry::kNoEntry);
  DCHECK(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  DCHECK(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
  DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              VisitorSynchronization::kTagNames[tag], id, 0, 0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}


void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  DCHECK(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}
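// All children live in a single flat array shared by every entry: the first
// loop hands each entry a contiguous slice (set_children_index() returns
// where the next slice starts), the second loop distributes edges into the
// slices. For a 3-entry snapshot with 2, 0 and 1 outgoing edges the shared
// children() array is laid out roughly as:
//
//   index:  0         1         2
//   owner:  [entry 0] [entry 0] [entry 2]
//
// so an entry's children are simply a (start, count) view into that array.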


class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}


// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;


static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we see no difference between two
  // cases: it has an entry with NULL as the value, or it has created a new
  // entry on the fly with NULL as the default value. With such a dummy
  // element we have a guarantee that all entries_map_ entries will have a
  // value field greater than 0. This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
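// Id layout implied by the constants above (kObjectIdStep == 2, so every id
// below is odd):
//
//   1                        -> synthetic root     (kInternalRootObjectId)
//   3                        -> "(GC roots)"       (kGcRootsObjectId)
//   5, 7, ...                -> one id per GC subroot tag
//   kFirstAvailableObjectId+ -> real heap objects, handed out via next_id_
//
// Embedder objects get even ids from GenerateId() below (note its trailing
// "id << 1"), so the two id spaces can never collide.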


bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
                                                   true);
    if (to_entry->value != NULL) {
      // We found an existing entry with the to address for an old object.
      // Without this operation we would have two EntryInfo's with the same
      // value in the addr field. That is bad, because later RemoveDeadEntries
      // would remove one of these entries together with the corresponding
      // entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}
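// MoveObject is what keeps ids stable across moving GCs. A purely
// illustrative sketch of the wiring (OnObjectMoved is hypothetical; the
// real notification goes through the heap profiler's move-event hook):
//
//   void OnObjectMoved(Address from, Address to, int size,
//                      HeapObjectsMap* map) {
//     map->MoveObject(from, to, size);  // same id, new address
//   }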


void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
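// Because the lookup is keyed on address but returns the stored id, an
// object keeps one id for its whole lifetime, however many snapshots are
// taken. Expected behaviour in a sketch:
//
//   SnapshotObjectId id1 = map->FindOrAddEntry(addr, size);  // assigns next_id_
//   SnapshotObjectId id2 = map->FindOrAddEntry(addr, size);  // same entry
//   DCHECK(id1 == id2 && (id1 & 1) == 1);  // heap-object ids stay odd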


void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}


namespace {


struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
      : obj(obj),
        expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace


int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry = entries_map_.Lookup(
        obj->address(), ComputePointerHash(obj->address()), false);
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}


SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}
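// Each TimeInterval remembers the value of next_id_ at the moment it was
// opened, so "entries with id < time_interval.id" are exactly the objects
// allocated before the interval closed. Only intervals whose live count or
// byte size changed are re-sent; a stream might receive, for example:
//
//   HeapStatsUpdate(0, 210, 8400)  // interval 0: now 210 objects, 8400 bytes
//   HeapStatsUpdate(3, 57, 1296)   // interval 3 shrank after a GC
//
// which is the delta format v8::OutputStream::WriteHeapStatsChunk consumes.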


void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
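// A classic in-place compaction: entries whose 'accessed' bit was set by the
// latest heap walk slide left over the dead ones, and the address-to-index
// hash map is repointed as each survivor moves. Schematically:
//
//   entries_ before: [dummy][A*][B ][C*]   (* = accessed by the last walk)
//   entries_ after:  [dummy][A ][C ]       (B dropped, C shifted left)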


SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
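// Embedder-provided ("native") objects have no stable heap address, so the
// id is derived from content instead: the embedder hash, the label and the
// element count are mixed, then shifted left once. The "<< 1" makes the
// result even, keeping it disjoint from the odd ids served by next_id_.
// E.g. an info with GetHash() == 0x21, label "Window" and no elements maps
// to (0x21 ^ Hash("Window")) << 1, which is always even.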


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}


HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}


class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};
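// SnapshotFiller is the thin glue explorers write through: pairing each
// HeapThing with its entry index in HeapEntriesMap guarantees that the same
// object seen twice resolves to the same HeapEntry. A typical call sequence
// inside an extractor looks like (sketch):
//
//   HeapEntry* child = filler_->FindOrAddEntry(obj, this);
//   filler_->SetNamedReference(HeapGraphEdge::kInternal,
//                              parent_entry_index, "map", child);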


const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}


class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    DCHECK(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    DCHECK(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }

  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }

  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
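// The visited-mark borrows the two low bits of the field's own value: heap
// pointers normally carry tag 01 (kHeapObjectTag), so forcing both low bits
// on (kTag == 3) is unambiguous and needs no side table. Worked example:
//
//   0x12345671   ordinary tagged heap pointer (low bits 01)
//   0x12345673   after MarkVisitedField: low bits forced to 11
//   0x12345671   after CheckVisitedAndUnmark: (p & ~3) | kHeapObjectTag
//
// Extractors mark every field they report explicitly; whatever is still
// unmarked when VisitPointers runs is emitted as a generic hidden edge.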


bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}


bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;

  if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else {
    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
  }
  return true;
}


void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
                  == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                  == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_proxy", global_obj->global_proxy(),
                         GlobalObject::kGlobalProxyOffset);
    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                  4 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
    SetWeakReference(view, entry, "weak_next", view->weak_next(),
                     JSArrayBufferView::kWeakNextOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}


void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
                                                   JSCollection* collection) {
  SetInternalReference(collection, entry, "table", collection->table(),
                       JSCollection::kTableOffset);
}


void V8HeapExplorer::ExtractJSWeakCollectionReferences(
    int entry, JSWeakCollection* collection) {
  MarkAsWeakContainer(collection->table());
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSWeakCollection::kTableOffset);
}


void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
        FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
        FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD)
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 5 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}
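// For a strong slot such as CLOSURE_INDEX the macro expands to (sketch):
//
//   SetInternalReference(context, entry, "closure",
//       context->get(Context::CLOSURE_INDEX),
//       FixedArray::OffsetOfElementAt(Context::CLOSURE_INDEX));
//
// Slots at or past FIRST_WEAK_SLOT (except MAP_CACHE_INDEX) take the
// SetWeakReference branch instead, so links the GC treats as weak also
// appear as weak edges in the snapshot.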


void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);

    if (FLAG_collect_maps && map->CanTransition()) {
      if (!transitions->IsSimpleTransition()) {
        if (transitions->HasPrototypeTransitions()) {
          FixedArray* prototype_transitions =
              transitions->GetPrototypeTransitions();
          MarkAsWeakContainer(prototype_transitions);
          TagObject(prototype_transitions, "(prototype transitions)");
          SetInternalReference(transitions, transitions_entry,
                               "prototype_transitions", prototype_transitions);
        }
        // TODO(alph): transitions keys are strong links.
        MarkAsWeakContainer(transitions);
      }
    }

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  MarkAsWeakContainer(map->code_cache());
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  TagObject(map->dependent_code(), "(dependent code)");
  MarkAsWeakContainer(map->dependent_code());
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}


void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}


void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  if (accessor_info->IsDeclaredAccessorInfo()) {
    DeclaredAccessorInfo* declared_accessor_info =
        DeclaredAccessorInfo::cast(accessor_info);
    SetInternalReference(declared_accessor_info, entry, "descriptor",
                         declared_accessor_info->descriptor(),
                         DeclaredAccessorInfo::kDescriptorOffset);
  } else if (accessor_info->IsExecutableAccessorInfo()) {
    ExecutableAccessorInfo* executable_accessor_info =
        ExecutableAccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         ExecutableAccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         ExecutableAccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         ExecutableAccessorInfo::kDataOffset);
  }
}


void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}


void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}


void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}


void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
        "(%s code)", CodeStub::MajorName(
            CodeStub::GetMajorKey(code), true)));
  }
}


void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  SetInternalReference(code, entry,
                       "constant_pool", code->constant_pool(),
                       Code::kConstantPoolOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}


void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
}


void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}


void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type(),
                       PropertyCell::kTypeOffset);
  MarkAsWeakContainer(cell->dependent_code());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next as it is not visited by the StaticVisitor,
  // and we're not very interested in the weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::BodyDescriptor::kEndOffset);
}


class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 public:
  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
      : size_(size)
      , explorer_(explorer) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
    return explorer_->AddEntry(
        static_cast<Address>(ptr),
        HeapEntry::kNative, "system / JSArrayBufferData", size_);
  }
 private:
  size_t size_;
  V8HeapExplorer* explorer_;
};


void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
                   JSArrayBuffer::kWeakNextOffset);
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view(),
                   JSArrayBuffer::kWeakFirstViewOffset);
  // Set up a reference to the native memory backing_store object.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}


void V8HeapExplorer::ExtractFixedArrayReferences(int entry,
                                                 FixedArray* array) {
  bool is_weak = weak_containers_.Contains(array);
  for (int i = 0, l = array->length(); i < l; ++i) {
    if (is_weak) {
      SetWeakReference(array, entry,
                       i, array->get(i), array->OffsetOfElementAt(i));
    } else {
      SetInternalReference(array, entry,
                           i, array->get(i), array->OffsetOfElementAt(i));
    }
  }
}


void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}


void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          Representation r = descs->GetDetails(i).representation();
          if (r.IsSmi() || r.IsDouble()) break;
          int index = descs->GetFieldIndex(i);

          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            FieldIndex field_index =
                FieldIndex::ForDescriptor(js_obj->map(), i);
            Object* value = js_obj->RawFastPropertyAt(field_index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstant(i));
          break;
        case CALLBACKS:
          ExtractAccessorPairProperty(
              js_obj, entry,
              descs->GetKey(i), descs->GetValue(i));
          break;
        case NORMAL:  // only in slow mode
          UNREACHABLE();
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
        SetPropertyReference(js_obj, entry, String::cast(k), value);
      }
    }
  }
}


bool V8HeapExplorer::ExtractAccessorPairProperty(
    JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
  if (!callback_obj->IsAccessorPair()) return false;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
  }
  return true;
}


void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        DCHECK(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}


void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}


String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // TODO(verwaest): Try to get object.constructor.name in this case.
    // This requires handlification of the V8HeapExplorer.
  }
  return object->constructor_name();
}


HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}


class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      !is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};


bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;

  // Create references to the synthetic roots.
  SetRootGcRootsReference();
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
  }

  // Make sure builtin code objects get their builtin tags
  // first. Otherwise a particular JSFunction object could set
  // its custom name to a generic builtin.
  RootsReferencesExtractor extractor(heap_);
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);

  // We have to do two passes as sometimes FixedArrays are used
  // to weakly hold their items, and it's impossible to distinguish
  // between these cases without processing the array owner first.
  bool interrupted =
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();

  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  filler_ = NULL;
  return progress_->ProgressReport(true);
}
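// Why two passes: whether a FixedArray's slots are weak depends on its
// owner (e.g. the backing table of a JSWeakMap). Pass 1 visits owners and
// calls MarkAsWeakContainer() on such arrays; only then can pass 2 choose,
// per array, between weak and internal edges:
//
//   pass 1: ExtractJSWeakCollectionReferences()
//             -> MarkAsWeakContainer(weak_map->table())
//   pass 2: ExtractFixedArrayReferences()
//             -> weak_containers_.Contains(table) ? weak : internal edges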


template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (interrupted) continue;

    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }

    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}


bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->empty_byte_array()
      && object != heap_->empty_fixed_array()
      && object != heap_->empty_descriptor_array()
      && object != heap_->fixed_array_map()
      && object != heap_->cell_map()
      && object != heap_->global_property_cell_map()
      && object != heap_->shared_function_info_map()
      && object != heap_->free_space_map()
      && object != heap_->one_pointer_filler_map()
      && object != heap_->two_pointer_filler_map();
}


void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               names_->GetName(reference_name),
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}


void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}
1970 
1971 
1972 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1973                                           int parent_entry,
1974  const char* reference_name,
1975  Object* child_obj,
1976  int field_offset) {
1977  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1978  HeapEntry* child_entry = GetEntry(child_obj);
1979  if (child_entry == NULL) return;
1980  if (IsEssentialObject(child_obj)) {
1981     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1982                                parent_entry,
1983  reference_name,
1984  child_entry);
1985  }
1986  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1987 }
1988 
1989 
1990 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1991                                           int parent_entry,
1992  int index,
1993  Object* child_obj,
1994  int field_offset) {
1995  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1996  HeapEntry* child_entry = GetEntry(child_obj);
1997  if (child_entry == NULL) return;
1998  if (IsEssentialObject(child_obj)) {
1999     filler_->SetNamedReference(HeapGraphEdge::kInternal,
2000                                parent_entry,
2001  names_->GetName(index),
2002  child_entry);
2003  }
2004  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2005 }
2006 
2007 
2008 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2009                                         int parent_entry,
2010  int index,
2011  Object* child_obj) {
2012  DCHECK(parent_entry == GetEntry(parent_obj)->index());
2013  HeapEntry* child_entry = GetEntry(child_obj);
2014  if (child_entry != NULL && IsEssentialObject(child_obj)) {
2015     filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2016                                  parent_entry,
2017  index,
2018  child_entry);
2019  }
2020 }
2021 
2022 
2023 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2024                                       int parent_entry,
2025  const char* reference_name,
2026  Object* child_obj,
2027  int field_offset) {
2028  DCHECK(parent_entry == GetEntry(parent_obj)->index());
2029  HeapEntry* child_entry = GetEntry(child_obj);
2030  if (child_entry == NULL) return;
2031  if (IsEssentialObject(child_obj)) {
2032     filler_->SetNamedReference(HeapGraphEdge::kWeak,
2033                                parent_entry,
2034  reference_name,
2035  child_entry);
2036  }
2037  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2038 }
2039 
2040 
2041 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2042                                       int parent_entry,
2043  int index,
2044  Object* child_obj,
2045  int field_offset) {
2046  DCHECK(parent_entry == GetEntry(parent_obj)->index());
2047  HeapEntry* child_entry = GetEntry(child_obj);
2048  if (child_entry == NULL) return;
2049  if (IsEssentialObject(child_obj)) {
2050     filler_->SetNamedReference(HeapGraphEdge::kWeak,
2051                                parent_entry,
2052  names_->GetFormatted("%d", index),
2053  child_entry);
2054  }
2055  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2056 }
2057 
2058 
2059 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2060                                           int parent_entry,
2061  Name* reference_name,
2062  Object* child_obj,
2063  const char* name_format_string,
2064  int field_offset) {
2065  DCHECK(parent_entry == GetEntry(parent_obj)->index());
2066  HeapEntry* child_entry = GetEntry(child_obj);
2067  if (child_entry != NULL) {
2068  HeapGraphEdge::Type type =
2069  reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2070       ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2071   const char* name = name_format_string != NULL && reference_name->IsString()
2072       ? names_->GetFormatted(
2073             name_format_string,
2074             String::cast(reference_name)->ToCString(
2075                 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2076       names_->GetName(reference_name);
2077 
2078  filler_->SetNamedReference(type,
2079  parent_entry,
2080  name,
2081  child_entry);
2082  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2083  }
2084 }
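// Note: per the type selection above, symbols and non-empty string names
// produce kProperty edges, while a property whose name is the empty
// string degrades to a kInternal edge.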
2085 
2086 
2087 void V8HeapExplorer::SetRootGcRootsReference() {
2088   filler_->SetIndexedAutoIndexReference(
2089       HeapGraphEdge::kElement,
2090       snapshot_->root()->index(),
2091  snapshot_->gc_roots());
2092 }
2093 
2094 
2095 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2096   HeapEntry* child_entry = GetEntry(child_obj);
2097  DCHECK(child_entry != NULL);
2098   filler_->SetNamedAutoIndexReference(
2099       HeapGraphEdge::kShortcut,
2100       snapshot_->root()->index(),
2101  child_entry);
2102 }
2103 
2104 
2105 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2106   filler_->SetIndexedAutoIndexReference(
2107       HeapGraphEdge::kElement,
2108       snapshot_->gc_roots()->index(),
2109  snapshot_->gc_subroot(tag));
2110 }
2111 
2112 
2113 void V8HeapExplorer::SetGcSubrootReference(
2114     VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2115  HeapEntry* child_entry = GetEntry(child_obj);
2116  if (child_entry != NULL) {
2117  const char* name = GetStrongGcSubrootName(child_obj);
2118  if (name != NULL) {
2119       filler_->SetNamedReference(
2120           HeapGraphEdge::kInternal,
2121           snapshot_->gc_subroot(tag)->index(),
2122  name,
2123  child_entry);
2124  } else {
2125  if (is_weak) {
2126         filler_->SetNamedAutoIndexReference(
2127             HeapGraphEdge::kWeak,
2128             snapshot_->gc_subroot(tag)->index(),
2129  child_entry);
2130  } else {
2131         filler_->SetIndexedAutoIndexReference(
2132             HeapGraphEdge::kElement,
2133             snapshot_->gc_subroot(tag)->index(),
2134  child_entry);
2135  }
2136  }
2137 
2138  // Add a shortcut to JS global object reference at snapshot root.
2139  if (child_obj->IsNativeContext()) {
2140  Context* context = Context::cast(child_obj);
2141  GlobalObject* global = context->global_object();
2142  if (global->IsJSGlobalObject()) {
2143  bool is_debug_object = false;
2144  is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
2145  if (!is_debug_object && !user_roots_.Contains(global)) {
2146  user_roots_.Insert(global);
2147  SetUserGlobalReference(global);
2148  }
2149  }
2150  }
2151  }
2152 }
2153 
2154 
2155 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2156   if (strong_gc_subroot_names_.is_empty()) {
2157 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2158 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2159     STRONG_ROOT_LIST(ROOT_NAME)
2160 #undef ROOT_NAME
2161 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2162     STRUCT_LIST(STRUCT_MAP_NAME)
2163 #undef STRUCT_MAP_NAME
2164 #define STRING_NAME(name, str) NAME_ENTRY(name)
2165     INTERNALIZED_STRING_LIST(STRING_NAME)
2166 #undef STRING_NAME
2167 #undef NAME_ENTRY
2168     CHECK(!strong_gc_subroot_names_.is_empty());
2169   }
2170  return strong_gc_subroot_names_.GetTag(object);
2171 }
2172 
2173 
2174 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2175  if (IsEssentialObject(obj)) {
2176  HeapEntry* entry = GetEntry(obj);
2177  if (entry->name()[0] == '\0') {
2178  entry->set_name(tag);
2179  }
2180  }
2181 }
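// Note: TagObject() only names entries whose name is still empty, so a
// tag applied earlier (such as a builtin code tag set before heap
// iteration) is never overwritten by a later, more generic one.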
2182 
2183 
2184 void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2185   if (IsEssentialObject(object) && object->IsFixedArray()) {
2186  weak_containers_.Insert(object);
2187  }
2188 }
2189 
2190 
2191 class GlobalObjectsEnumerator : public ObjectVisitor {
2192  public:
2193  virtual void VisitPointers(Object** start, Object** end) {
2194  for (Object** p = start; p < end; p++) {
2195  if ((*p)->IsNativeContext()) {
2196  Context* context = Context::cast(*p);
2197  JSObject* proxy = context->global_proxy();
2198  if (proxy->IsJSGlobalProxy()) {
2199  Object* global = proxy->map()->prototype();
2200  if (global->IsJSGlobalObject()) {
2201  objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2202  }
2203  }
2204  }
2205  }
2206  }
2207  int count() { return objects_.length(); }
2208  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2209 
2210  private:
2211   List<Handle<JSGlobalObject> > objects_;
2212 };
2213 
2214 
2215 // Modifies heap. Must not be run during heap traversal.
2216 void V8HeapExplorer::TagGlobalObjects() {
2217   Isolate* isolate = heap_->isolate();
2218  HandleScope scope(isolate);
2219  GlobalObjectsEnumerator enumerator;
2220  isolate->global_handles()->IterateAllRoots(&enumerator);
2221  const char** urls = NewArray<const char*>(enumerator.count());
2222  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2223     if (global_object_name_resolver_) {
2224       HandleScope scope(isolate);
2225       Handle<JSGlobalObject> global_obj = enumerator.at(i);
2226       urls[i] = global_object_name_resolver_->GetName(
2227           Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2228  } else {
2229  urls[i] = NULL;
2230  }
2231  }
2232 
2233  DisallowHeapAllocation no_allocation;
2234  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2235  objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2236  }
2237 
2238  DeleteArray(urls);
2239 }
2240 
2241 
2242 class GlobalHandlesExtractor : public ObjectVisitor {
2243  public:
2244   explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2245       : explorer_(explorer) {}
2246   virtual ~GlobalHandlesExtractor() {}
2247   virtual void VisitPointers(Object** start, Object** end) {
2248     UNREACHABLE();
2249   }
2250   virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2251     explorer_->VisitSubtreeWrapper(p, class_id);
2252   }
2253  private:
2254   NativeObjectsExplorer* explorer_;
2255 };
2256 
2257 
2258 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2259  public:
2260   BasicHeapEntriesAllocator(
2261       HeapSnapshot* snapshot,
2262       HeapEntry::Type entries_type)
2263     : snapshot_(snapshot),
2264       names_(snapshot_->profiler()->names()),
2265       heap_object_map_(snapshot_->profiler()->heap_object_map()),
2266       entries_type_(entries_type) {
2267   }
2268   virtual HeapEntry* AllocateEntry(HeapThing ptr);
2269  private:
2270   HeapSnapshot* snapshot_;
2271   StringsStorage* names_;
2272   HeapObjectsMap* heap_object_map_;
2273   HeapEntry::Type entries_type_;
2274 };
2275 
2276 
2277 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2278   v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2279  intptr_t elements = info->GetElementCount();
2280  intptr_t size = info->GetSizeInBytes();
2281  const char* name = elements != -1
2282  ? names_->GetFormatted(
2283  "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2284  : names_->GetCopy(info->GetLabel());
2285  return snapshot_->AddEntry(
2286  entries_type_,
2287  name,
2288       heap_object_map_->GenerateId(info),
2289       size != -1 ? static_cast<int>(size) : 0,
2290  0);
2291 }
2292 
2293 
2294 NativeObjectsExplorer::NativeObjectsExplorer(
2295     HeapSnapshot* snapshot,
2296     SnapshottingProgressReportingInterface* progress)
2297     : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2298  snapshot_(snapshot),
2299  names_(snapshot_->profiler()->names()),
2300  progress_(progress),
2301  embedder_queried_(false),
2302  objects_by_info_(RetainedInfosMatch),
2303  native_groups_(StringsMatch),
2304  filler_(NULL) {
2305   synthetic_entries_allocator_ =
2306       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2307   native_entries_allocator_ =
2308       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2309 }
2310 
2311 
2312 NativeObjectsExplorer::~NativeObjectsExplorer() {
2313   for (HashMap::Entry* p = objects_by_info_.Start();
2314  p != NULL;
2315  p = objects_by_info_.Next(p)) {
2316  v8::RetainedObjectInfo* info =
2317  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2318  info->Dispose();
2319  List<HeapObject*>* objects =
2320  reinterpret_cast<List<HeapObject*>* >(p->value);
2321  delete objects;
2322  }
2323  for (HashMap::Entry* p = native_groups_.Start();
2324  p != NULL;
2325  p = native_groups_.Next(p)) {
2326  v8::RetainedObjectInfo* info =
2327  reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2328  info->Dispose();
2329  }
2330   delete synthetic_entries_allocator_;
2331   delete native_entries_allocator_;
2332 }
2333 
2334 
2335 int NativeObjectsExplorer::EstimateObjectsCount() {
2336   FillRetainedObjects();
2337   return objects_by_info_.occupancy();
2338 }
2339 
2340 
2341 void NativeObjectsExplorer::FillRetainedObjects() {
2342   if (embedder_queried_) return;
2343  Isolate* isolate = isolate_;
2344  const GCType major_gc_type = kGCTypeMarkSweepCompact;
2345  // Record objects that are joined into ObjectGroups.
2346  isolate->heap()->CallGCPrologueCallbacks(
2347       major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2348   List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2349  for (int i = 0; i < groups->length(); ++i) {
2350  ObjectGroup* group = groups->at(i);
2351  if (group->info == NULL) continue;
2352     List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2353     for (size_t j = 0; j < group->length; ++j) {
2354  HeapObject* obj = HeapObject::cast(*group->objects[j]);
2355  list->Add(obj);
2356  in_groups_.Insert(obj);
2357  }
2358  group->info = NULL; // Acquire info object ownership.
2359  }
2360  isolate->global_handles()->RemoveObjectGroups();
2361  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2362  // Record objects that are not in ObjectGroups, but have class ID.
2363  GlobalHandlesExtractor extractor(this);
2364  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2365  embedder_queried_ = true;
2366 }
2367 
2368 
2369 void NativeObjectsExplorer::FillImplicitReferences() {
2370   Isolate* isolate = isolate_;
2371  List<ImplicitRefGroup*>* groups =
2372  isolate->global_handles()->implicit_ref_groups();
2373  for (int i = 0; i < groups->length(); ++i) {
2374  ImplicitRefGroup* group = groups->at(i);
2375  HeapObject* parent = *group->parent;
2376  int parent_entry =
2377         filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2378     DCHECK(parent_entry != HeapEntry::kNoEntry);
2379  Object*** children = group->children;
2380  for (size_t j = 0; j < group->length; ++j) {
2381  Object* child = *children[j];
2382  HeapEntry* child_entry =
2383           filler_->FindOrAddEntry(child, native_entries_allocator_);
2384       filler_->SetNamedReference(
2385           HeapGraphEdge::kInternal,
2386           parent_entry,
2387  "native",
2388  child_entry);
2389  }
2390  }
2391   isolate->global_handles()->RemoveImplicitRefGroups();
2392 }
2393 
2394 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2395     v8::RetainedObjectInfo* info) {
2396  HashMap::Entry* entry =
2397  objects_by_info_.Lookup(info, InfoHash(info), true);
2398  if (entry->value != NULL) {
2399  info->Dispose();
2400  } else {
2401  entry->value = new List<HeapObject*>(4);
2402  }
2403  return reinterpret_cast<List<HeapObject*>* >(entry->value);
2404 }
2405 
2406 
2407 bool NativeObjectsExplorer::IterateAndExtractReferences(
2408     SnapshotFiller* filler) {
2409  filler_ = filler;
2410   FillRetainedObjects();
2411   FillImplicitReferences();
2412   if (EstimateObjectsCount() > 0) {
2413  for (HashMap::Entry* p = objects_by_info_.Start();
2414  p != NULL;
2415  p = objects_by_info_.Next(p)) {
2416  v8::RetainedObjectInfo* info =
2417  reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2418  SetNativeRootReference(info);
2419  List<HeapObject*>* objects =
2420  reinterpret_cast<List<HeapObject*>* >(p->value);
2421  for (int i = 0; i < objects->length(); ++i) {
2422  SetWrapperNativeReferences(objects->at(i), info);
2423  }
2424  }
2425     SetRootNativeRootsReference();
2426   }
2427  filler_ = NULL;
2428  return true;
2429 }
2430 
2431 
2432 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2433  public:
2434  explicit NativeGroupRetainedObjectInfo(const char* label)
2435  : disposed_(false),
2436  hash_(reinterpret_cast<intptr_t>(label)),
2437  label_(label) {
2438  }
2439 
2440   virtual ~NativeGroupRetainedObjectInfo() {}
2441   virtual void Dispose() {
2442  CHECK(!disposed_);
2443  disposed_ = true;
2444  delete this;
2445  }
2446  virtual bool IsEquivalent(RetainedObjectInfo* other) {
2447  return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2448  }
2449  virtual intptr_t GetHash() { return hash_; }
2450  virtual const char* GetLabel() { return label_; }
2451 
2452  private:
2453   bool disposed_;
2454   intptr_t hash_;
2455  const char* label_;
2456 };
2457 
2458 
2459 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2460     const char* label) {
2461  const char* label_copy = names_->GetCopy(label);
2462   uint32_t hash = StringHasher::HashSequentialString(
2463       label_copy,
2464  static_cast<int>(strlen(label_copy)),
2465  isolate_->heap()->HashSeed());
2466  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2467  hash, true);
2468  if (entry->value == NULL) {
2469  entry->value = new NativeGroupRetainedObjectInfo(label);
2470  }
2471  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2472 }
2473 
2474 
2475 void NativeObjectsExplorer::SetNativeRootReference(
2476     v8::RetainedObjectInfo* info) {
2477  HeapEntry* child_entry =
2478       filler_->FindOrAddEntry(info, native_entries_allocator_);
2479   DCHECK(child_entry != NULL);
2480   NativeGroupRetainedObjectInfo* group_info =
2481       FindOrAddGroupInfo(info->GetGroupLabel());
2482   HeapEntry* group_entry =
2483       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2484   filler_->SetNamedAutoIndexReference(
2485       HeapGraphEdge::kInternal,
2486       group_entry->index(),
2487  child_entry);
2488 }
2489 
2490 
2491 void NativeObjectsExplorer::SetWrapperNativeReferences(
2492     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2493  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2494  DCHECK(wrapper_entry != NULL);
2495  HeapEntry* info_entry =
2496       filler_->FindOrAddEntry(info, native_entries_allocator_);
2497   DCHECK(info_entry != NULL);
2498   filler_->SetNamedReference(HeapGraphEdge::kInternal,
2499                              wrapper_entry->index(),
2500                              "native",
2501                              info_entry);
2502   filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2503                                         info_entry->index(),
2504  wrapper_entry);
2505 }
2506 
2507 
2508 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2509   for (HashMap::Entry* entry = native_groups_.Start();
2510  entry;
2511  entry = native_groups_.Next(entry)) {
2512  NativeGroupRetainedObjectInfo* group_info =
2513  static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2514  HeapEntry* group_entry =
2515       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2516   DCHECK(group_entry != NULL);
2517   filler_->SetIndexedAutoIndexReference(
2518       HeapGraphEdge::kElement,
2519       snapshot_->root()->index(),
2520  group_entry);
2521  }
2522 }
2523 
2524 
2525 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2526   if (in_groups_.Contains(*p)) return;
2527  Isolate* isolate = isolate_;
2528  v8::RetainedObjectInfo* info =
2529  isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2530  if (info == NULL) return;
2531  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2532 }
2533 
2534 
2535 HeapSnapshotGenerator::HeapSnapshotGenerator(
2536     HeapSnapshot* snapshot,
2537     v8::ActivityControl* control,
2538     v8::HeapProfiler::ObjectNameResolver* resolver,
2539     Heap* heap)
2540  : snapshot_(snapshot),
2541  control_(control),
2542  v8_heap_explorer_(snapshot_, this, resolver),
2543  dom_explorer_(snapshot_, this),
2544  heap_(heap) {
2545 }
2546 
2547 
2548 bool HeapSnapshotGenerator::GenerateSnapshot() {
2549   v8_heap_explorer_.TagGlobalObjects();
2550 
2551  // TODO(1562) Profiler assumes that any object that is in the heap after
2552  // full GC is reachable from the root when computing dominators.
2553  // This is not true for weakly reachable objects.
2554  // As a temporary solution we call GC twice.
2555   heap_->CollectAllGarbage(
2556       Heap::kMakeHeapIterableMask,
2557       "HeapSnapshotGenerator::GenerateSnapshot");
2558   heap_->CollectAllGarbage(
2559       Heap::kMakeHeapIterableMask,
2560       "HeapSnapshotGenerator::GenerateSnapshot");
2561 
2562 #ifdef VERIFY_HEAP
2563  Heap* debug_heap = heap_;
2564  debug_heap->Verify();
2565 #endif
2566 
2567  SetProgressTotal(2); // 2 passes.
2568 
2569 #ifdef VERIFY_HEAP
2570  debug_heap->Verify();
2571 #endif
2572 
2573   snapshot_->AddSyntheticRootEntries();
2574 
2575  if (!FillReferences()) return false;
2576 
2577   snapshot_->FillChildren();
2578   snapshot_->RememberLastJSObjectId();
2579 
2580   progress_counter_ = progress_total_;
2581   if (!ProgressReport(true)) return false;
2582  return true;
2583 }
2584 
2585 
2586 void HeapSnapshotGenerator::ProgressStep() {
2587   ++progress_counter_;
2588 }
2589 
2590 
2591 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2592   const int kProgressReportGranularity = 10000;
2593  if (control_ != NULL
2594  && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2595  return
2596         control_->ReportProgressValue(progress_counter_, progress_total_) ==
2597             v8::ActivityControl::kContinue;
2598 }
2599  return true;
2600 }
2601 
2602 
2603 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2604  if (control_ == NULL) return;
2605  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2606  progress_total_ = iterations_count * (
2607       v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2608       dom_explorer_.EstimateObjectsCount());
2609   progress_counter_ = 0;
2610 }
2611 
2612 
2613 bool HeapSnapshotGenerator::FillReferences() {
2614   SnapshotFiller filler(snapshot_, &entries_);
2615   return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2616       && dom_explorer_.IterateAndExtractReferences(&filler);
2617 }
2618 
2619 
2620 template<int bytes> struct MaxDecimalDigitsIn;
2621 template<> struct MaxDecimalDigitsIn<4> {
2622  static const int kSigned = 11;
2623  static const int kUnsigned = 10;
2624 };
2625 template<> struct MaxDecimalDigitsIn<8> {
2626  static const int kSigned = 20;
2627  static const int kUnsigned = 20;
2628 };
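// Note: the constants above are worst-case decimal widths: a 32-bit
// signed value can print as "-2147483648" (11 chars) and an unsigned one
// as "4294967295" (10 digits); both 64-bit extremes,
// "-9223372036854775808" and "18446744073709551615", need 20 chars.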
2629 
2630 
2631 class OutputStreamWriter {
2632  public:
2633   explicit OutputStreamWriter(v8::OutputStream* stream)
2634       : stream_(stream),
2635         chunk_size_(stream->GetChunkSize()),
2636         chunk_(chunk_size_),
2637         chunk_pos_(0),
2638  aborted_(false) {
2639  DCHECK(chunk_size_ > 0);
2640  }
2641  bool aborted() { return aborted_; }
2642  void AddCharacter(char c) {
2643  DCHECK(c != '\0');
2644     DCHECK(chunk_pos_ < chunk_size_);
2645     chunk_[chunk_pos_++] = c;
2646  MaybeWriteChunk();
2647  }
2648  void AddString(const char* s) {
2649  AddSubstring(s, StrLength(s));
2650  }
2651  void AddSubstring(const char* s, int n) {
2652  if (n <= 0) return;
2653  DCHECK(static_cast<size_t>(n) <= strlen(s));
2654  const char* s_end = s + n;
2655  while (s < s_end) {
2656  int s_chunk_size =
2657  Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2658  DCHECK(s_chunk_size > 0);
2659  MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2660  s += s_chunk_size;
2661  chunk_pos_ += s_chunk_size;
2662  MaybeWriteChunk();
2663  }
2664  }
2665  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2666  void Finalize() {
2667  if (aborted_) return;
2668     DCHECK(chunk_pos_ < chunk_size_);
2669     if (chunk_pos_ != 0) {
2670  WriteChunk();
2671  }
2672  stream_->EndOfStream();
2673  }
2674 
2675  private:
2676  template<typename T>
2677  void AddNumberImpl(T n, const char* format) {
2678  // Buffer for the longest value plus trailing \0
2679  static const int kMaxNumberSize =
2680  MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2681  if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2682  int result = SNPrintF(
2683  chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2684  DCHECK(result != -1);
2685  chunk_pos_ += result;
2686  MaybeWriteChunk();
2687  } else {
2688       EmbeddedVector<char, kMaxNumberSize> buffer;
2689       int result = SNPrintF(buffer, format, n);
2690  USE(result);
2691  DCHECK(result != -1);
2692  AddString(buffer.start());
2693  }
2694  }
2695 
2696   void MaybeWriteChunk() {
2697     if (chunk_pos_ == chunk_size_) {
2698  WriteChunk();
2699  }
2700  }
2701  void WriteChunk() {
2702  if (aborted_) return;
2703     if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2704         v8::OutputStream::kAbort) aborted_ = true;
2705     chunk_pos_ = 0;
2706  }
2707 
2708   v8::OutputStream* stream_;
2709   int chunk_size_;
2710   ScopedVector<char> chunk_;
2711   int chunk_pos_;
2712   bool aborted_;
2713 };
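// Note: OutputStreamWriter accumulates output in one chunk of the size
// requested by the embedder and flushes it through
// v8::OutputStream::WriteAsciiChunk() whenever the chunk fills; once the
// embedder returns kAbort, all further writes become no-ops.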
2714 
2715 
2716 // type, name|index, to_node.
2717 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2718 // type, name, id, self_size, edge_count, trace_node_id.
2719 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2720 
2721 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2722   if (AllocationTracker* allocation_tracker =
2723       snapshot_->profiler()->allocation_tracker()) {
2724     allocation_tracker->PrepareForSerialization();
2725  }
2726  DCHECK(writer_ == NULL);
2727  writer_ = new OutputStreamWriter(stream);
2728  SerializeImpl();
2729  delete writer_;
2730  writer_ = NULL;
2731 }
2732 
2733 
2734 void HeapSnapshotJSONSerializer::SerializeImpl() {
2735   DCHECK(0 == snapshot_->root()->index());
2736  writer_->AddCharacter('{');
2737  writer_->AddString("\"snapshot\":{");
2738   SerializeSnapshot();
2739   if (writer_->aborted()) return;
2740  writer_->AddString("},\n");
2741  writer_->AddString("\"nodes\":[");
2742  SerializeNodes();
2743  if (writer_->aborted()) return;
2744  writer_->AddString("],\n");
2745  writer_->AddString("\"edges\":[");
2746  SerializeEdges();
2747  if (writer_->aborted()) return;
2748  writer_->AddString("],\n");
2749 
2750  writer_->AddString("\"trace_function_infos\":[");
2751   SerializeTraceNodeInfos();
2752   if (writer_->aborted()) return;
2753  writer_->AddString("],\n");
2754  writer_->AddString("\"trace_tree\":[");
2755   SerializeTraceTree();
2756   if (writer_->aborted()) return;
2757  writer_->AddString("],\n");
2758 
2759  writer_->AddString("\"strings\":[");
2760  SerializeStrings();
2761  if (writer_->aborted()) return;
2762  writer_->AddCharacter(']');
2763  writer_->AddCharacter('}');
2764  writer_->Finalize();
2765 }
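// Note: the resulting document is one JSON object of the form
// (illustrative, contents elided):
// {"snapshot":{...},"nodes":[...],"edges":[...],
//  "trace_function_infos":[...],"trace_tree":[...],"strings":[...]}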
2766 
2767 
2768 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2769   HashMap::Entry* cache_entry = strings_.Lookup(
2770  const_cast<char*>(s), StringHash(s), true);
2771  if (cache_entry->value == NULL) {
2772  cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2773  }
2774  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2775 }
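// Note: string ids are handed out from 1 upwards on first use; index 0
// of the serialized "strings" array is the "<dummy>" placeholder written
// by SerializeStrings(), so an id doubles as a direct array index.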
2776 
2777 
2778 namespace {
2779 
2780 template<size_t size> struct ToUnsigned;
2781 
2782 template<> struct ToUnsigned<4> {
2783  typedef uint32_t Type;
2784 };
2785 
2786 template<> struct ToUnsigned<8> {
2787  typedef uint64_t Type;
2788 };
2789 
2790 } // namespace
2791 
2792 
2793 template<typename T>
2794 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2795  STATIC_ASSERT(static_cast<T>(-1) > 0); // Check that T is unsigned
2796  int number_of_digits = 0;
2797  T t = value;
2798  do {
2799  ++number_of_digits;
2800  } while (t /= 10);
2801 
2802  buffer_pos += number_of_digits;
2803  int result = buffer_pos;
2804  do {
2805  int last_digit = static_cast<int>(value % 10);
2806  buffer[--buffer_pos] = '0' + last_digit;
2807  value /= 10;
2808  } while (value);
2809  return result;
2810 }
2811 
2812 
2813 template<typename T>
2814 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2815  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2816  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2817  return utoa_impl(unsigned_value, buffer, buffer_pos);
2818 }
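// Note: utoa_impl() counts the digits first, then fills the buffer
// back-to-front and returns the position just past the last digit, e.g.
// utoa(305u, buffer, 0) writes "305" and returns 3. No terminating '\0'
// is written; callers append their own separators.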
2819 
2820 
2821 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2822                                                bool first_edge) {
2823  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2824  static const int kBufferSize =
2825  MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2826   EmbeddedVector<char, kBufferSize> buffer;
2827   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2828  || edge->type() == HeapGraphEdge::kHidden
2829  ? edge->index() : GetStringId(edge->name());
2830  int buffer_pos = 0;
2831  if (!first_edge) {
2832  buffer[buffer_pos++] = ',';
2833  }
2834  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2835  buffer[buffer_pos++] = ',';
2836  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2837  buffer[buffer_pos++] = ',';
2838  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2839  buffer[buffer_pos++] = '\n';
2840  buffer[buffer_pos++] = '\0';
2841  writer_->AddString(buffer.start());
2842 }
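// Note: an edge serializes as the triple "type,name_or_index,to_node".
// For element and hidden edges the middle field is a numeric index,
// otherwise a string id; entry_index() scales the target node's index by
// kNodeFieldsCount, so to_node points directly into the flat "nodes"
// array.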
2843 
2844 
2845 void HeapSnapshotJSONSerializer::SerializeEdges() {
2846   List<HeapGraphEdge*>& edges = snapshot_->children();
2847   for (int i = 0; i < edges.length(); ++i) {
2848  DCHECK(i == 0 ||
2849  edges[i - 1]->from()->index() <= edges[i]->from()->index());
2850  SerializeEdge(edges[i], i == 0);
2851  if (writer_->aborted()) return;
2852  }
2853 }
2854 
2855 
2856 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2857   // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2858  static const int kBufferSize =
2859  5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2860  + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2861  + 6 + 1 + 1;
2862   EmbeddedVector<char, kBufferSize> buffer;
2863   int buffer_pos = 0;
2864  if (entry_index(entry) != 0) {
2865  buffer[buffer_pos++] = ',';
2866  }
2867  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2868  buffer[buffer_pos++] = ',';
2869  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2870  buffer[buffer_pos++] = ',';
2871  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2872  buffer[buffer_pos++] = ',';
2873  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2874  buffer[buffer_pos++] = ',';
2875  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2876  buffer[buffer_pos++] = ',';
2877  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2878  buffer[buffer_pos++] = '\n';
2879  buffer[buffer_pos++] = '\0';
2880  writer_->AddString(buffer.start());
2881 }
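// Note: a node row is "type,name,id,self_size,edge_count,trace_node_id";
// e.g. (illustrative) "3,7,42,64,2,0" would be an object-typed node whose
// name is string id 7. Every row after the first gets a leading comma.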
2882 
2883 
2884 void HeapSnapshotJSONSerializer::SerializeNodes() {
2885   List<HeapEntry>& entries = snapshot_->entries();
2886  for (int i = 0; i < entries.length(); ++i) {
2887  SerializeNode(&entries[i]);
2888  if (writer_->aborted()) return;
2889  }
2890 }
2891 
2892 
2893 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2894   writer_->AddString("\"title\":\"");
2895   writer_->AddString(snapshot_->title());
2896   writer_->AddString("\"");
2897  writer_->AddString(",\"uid\":");
2898   writer_->AddNumber(snapshot_->uid());
2899   writer_->AddString(",\"meta\":");
2900  // The object describing node serialization layout.
2901  // We use a set of macros to improve readability.
2902 #define JSON_A(s) "[" s "]"
2903 #define JSON_O(s) "{" s "}"
2904 #define JSON_S(s) "\"" s "\""
2905   writer_->AddString(JSON_O(
2906     JSON_S("node_fields") ":" JSON_A(
2907  JSON_S("type") ","
2908  JSON_S("name") ","
2909  JSON_S("id") ","
2910  JSON_S("self_size") ","
2911  JSON_S("edge_count") ","
2912  JSON_S("trace_node_id")) ","
2913  JSON_S("node_types") ":" JSON_A(
2914  JSON_A(
2915  JSON_S("hidden") ","
2916  JSON_S("array") ","
2917  JSON_S("string") ","
2918  JSON_S("object") ","
2919  JSON_S("code") ","
2920  JSON_S("closure") ","
2921  JSON_S("regexp") ","
2922  JSON_S("number") ","
2923  JSON_S("native") ","
2924  JSON_S("synthetic") ","
2925  JSON_S("concatenated string") ","
2926  JSON_S("sliced string")) ","
2927  JSON_S("string") ","
2928  JSON_S("number") ","
2929  JSON_S("number") ","
2930  JSON_S("number") ","
2931  JSON_S("number") ","
2932  JSON_S("number")) ","
2933  JSON_S("edge_fields") ":" JSON_A(
2934  JSON_S("type") ","
2935  JSON_S("name_or_index") ","
2936  JSON_S("to_node")) ","
2937  JSON_S("edge_types") ":" JSON_A(
2938  JSON_A(
2939  JSON_S("context") ","
2940  JSON_S("element") ","
2941  JSON_S("property") ","
2942  JSON_S("internal") ","
2943  JSON_S("hidden") ","
2944  JSON_S("shortcut") ","
2945  JSON_S("weak")) ","
2946  JSON_S("string_or_number") ","
2947  JSON_S("node")) ","
2948  JSON_S("trace_function_info_fields") ":" JSON_A(
2949  JSON_S("function_id") ","
2950  JSON_S("name") ","
2951  JSON_S("script_name") ","
2952  JSON_S("script_id") ","
2953  JSON_S("line") ","
2954  JSON_S("column")) ","
2955  JSON_S("trace_node_fields") ":" JSON_A(
2956  JSON_S("id") ","
2957  JSON_S("function_info_index") ","
2958  JSON_S("count") ","
2959  JSON_S("size") ","
2960  JSON_S("children"))));
2961 #undef JSON_S
2962 #undef JSON_O
2963 #undef JSON_A
2964  writer_->AddString(",\"node_count\":");
2965  writer_->AddNumber(snapshot_->entries().length());
2966  writer_->AddString(",\"edge_count\":");
2967  writer_->AddNumber(snapshot_->edges().length());
2968  writer_->AddString(",\"trace_function_count\":");
2969  uint32_t count = 0;
2970   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2971   if (tracker) {
2972  count = tracker->function_info_list().length();
2973  }
2974  writer_->AddNumber(count);
2975 }
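// Note: the "meta" object emitted above makes the format self-describing:
// consumers read node_fields/edge_fields to learn the row layout instead
// of hardcoding it, which is what allows "nodes" and "edges" to be flat
// positional arrays.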
2976 
2977 
2978 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2979   static const char hex_chars[] = "0123456789ABCDEF";
2980  w->AddString("\\u");
2981  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2982  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2983  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2984  w->AddCharacter(hex_chars[u & 0xf]);
2985 }
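// Note: WriteUChar() always emits exactly four hex digits, e.g. the
// control character 0x01 becomes "\u0001".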
2986 
2987 
2988 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2989   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2990   if (!tracker) return;
2991  AllocationTraceTree* traces = tracker->trace_tree();
2992  SerializeTraceNode(traces->root());
2993 }
2994 
2995 
2996 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2997   // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2998  const int kBufferSize =
2999  4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3000  + 4 + 1 + 1;
3001   EmbeddedVector<char, kBufferSize> buffer;
3002   int buffer_pos = 0;
3003  buffer_pos = utoa(node->id(), buffer, buffer_pos);
3004  buffer[buffer_pos++] = ',';
3005  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
3006  buffer[buffer_pos++] = ',';
3007  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
3008  buffer[buffer_pos++] = ',';
3009  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3010  buffer[buffer_pos++] = ',';
3011  buffer[buffer_pos++] = '[';
3012  buffer[buffer_pos++] = '\0';
3013  writer_->AddString(buffer.start());
3014 
3015  Vector<AllocationTraceNode*> children = node->children();
3016  for (int i = 0; i < children.length(); i++) {
3017  if (i > 0) {
3018  writer_->AddCharacter(',');
3019  }
3020  SerializeTraceNode(children[i]);
3021  }
3022  writer_->AddCharacter(']');
3023 }
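// Note: trace nodes serialize recursively as
// "id,function_info_index,count,size,[child,child,...]", so the output
// mirrors the shape of the allocation call tree.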
3024 
3025 
3026 // 0-based position is converted to 1-based during the serialization.
3027 static int SerializePosition(int position, const Vector<char>& buffer,
3028  int buffer_pos) {
3029  if (position == -1) {
3030  buffer[buffer_pos++] = '0';
3031  } else {
3032  DCHECK(position >= 0);
3033  buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3034  }
3035  return buffer_pos;
3036 }
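// Note: the +1 shift keeps "no position" (-1) distinguishable from the
// first character of a script: -1 serializes as "0", position 0 as "1".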
3037 
3038 
3039 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3040   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3041   if (!tracker) return;
3042  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3043  const int kBufferSize =
3044  6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
3045  + 6 + 1 + 1;
3046   EmbeddedVector<char, kBufferSize> buffer;
3047   const List<AllocationTracker::FunctionInfo*>& list =
3048       tracker->function_info_list();
3049  bool first_entry = true;
3050  for (int i = 0; i < list.length(); i++) {
3051  AllocationTracker::FunctionInfo* info = list[i];
3052  int buffer_pos = 0;
3053  if (first_entry) {
3054  first_entry = false;
3055  } else {
3056  buffer[buffer_pos++] = ',';
3057  }
3058  buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3059  buffer[buffer_pos++] = ',';
3060  buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3061  buffer[buffer_pos++] = ',';
3062  buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3063  buffer[buffer_pos++] = ',';
3064  // The cast is safe because script id is a non-negative Smi.
3065  buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3066  buffer_pos);
3067  buffer[buffer_pos++] = ',';
3068  buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3069  buffer[buffer_pos++] = ',';
3070  buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3071  buffer[buffer_pos++] = '\n';
3072  buffer[buffer_pos++] = '\0';
3073  writer_->AddString(buffer.start());
3074  }
3075 }
3076 
3077 
3078 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3079  writer_->AddCharacter('\n');
3080  writer_->AddCharacter('\"');
3081  for ( ; *s != '\0'; ++s) {
3082  switch (*s) {
3083  case '\b':
3084  writer_->AddString("\\b");
3085  continue;
3086  case '\f':
3087  writer_->AddString("\\f");
3088  continue;
3089  case '\n':
3090  writer_->AddString("\\n");
3091  continue;
3092  case '\r':
3093  writer_->AddString("\\r");
3094  continue;
3095  case '\t':
3096  writer_->AddString("\\t");
3097  continue;
3098  case '\"':
3099  case '\\':
3100  writer_->AddCharacter('\\');
3101  writer_->AddCharacter(*s);
3102  continue;
3103  default:
3104  if (*s > 31 && *s < 128) {
3105  writer_->AddCharacter(*s);
3106  } else if (*s <= 31) {
3107  // Special character with no dedicated literal.
3108  WriteUChar(writer_, *s);
3109  } else {
3110  // Convert UTF-8 into \u UTF-16 literal.
3111  unsigned length = 1, cursor = 0;
3112  for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3113  unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3114  if (c != unibrow::Utf8::kBadChar) {
3115  WriteUChar(writer_, c);
3116  DCHECK(cursor != 0);
3117  s += cursor - 1;
3118  } else {
3119  writer_->AddCharacter('?');
3120  }
3121  }
3122  }
3123  }
3124  writer_->AddCharacter('\"');
3125 }
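// Note: besides the standard JSON escapes above, remaining control
// characters are emitted as \uXXXX, and ill-formed UTF-8 sequences
// degrade to '?' instead of aborting serialization.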
3126 
3127 
3128 void HeapSnapshotJSONSerializer::SerializeStrings() {
3129   ScopedVector<const unsigned char*> sorted_strings(
3130  strings_.occupancy() + 1);
3131  for (HashMap::Entry* entry = strings_.Start();
3132  entry != NULL;
3133  entry = strings_.Next(entry)) {
3134  int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3135  sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3136  }
3137  writer_->AddString("\"<dummy>\"");
3138  for (int i = 1; i < sorted_strings.length(); ++i) {
3139  writer_->AddCharacter(',');
3140  SerializeString(sorted_strings[i]);
3141  if (writer_->aborted()) return;
3142  }
3143 }
3144 
3145 
3146 } } // namespace v8::internal
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define JSON_S(s)
#define STRING_NAME(name, str)
#define ROOT_NAME(type, name, camel_name)
#define JSON_A(s)
#define EXTRACT_CONTEXT_FIELD(index, type, name)
#define STRUCT_MAP_NAME(NAME, Name, name)
#define JSON_O(s)
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name)
#define MAKE_STRUCT_CASE(NAME, Name, name)
#define STRONG_ROOT_LIST(V)
Definition: heap.h:28
#define INTERNALIZED_STRING_LIST(V)
Definition: heap.h:262
#define UNREACHABLE()
Definition: logging.h:30
#define CHECK_EQ(expected, value)
Definition: logging.h:169
#define CHECK(condition)
Definition: logging.h:36
#define DCHECK(condition)
Definition: logging.h:205
void USE(T)
Definition: macros.h:322
#define V8_PTR_PREFIX
Definition: macros.h:360
#define V8PRIuPTR
Definition: macros.h:365
#define STATIC_ASSERT(test)
Definition: macros.h:311
unsigned short uint16_t
Definition: unicode.cc:23
unsigned int uchar
Definition: unicode.h:17
static int comparator(const HeapObjectInfo *a, const HeapObjectInfo *b)
void DeleteArray(T *array)
Definition: allocation.h:68
const int kPointerSize
Definition: globals.h:129
static void WriteUChar(OutputStreamWriter *w, unibrow::uchar u)
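WriteUChar feeds the JSON serializer: it emits a single code unit as a \uXXXX escape so snapshot strings stay ASCII-safe. A minimal standalone sketch of that behavior, with std::string standing in for the internal OutputStreamWriter sink (the name WriteUCharSketch is illustrative, not V8's):

#include <cstdint>
#include <string>

// Append a JSON "\uXXXX" escape for a 16-bit code unit, high nibble first.
static void WriteUCharSketch(std::string* out, uint16_t u) {
  static const char kHex[] = "0123456789ABCDEF";
  out->append("\\u");
  out->push_back(kHex[(u >> 12) & 0xF]);
  out->push_back(kHex[(u >> 8) & 0xF]);
  out->push_back(kHex[(u >> 4) & 0xF]);
  out->push_back(kHex[u & 0xF]);
}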
static int SortByIds(const T *entry1_ptr, const T *entry2_ptr)
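SortByIds is a qsort-style comparator for ordering entry pointers by snapshot id, which is what makes a later binary search by id possible. A hedged sketch with a hypothetical entry type (the real entry classes expose an id() accessor; EntrySketch stands in for them):

// Hypothetical stand-in for the internal entry types.
struct EntrySketch { unsigned id; };

// Three-way comparison by id: negative, zero, or positive, in the
// contract a qsort-style list sorter expects.
static int SortByIdsSketch(const EntrySketch* const* e1,
                           const EntrySketch* const* e2) {
  if ((*e1)->id == (*e2)->id) return 0;
  return (*e1)->id < (*e2)->id ? -1 : 1;
}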
static int utoa_impl(T value, const Vector< char > &buffer, int buffer_pos)
TypeImpl< ZoneTypeConfig > Type
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
@ VISIT_ONLY_STRONG
Definition: globals.h:397
size_t GetMemoryUsedByList(const List< T, P > &list)
Definition: list.h:184
@ DISALLOW_NULLS
Definition: objects.h:8337
static const uint32_t kZeroHashSeed
Definition: utils.h:245
static int utoa(T value, const Vector< char > &buffer, int buffer_pos)
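utoa, together with the utoa_impl helper above, appends the decimal digits of an unsigned value to a character buffer and returns the new write position; this is how the serializer emits numbers without printf-family calls. A standalone sketch under that reading (UtoaSketch and its raw-buffer signature are illustrative, not V8's exact types):

#include <cassert>

// Write the decimal digits of value into buf starting at pos; return the
// position one past the last digit written.
int UtoaSketch(unsigned value, char* buf, int buf_size, int pos) {
  // Count digits first so the most significant digit lands at pos even
  // though digits are produced least-significant-first.
  int digits = 0;
  unsigned t = value;
  do { ++digits; } while (t /= 10);
  assert(pos + digits <= buf_size);
  int end = pos + digits;
  for (int i = end - 1; i >= pos; --i) {
    buf[i] = static_cast<char>('0' + value % 10);
    value /= 10;
  }
  return end;
}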
int SNPrintF(Vector< char > str, const char *format,...)
Definition: utils.cc:105
@ PROPERTY_CELL_TYPE
Definition: objects.h:665
@ ODDBALL_TYPE
Definition: objects.h:663
@ FOREIGN_TYPE
Definition: objects.h:671
@ ROBUST_STRING_TRAVERSAL
Definition: objects.h:8338
int SortedListBSearch(const List< T > &list, P cmp)
Definition: list-inl.h:219
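SortedListBSearch declares a classic binary search over an already-sorted List, driven by a caller-supplied comparator. A standalone analogue over std::vector, since the List API itself is internal; the comparator convention (negative when the element is below the target) is an assumption consistent with the declaration:

#include <vector>

// Binary search over a sorted vector; cmp(elem) returns <0 if elem sorts
// below the target, >0 if above, 0 on a match. Returns the index or -1.
template <typename T, typename P>
int SortedBSearchSketch(const std::vector<T>& v, P cmp) {
  int low = 0, high = static_cast<int>(v.size()) - 1;
  while (low <= high) {
    int mid = low + (high - low) / 2;
    int c = cmp(v[mid]);
    if (c < 0) low = mid + 1;
    else if (c > 0) high = mid - 1;
    else return mid;
  }
  return -1;
}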
uint32_t ComputePointerHash(void *ptr)
Definition: utils.h:274
byte * Address
Definition: globals.h:101
void PrintF(const char *format,...)
Definition: utils.cc:80
const int kHeapObjectTag
Definition: v8.h:5737
uint32_t ComputeIntegerHash(uint32_t key, uint32_t seed)
Definition: utils.h:249
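ComputeIntegerHash turns a 32-bit key plus a seed into a well-mixed hash; the profiler uses such hashes for its id and address tables, and the seed guards against predictable hash values. A sketch of the general shape (the shift-and-multiply mixing steps below are assumptions, not necessarily the exact sequence in utils.h):

#include <cstdint>

// Seeded 32-bit integer hash: fold in the seed, then avalanche the bits
// so nearby keys land in distant buckets.
static uint32_t ComputeIntegerHashSketch(uint32_t key, uint32_t seed) {
  uint32_t hash = key ^ seed;   // fold in the per-process seed
  hash = ~hash + (hash << 15);  // assumed mixing steps follow
  hash ^= hash >> 12;
  hash += hash << 2;
  hash ^= hash >> 4;
  hash *= 2057;
  hash ^= hash >> 16;
  return hash;
}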
size_t NumberToSize(Isolate *isolate, Object *number)
Definition: conversions.h:233
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
static int SerializePosition(int position, const Vector< char > &buffer, int buffer_pos)
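SerializePosition plausibly maps the "no source position" sentinel (-1) to 0 and writes real positions shifted by one so that 0 stays unambiguous. A sketch under that assumption, reusing the UtoaSketch helper from the utoa note above:

#include <cassert>

// From the utoa sketch above: writes decimal digits, returns new position.
int UtoaSketch(unsigned value, char* buf, int buf_size, int pos);

// Assumed behavior: -1 serializes as the single digit '0'; non-negative
// positions are written as position + 1.
static int SerializePositionSketch(int position, char* buf, int buf_size,
                                   int pos) {
  if (position == -1) {
    assert(pos < buf_size);
    buf[pos++] = '0';
    return pos;
  }
  return UtoaSketch(static_cast<unsigned>(position + 1), buf, buf_size, pos);
}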
static bool AddressesMatch(void *key1, void *key2)
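AddressesMatch is the key-equality callback for a pointer-keyed hash map: two keys match only when they are literally the same address, with ComputePointerHash supplying the bucket hash. A minimal sketch:

// Identity equality for pointer keys: object addresses are unique, so
// pointer comparison is sufficient.
static bool AddressesMatchSketch(void* key1, void* key2) {
  return key1 == key2;
}

In a hash map constructed with a (hash, match) pair, a lookup would pass ComputePointerHash(addr) as the hash and this predicate as the match function.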
int StrLength(const char *string)
Definition: vector.h:147
void MemCopy(void *dest, const void *src, size_t size)
Definition: utils.h:350
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
@ kGCCallbackFlagConstructRetainedObjectInfos
Definition: v8.h:4212
@ kNoGCCallbackFlags
Definition: v8.h:4210
GCType
Applications can register callback functions which will be called before and after a garbage collection.
Definition: v8.h:4203
@ kGCTypeMarkSweepCompact
Definition: v8.h:4205
uint32_t SnapshotObjectId
Definition: v8-profiler.h:16
@ kWeak
Definition: v8-util.h:25
#define STRUCT_LIST(V)
Definition: objects.h:515
#define STRING_TYPE_LIST(V)
Definition: objects.h:457
A struct for exporting HeapStats data from V8, using a "push" model.
Definition: v8-profiler.h:599
v8::RetainedObjectInfo * info
IndexTag(int index, VisitorSynchronization::SyncTag tag)
#define T(name, string, precedence)
Definition: token.cc:25