V8 Project
global-handles.cc
Go to the documentation of this file.
1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #include "src/api.h"
8 #include "src/global-handles.h"
9 
10 #include "src/vm-state-inl.h"
11 
12 namespace v8 {
13 namespace internal {
14 
15 
17  if (info != NULL) info->Dispose();
18  delete[] objects;
19 }
20 
21 
23  delete[] children;
24 }
25 
26 
28  public:
29  // State transition diagram:
30  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
31  enum State {
32  FREE = 0,
33  NORMAL, // Normal global handle.
34  WEAK, // Flagged as weak but not yet finalized.
35  PENDING, // Has been recognized as only reachable by weak handles.
36  NEAR_DEATH // Callback has informed the handle is near death.
37  };
38 
39  // Maps handle location (slot) to the containing node.
41  DCHECK(OFFSET_OF(Node, object_) == 0);
42  return reinterpret_cast<Node*>(location);
43  }
44 
45  Node() {
48  STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
53  STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
55  STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
57  }
58 
59 #ifdef ENABLE_HANDLE_ZAPPING
60  ~Node() {
61  // TODO(1428): if it's a weak handle we should have invoked its callback.
62  // Zap the values for eager trapping.
63  object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
65  index_ = 0;
66  set_independent(false);
68  set_in_new_space_list(false);
69  parameter_or_next_free_.next_free = NULL;
71  }
72 #endif
73 
  // Reset this node to the FREE state and push it onto the head of the
  // free list headed by *first_free.
  // |index| is the node's slot index within its containing NodeBlock; it
  // must fit in the uint8_t index_ field (blocks hold kSize = 256 nodes,
  // so valid indices are 0..255).
  void Initialize(int index, Node** first_free) {
    index_ = static_cast<uint8_t>(index);
    DCHECK(static_cast<int>(index_) == index);  // Guard against truncation.
    set_state(FREE);
    set_in_new_space_list(false);
    // Link in at the head of the free list.
    parameter_or_next_free_.next_free = *first_free;
    *first_free = this;
  }
82 
83  void Acquire(Object* object) {
84  DCHECK(state() == FREE);
85  object_ = object;
87  set_independent(false);
90  parameter_or_next_free_.parameter = NULL;
93  }
94 
95  void Release() {
96  DCHECK(state() != FREE);
97  set_state(FREE);
98  // Zap the values for eager trapping.
99  object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
101  set_independent(false);
105  }
106 
107  // Object slot accessors.
108  Object* object() const { return object_; }
109  Object** location() { return &object_; }
111 
112  // Wrapper class ID accessors.
113  bool has_wrapper_class_id() const {
115  }
116 
117  uint16_t wrapper_class_id() const { return class_id_; }
118 
119  // State and flag accessors.
120 
121  State state() const {
122  return NodeState::decode(flags_);
123  }
126  }
127 
128  bool is_independent() {
130  }
131  void set_independent(bool v) {
133  }
134 
137  }
138  void set_partially_dependent(bool v) {
140  }
141 
144  }
145  void set_in_new_space_list(bool v) {
147  }
148 
  // True when the handle is on its way out: either already recognized as
  // only weakly reachable (PENDING) or its weak callback has signalled
  // near-death (NEAR_DEATH).
  bool IsNearDeath() const {
    // Check for PENDING to ensure correct answer when processing callbacks.
    return state() == PENDING || state() == NEAR_DEATH;
  }
153 
154  bool IsWeak() const { return state() == WEAK; }
155 
156  bool IsRetainer() const { return state() != FREE; }
157 
158  bool IsStrongRetainer() const { return state() == NORMAL; }
159 
160  bool IsWeakRetainer() const {
161  return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
162  }
163 
164  void MarkPending() {
165  DCHECK(state() == WEAK);
167  }
168 
169  // Independent flag accessors.
171  DCHECK(state() != FREE);
172  set_independent(true);
173  }
174 
176  DCHECK(state() != FREE);
177  if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
179  }
180  }
182 
183  // Callback accessor.
184  // TODO(svenpanne) Re-enable or nuke later.
185  // WeakReferenceCallback callback() { return callback_; }
186 
187  // Callback parameter accessors.
188  void set_parameter(void* parameter) {
189  DCHECK(state() != FREE);
191  }
192  void* parameter() const {
193  DCHECK(state() != FREE);
194  return parameter_or_next_free_.parameter;
195  }
196 
197  // Accessors for next free node in the free list.
199  DCHECK(state() == FREE);
200  return parameter_or_next_free_.next_free;
201  }
  // Set the free-list successor of this node. Only valid while the node is
  // FREE: the parameter union doubles as the free-list link in that state.
  void set_next_free(Node* value) {
    DCHECK(state() == FREE);
    parameter_or_next_free_.next_free = value;
  }
206 
207  void MakeWeak(void* parameter, WeakCallback weak_callback) {
208  DCHECK(weak_callback != NULL);
209  DCHECK(state() != FREE);
210  CHECK(object_ != NULL);
211  set_state(WEAK);
213  weak_callback_ = weak_callback;
214  }
215 
216  void* ClearWeakness() {
217  DCHECK(state() != FREE);
218  void* p = parameter();
219  set_state(NORMAL);
221  return p;
222  }
223 
225  if (state() != Node::PENDING) return false;
226  if (weak_callback_ == NULL) {
227  Release();
228  return false;
229  }
230  void* par = parameter();
233 
234  Object** object = location();
235  {
236  // Check that we are not passing a finalized external string to
237  // the callback.
238  DCHECK(!object_->IsExternalOneByteString() ||
239  ExternalOneByteString::cast(object_)->resource() != NULL);
240  DCHECK(!object_->IsExternalTwoByteString() ||
241  ExternalTwoByteString::cast(object_)->resource() != NULL);
242  // Leaving V8.
244  HandleScope handle_scope(isolate);
245  Handle<Object> handle(*object, isolate);
247  reinterpret_cast<v8::Isolate*>(isolate),
249  par);
250  weak_callback_(data);
251  }
252  // Absence of explicit cleanup or revival of weak handle
253  // in most of the cases would lead to memory leak.
254  CHECK(state() != NEAR_DEATH);
255  return true;
256  }
257 
259 
260  private:
261  inline NodeBlock* FindBlock();
262  inline void IncreaseBlockUses();
263  inline void DecreaseBlockUses();
264 
265  // Storage for object pointer.
266  // Placed first to avoid offset computation.
268 
269  // Next word stores class_id, index, state, and independent.
270  // Note: the most aligned fields should go first.
271 
272  // Wrapper class ID.
274 
275  // Index in the containing handle block.
276  uint8_t index_;
277 
278  // This stores three flags (independent, partially_dependent and
279  // in_new_space_list) and a State.
280  class NodeState: public BitField<State, 0, 4> {};
281  class IsIndependent: public BitField<bool, 4, 1> {};
282  class IsPartiallyDependent: public BitField<bool, 5, 1> {};
283  class IsInNewSpaceList: public BitField<bool, 6, 1> {};
284 
285  uint8_t flags_;
286 
287  // Handle specific callback - might be a weak reference in disguise.
289 
290  // Provided data for callback. In FREE state, this is used for
291  // the free list link.
292  union {
293  void* parameter;
296 
298 };
299 
300 
302  public:
303  static const int kSize = 256;
304 
306  : next_(next),
307  used_nodes_(0),
308  next_used_(NULL),
309  prev_used_(NULL),
311 
  // Prepend every node of this block to the free list headed by
  // *first_free. Iterates in reverse so that, after all insertions, the
  // nodes appear on the list in ascending index order.
  void PutNodesOnFreeList(Node** first_free) {
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }
317 
  // Return the node stored at |index| in this block; bounds are checked in
  // debug builds only.
  Node* node_at(int index) {
    DCHECK(0 <= index && index < kSize);
    return &nodes_[index];
  }
322 
323  void IncreaseUses() {
325  if (used_nodes_++ == 0) {
328  next_used_ = old_first;
329  prev_used_ = NULL;
330  if (old_first == NULL) return;
331  old_first->prev_used_ = this;
332  }
333  }
334 
335  void DecreaseUses() {
336  DCHECK(used_nodes_ > 0);
337  if (--used_nodes_ == 0) {
340  if (this == global_handles_->first_used_block_) {
342  }
343  }
344  }
345 
347 
348  // Next block in the list of all blocks.
349  NodeBlock* next() const { return next_; }
350 
351  // Next/previous block in the list of blocks with used nodes.
352  NodeBlock* next_used() const { return next_used_; }
353  NodeBlock* prev_used() const { return prev_used_; }
354 
355  private:
357  NodeBlock* const next_;
362 };
363 
364 
366  return FindBlock()->global_handles();
367 }
368 
369 
371  intptr_t ptr = reinterpret_cast<intptr_t>(this);
372  ptr = ptr - index_ * sizeof(Node);
373  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
374  DCHECK(block->node_at(index_) == this);
375  return block;
376 }
377 
378 
380  NodeBlock* node_block = FindBlock();
381  node_block->IncreaseUses();
382  GlobalHandles* global_handles = node_block->global_handles();
383  global_handles->isolate()->counters()->global_handles()->Increment();
384  global_handles->number_of_global_handles_++;
385 }
386 
387 
389  NodeBlock* node_block = FindBlock();
390  GlobalHandles* global_handles = node_block->global_handles();
391  parameter_or_next_free_.next_free = global_handles->first_free_;
392  global_handles->first_free_ = this;
393  node_block->DecreaseUses();
394  global_handles->isolate()->counters()->global_handles()->Decrement();
395  global_handles->number_of_global_handles_--;
396 }
397 
398 
400  public:
  // Begin iteration at slot 0 of the first block containing used nodes.
  // The iterator visits every slot (used or free) of every used block.
  explicit NodeIterator(GlobalHandles* global_handles)
      : block_(global_handles->first_used_block_),
        index_(0) {}
404 
405  bool done() const { return block_ == NULL; }
406 
  // Current node; only valid while !done().
  Node* node() const {
    DCHECK(!done());
    return block_->node_at(index_);
  }
411 
412  void Advance() {
413  DCHECK(!done());
414  if (++index_ < NodeBlock::kSize) return;
415  index_ = 0;
416  block_ = block_->next_used();
417  }
418 
419  private:
421  int index_;
422 
424 };
425 
426 
428  : isolate_(isolate),
432  first_free_(NULL),
435 
436 
438  NodeBlock* block = first_block_;
439  while (block != NULL) {
440  NodeBlock* tmp = block->next();
441  delete block;
442  block = tmp;
443  }
444  first_block_ = NULL;
445 }
446 
447 
449  if (first_free_ == NULL) {
450  first_block_ = new NodeBlock(this, first_block_);
452  }
453  DCHECK(first_free_ != NULL);
454  // Take the first node in the free list.
455  Node* result = first_free_;
456  first_free_ = result->next_free();
457  result->Acquire(value);
458  if (isolate_->heap()->InNewSpace(value) &&
459  !result->is_in_new_space_list()) {
460  new_space_nodes_.Add(result);
461  result->set_in_new_space_list(true);
462  }
463  return result->handle();
464 }
465 
466 
468  DCHECK(location != NULL);
469  return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
470 }
471 
472 
473 void GlobalHandles::Destroy(Object** location) {
474  if (location != NULL) Node::FromLocation(location)->Release();
475 }
476 
477 
479  void* parameter,
480  WeakCallback weak_callback) {
481  Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
482 }
483 
484 
486  return Node::FromLocation(location)->ClearWeakness();
487 }
488 
489 
491  Node::FromLocation(location)->MarkIndependent();
492 }
493 
494 
497 }
498 
499 
501  return Node::FromLocation(location)->is_independent();
502 }
503 
504 
506  return Node::FromLocation(location)->IsNearDeath();
507 }
508 
509 
// Returns whether the handle at |location| is currently in the WEAK state
// (flagged weak but not yet pending or near death).
bool GlobalHandles::IsWeak(Object** location) {
  return Node::FromLocation(location)->IsWeak();
}
513 
514 
516  for (NodeIterator it(this); !it.done(); it.Advance()) {
517  if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
518  }
519 }
520 
521 
523  for (NodeIterator it(this); !it.done(); it.Advance()) {
524  if (it.node()->IsWeak() && f(it.node()->location())) {
525  it.node()->MarkPending();
526  }
527  }
528 }
529 
530 
532  for (int i = 0; i < new_space_nodes_.length(); ++i) {
533  Node* node = new_space_nodes_[i];
534  if (node->IsStrongRetainer() ||
535  (node->IsWeakRetainer() && !node->is_independent() &&
536  !node->is_partially_dependent())) {
537  v->VisitPointer(node->location());
538  }
539  }
540 }
541 
542 
545  for (int i = 0; i < new_space_nodes_.length(); ++i) {
546  Node* node = new_space_nodes_[i];
547  DCHECK(node->is_in_new_space_list());
548  if ((node->is_independent() || node->is_partially_dependent()) &&
549  node->IsWeak() && f(isolate_->heap(), node->location())) {
550  node->MarkPending();
551  }
552  }
553 }
554 
555 
557  for (int i = 0; i < new_space_nodes_.length(); ++i) {
558  Node* node = new_space_nodes_[i];
559  DCHECK(node->is_in_new_space_list());
560  if ((node->is_independent() || node->is_partially_dependent()) &&
561  node->IsWeakRetainer()) {
562  v->VisitPointer(node->location());
563  }
564  }
565 }
566 
567 
569  WeakSlotCallbackWithHeap can_skip) {
571  int last = 0;
572  bool any_group_was_visited = false;
573  for (int i = 0; i < object_groups_.length(); i++) {
574  ObjectGroup* entry = object_groups_.at(i);
575  DCHECK(entry != NULL);
576 
577  Object*** objects = entry->objects;
578  bool group_should_be_visited = false;
579  for (size_t j = 0; j < entry->length; j++) {
580  Object* object = *objects[j];
581  if (object->IsHeapObject()) {
582  if (!can_skip(isolate_->heap(), &object)) {
583  group_should_be_visited = true;
584  break;
585  }
586  }
587  }
588 
589  if (!group_should_be_visited) {
590  object_groups_[last++] = entry;
591  continue;
592  }
593 
594  // An object in the group requires visiting, so iterate over all
595  // objects in the group.
596  for (size_t j = 0; j < entry->length; ++j) {
597  Object* object = *objects[j];
598  if (object->IsHeapObject()) {
599  v->VisitPointer(&object);
600  any_group_was_visited = true;
601  }
602  }
603 
604  // Once the entire group has been iterated over, set the object
605  // group to NULL so it won't be processed again.
606  delete entry;
607  object_groups_.at(i) = NULL;
608  }
609  object_groups_.Rewind(last);
610  return any_group_was_visited;
611 }
612 
613 
615  GarbageCollector collector) {
616  // Process weak global handle callbacks. This must be done after the
617  // GC is completely done, because the callbacks may invoke arbitrary
618  // API functions.
620  const int initial_post_gc_processing_count = ++post_gc_processing_count_;
621  int freed_nodes = 0;
622  if (collector == SCAVENGER) {
623  for (int i = 0; i < new_space_nodes_.length(); ++i) {
624  Node* node = new_space_nodes_[i];
625  DCHECK(node->is_in_new_space_list());
626  if (!node->IsRetainer()) {
627  // Free nodes do not have weak callbacks. Do not use them to compute
628  // the freed_nodes.
629  continue;
630  }
631  // Skip dependent handles. Their weak callbacks might expect to be
632  // called between two global garbage collection callbacks which
633  // are not called for minor collections.
634  if (!node->is_independent() && !node->is_partially_dependent()) {
635  continue;
636  }
639  if (initial_post_gc_processing_count != post_gc_processing_count_) {
640  // Weak callback triggered another GC and another round of
641  // PostGarbageCollection processing. The current node might
642  // have been deleted in that round, so we need to bail out (or
643  // restart the processing).
644  return freed_nodes;
645  }
646  }
647  if (!node->IsRetainer()) {
648  freed_nodes++;
649  }
650  }
651  } else {
652  for (NodeIterator it(this); !it.done(); it.Advance()) {
653  if (!it.node()->IsRetainer()) {
654  // Free nodes do not have weak callbacks. Do not use them to compute
655  // the freed_nodes.
656  continue;
657  }
658  it.node()->clear_partially_dependent();
659  if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
660  if (initial_post_gc_processing_count != post_gc_processing_count_) {
661  // See the comment above.
662  return freed_nodes;
663  }
664  }
665  if (!it.node()->IsRetainer()) {
666  freed_nodes++;
667  }
668  }
669  }
670  // Update the list of new space nodes.
671  int last = 0;
672  for (int i = 0; i < new_space_nodes_.length(); ++i) {
673  Node* node = new_space_nodes_[i];
674  DCHECK(node->is_in_new_space_list());
675  if (node->IsRetainer()) {
676  if (isolate_->heap()->InNewSpace(node->object())) {
677  new_space_nodes_[last++] = node;
679  } else {
680  node->set_in_new_space_list(false);
682  }
683  } else {
684  node->set_in_new_space_list(false);
686  }
687  }
688  new_space_nodes_.Rewind(last);
689  return freed_nodes;
690 }
691 
692 
694  for (NodeIterator it(this); !it.done(); it.Advance()) {
695  if (it.node()->IsStrongRetainer()) {
696  v->VisitPointer(it.node()->location());
697  }
698  }
699 }
700 
701 
703  for (NodeIterator it(this); !it.done(); it.Advance()) {
704  if (it.node()->IsRetainer()) {
705  v->VisitPointer(it.node()->location());
706  }
707  }
708 }
709 
710 
712  for (NodeIterator it(this); !it.done(); it.Advance()) {
713  if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
714  v->VisitEmbedderReference(it.node()->location(),
715  it.node()->wrapper_class_id());
716  }
717  }
718 }
719 
720 
722  for (int i = 0; i < new_space_nodes_.length(); ++i) {
723  Node* node = new_space_nodes_[i];
724  if (node->IsRetainer() && node->has_wrapper_class_id()) {
725  v->VisitEmbedderReference(node->location(),
726  node->wrapper_class_id());
727  }
728  }
729 }
730 
731 
733  int count = 0;
734  for (NodeIterator it(this); !it.done(); it.Advance()) {
735  if (it.node()->IsWeakRetainer()) {
736  count++;
737  }
738  }
739  return count;
740 }
741 
742 
744  int count = 0;
745  for (NodeIterator it(this); !it.done(); it.Advance()) {
746  if (it.node()->IsWeakRetainer() &&
747  it.node()->object()->IsJSGlobalObject()) {
748  count++;
749  }
750  }
751  return count;
752 }
753 
754 
756  *stats->global_handle_count = 0;
757  *stats->weak_global_handle_count = 0;
758  *stats->pending_global_handle_count = 0;
759  *stats->near_death_global_handle_count = 0;
760  *stats->free_global_handle_count = 0;
761  for (NodeIterator it(this); !it.done(); it.Advance()) {
762  *stats->global_handle_count += 1;
763  if (it.node()->state() == Node::WEAK) {
764  *stats->weak_global_handle_count += 1;
765  } else if (it.node()->state() == Node::PENDING) {
766  *stats->pending_global_handle_count += 1;
767  } else if (it.node()->state() == Node::NEAR_DEATH) {
768  *stats->near_death_global_handle_count += 1;
769  } else if (it.node()->state() == Node::FREE) {
770  *stats->free_global_handle_count += 1;
771  }
772  }
773 }
774 
775 #ifdef DEBUG
776 
777 void GlobalHandles::PrintStats() {
778  int total = 0;
779  int weak = 0;
780  int pending = 0;
781  int near_death = 0;
782  int destroyed = 0;
783 
784  for (NodeIterator it(this); !it.done(); it.Advance()) {
785  total++;
786  if (it.node()->state() == Node::WEAK) weak++;
787  if (it.node()->state() == Node::PENDING) pending++;
788  if (it.node()->state() == Node::NEAR_DEATH) near_death++;
789  if (it.node()->state() == Node::FREE) destroyed++;
790  }
791 
792  PrintF("Global Handle Statistics:\n");
793  PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
794  PrintF(" # weak = %d\n", weak);
795  PrintF(" # pending = %d\n", pending);
796  PrintF(" # near_death = %d\n", near_death);
797  PrintF(" # free = %d\n", destroyed);
798  PrintF(" # total = %d\n", total);
799 }
800 
801 
802 void GlobalHandles::Print() {
803  PrintF("Global handles:\n");
804  for (NodeIterator it(this); !it.done(); it.Advance()) {
805  PrintF(" handle %p to %p%s\n",
806  reinterpret_cast<void*>(it.node()->location()),
807  reinterpret_cast<void*>(it.node()->object()),
808  it.node()->IsWeak() ? " (weak)" : "");
809  }
810 }
811 
812 #endif
813 
814 
815 
817  size_t length,
818  v8::RetainedObjectInfo* info) {
819 #ifdef DEBUG
820  for (size_t i = 0; i < length; ++i) {
821  DCHECK(!Node::FromLocation(handles[i])->is_independent());
822  }
823 #endif
824  if (length == 0) {
825  if (info != NULL) info->Dispose();
826  return;
827  }
828  ObjectGroup* group = new ObjectGroup(length);
829  for (size_t i = 0; i < length; ++i)
830  group->objects[i] = handles[i];
831  group->info = info;
832  object_groups_.Add(group);
833 }
834 
835 
837  UniqueId id) {
839 }
840 
841 
843  RetainedObjectInfo* info) {
845 }
846 
847 
849  DCHECK(!Node::FromLocation(child)->is_independent());
851 }
852 
853 
855  DCHECK(!Node::FromLocation(child)->is_independent());
856  ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
857  group->children[0] = child;
858  implicit_ref_groups_.Add(group);
859 }
860 
861 
863  for (int i = 0; i < object_groups_.length(); i++)
864  delete object_groups_.at(i);
865  object_groups_.Clear();
866  for (int i = 0; i < retainer_infos_.length(); ++i)
867  retainer_infos_[i].info->Dispose();
868  retainer_infos_.Clear();
871 }
872 
873 
875  for (int i = 0; i < implicit_ref_groups_.length(); i++) {
876  delete implicit_ref_groups_.at(i);
877  }
878  implicit_ref_groups_.Clear();
880 }
881 
882 
884  // TODO(1428): invoke weak callbacks.
885 }
886 
887 
889  if (object_group_connections_.length() == 0) {
890  for (int i = 0; i < retainer_infos_.length(); ++i)
891  retainer_infos_[i].info->Dispose();
892  retainer_infos_.Clear();
894  return;
895  }
896 
898  retainer_infos_.Sort();
900 
901  int info_index = 0; // For iterating retainer_infos_.
902  UniqueId current_group_id(0);
903  int current_group_start = 0;
904 
905  int current_implicit_refs_start = 0;
906  int current_implicit_refs_end = 0;
907  for (int i = 0; i <= object_group_connections_.length(); ++i) {
908  if (i == 0)
909  current_group_id = object_group_connections_[i].id;
910  if (i == object_group_connections_.length() ||
911  current_group_id != object_group_connections_[i].id) {
912  // Group detected: objects in indices [current_group_start, i[.
913 
914  // Find out which implicit references are related to this group. (We want
915  // to ignore object groups which only have 1 object, but that object is
 916  // needed as a representative object for the implicit reference group.)
917  while (current_implicit_refs_start < implicit_ref_connections_.length() &&
918  implicit_ref_connections_[current_implicit_refs_start].id <
919  current_group_id)
920  ++current_implicit_refs_start;
921  current_implicit_refs_end = current_implicit_refs_start;
922  while (current_implicit_refs_end < implicit_ref_connections_.length() &&
923  implicit_ref_connections_[current_implicit_refs_end].id ==
924  current_group_id)
925  ++current_implicit_refs_end;
926 
927  if (current_implicit_refs_end > current_implicit_refs_start) {
928  // Find a representative object for the implicit references.
929  HeapObject** representative = NULL;
930  for (int j = current_group_start; j < i; ++j) {
931  Object** object = object_group_connections_[j].object;
932  if ((*object)->IsHeapObject()) {
933  representative = reinterpret_cast<HeapObject**>(object);
934  break;
935  }
936  }
937  if (representative) {
938  ImplicitRefGroup* group = new ImplicitRefGroup(
939  representative,
940  current_implicit_refs_end - current_implicit_refs_start);
941  for (int j = current_implicit_refs_start;
942  j < current_implicit_refs_end;
943  ++j) {
944  group->children[j - current_implicit_refs_start] =
945  implicit_ref_connections_[j].object;
946  }
947  implicit_ref_groups_.Add(group);
948  }
949  current_implicit_refs_start = current_implicit_refs_end;
950  }
951 
952  // Find a RetainedObjectInfo for the group.
953  RetainedObjectInfo* info = NULL;
954  while (info_index < retainer_infos_.length() &&
955  retainer_infos_[info_index].id < current_group_id) {
956  retainer_infos_[info_index].info->Dispose();
957  ++info_index;
958  }
959  if (info_index < retainer_infos_.length() &&
960  retainer_infos_[info_index].id == current_group_id) {
961  // This object group has an associated ObjectGroupRetainerInfo.
962  info = retainer_infos_[info_index].info;
963  ++info_index;
964  }
965 
966  // Ignore groups which only contain one object.
967  if (i > current_group_start + 1) {
968  ObjectGroup* group = new ObjectGroup(i - current_group_start);
969  for (int j = current_group_start; j < i; ++j) {
970  group->objects[j - current_group_start] =
971  object_group_connections_[j].object;
972  }
973  group->info = info;
974  object_groups_.Add(group);
975  } else if (info) {
976  info->Dispose();
977  }
978 
979  if (i < object_group_connections_.length()) {
980  current_group_id = object_group_connections_[i].id;
981  current_group_start = i;
982  }
983  }
984  }
987  retainer_infos_.Clear();
989 }
990 
991 
993  for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
995  }
996 }
997 
998 
1000  for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
1001 }
1002 
1003 
1005  int limit = size_;
1006  for (int i = 0; i < blocks_.length(); i++) {
1007  DCHECK(limit > 0);
1008  Object** block = blocks_[i];
1009  visitor->VisitPointers(block, block + Min(limit, kSize));
1010  limit -= kSize;
1011  }
1012 }
1013 
1014 
1016  for (int i = 0; i < new_space_indices_.length(); i++) {
1017  visitor->VisitPointer(GetLocation(new_space_indices_[i]));
1018  }
1019 }
1020 
1021 
1023  int last = 0;
1024  for (int i = 0; i < new_space_indices_.length(); i++) {
1025  int index = new_space_indices_[i];
1026  if (heap->InNewSpace(*GetLocation(index))) {
1027  new_space_indices_[last++] = index;
1028  }
1029  }
1030  new_space_indices_.Rewind(last);
1031 }
1032 
1033 
1034 void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
1035  DCHECK_EQ(kInvalidIndex, *index);
1036  if (object == NULL) return;
1037  DCHECK_NE(isolate->heap()->the_hole_value(), object);
1038  int block = size_ >> kShift;
1039  int offset = size_ & kMask;
1040  // need to resize
1041  if (offset == 0) {
1042  Object** next_block = new Object*[kSize];
1043  Object* the_hole = isolate->heap()->the_hole_value();
1044  MemsetPointer(next_block, the_hole, kSize);
1045  blocks_.Add(next_block);
1046  }
1047  DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1048  blocks_[block][offset] = object;
1049  if (isolate->heap()->InNewSpace(object)) {
1051  }
1052  *index = size_++;
1053 }
1054 
1055 
1056 } } // namespace v8::internal
static const uint16_t kPersistentHandleNoClassId
Default value of persistent handle class ID.
Definition: v8-profiler.h:503
Isolate represents an isolated instance of the V8 engine.
Definition: v8.h:4356
Interface for providing information about embedder's objects held by global handles.
Definition: v8-profiler.h:545
virtual void Dispose()=0
Called by V8 when it no longer needs an instance.
General purpose unique identifier.
Definition: v8.h:144
static Local< Context > ToLocal(v8::internal::Handle< v8::internal::Context > obj)
static const U kShift
Definition: utils.h:204
static U update(U previous, T value)
Definition: utils.h:223
static const U kMask
Definition: utils.h:203
static T decode(U value)
Definition: utils.h:228
static const int kInvalidIndex
void PostGarbageCollectionProcessing(Heap *heap)
void IterateAllRoots(ObjectVisitor *visitor)
void Create(Isolate *isolate, Object *object, int *index)
int singleton_handles_[NUMBER_OF_SINGLETON_HANDLES]
void IterateNewSpaceRoots(ObjectVisitor *visitor)
Object ** GetLocation(int index)
NodeBlock(GlobalHandles *global_handles, NodeBlock *next)
void PutNodesOnFreeList(Node **first_free)
NodeIterator(GlobalHandles *global_handles)
void set_parameter(void *parameter)
static Node * FromLocation(Object **location)
union v8::internal::GlobalHandles::Node::@14 parameter_or_next_free_
void Initialize(int index, Node **first_free)
void MakeWeak(void *parameter, WeakCallback weak_callback)
bool PostGarbageCollectionProcessing(Isolate *isolate)
GlobalHandles(Isolate *isolate)
List< ImplicitRefGroup * > implicit_ref_groups_
WeakCallbackData< v8::Value, void >::Callback WeakCallback
void RecordStats(HeapStats *stats)
bool IterateObjectGroups(ObjectVisitor *v, WeakSlotCallbackWithHeap can_skip)
void IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor *v)
void IterateAllRoots(ObjectVisitor *v)
static void MakeWeak(Object **location, void *parameter, WeakCallback weak_callback)
List< ObjectGroup * > object_groups_
void IterateNewSpaceWeakIndependentRoots(ObjectVisitor *v)
static void * ClearWeakness(Object **location)
static bool IsWeak(Object **location)
void SetRetainedObjectInfo(UniqueId id, RetainedObjectInfo *info)
static void MarkIndependent(Object **location)
static bool IsIndependent(Object **location)
void IterateNewSpaceStrongAndDependentRoots(ObjectVisitor *v)
void SetReference(HeapObject **parent, Object **child)
void AddObjectGroup(Object ***handles, size_t length, v8::RetainedObjectInfo *info)
int PostGarbageCollectionProcessing(GarbageCollector collector)
void IterateAllRootsWithClassIds(ObjectVisitor *v)
static void MarkPartiallyDependent(Object **location)
static bool IsNearDeath(Object **location)
void IdentifyNewSpaceWeakIndependentHandles(WeakSlotCallbackWithHeap f)
void IterateStrongRoots(ObjectVisitor *v)
void SetReferenceFromGroup(UniqueId id, Object **child)
void IdentifyWeakHandles(WeakSlotCallback f)
static Handle< Object > CopyGlobal(Object **location)
void SetObjectGroupId(Object **handle, UniqueId id)
static const int kObjectGroupConnectionsCapacity
List< ObjectGroupConnection > object_group_connections_
static void Destroy(Object **location)
List< ObjectGroupConnection > implicit_ref_connections_
Handle< Object > Create(Object *value)
void IterateWeakRoots(ObjectVisitor *v)
List< ObjectGroupRetainerInfo > retainer_infos_
int * near_death_global_handle_count
Definition: heap.h:2064
int * global_handle_count
Definition: heap.h:2061
int * pending_global_handle_count
Definition: heap.h:2063
int * weak_global_handle_count
Definition: heap.h:2062
int * free_global_handle_count
Definition: heap.h:2065
HeapState gc_state()
Definition: heap.h:955
bool InNewSpace(Object *object)
Definition: heap-inl.h:322
void IncrementNodesCopiedInNewSpace()
Definition: heap.h:1126
void IncrementNodesPromoted()
Definition: heap.h:1128
void IncrementNodesDiedInNewSpace()
Definition: heap.h:1124
static const int kNodeStateIsNearDeathValue
Definition: v8.h:5858
static const int kNodeIsIndependentShift
Definition: v8.h:5859
static const int kNodeStateIsPendingValue
Definition: v8.h:5857
static const int kNodeStateMask
Definition: v8.h:5855
static const int kNodeStateIsWeakValue
Definition: v8.h:5856
static const int kNodeIsPartiallyDependentShift
Definition: v8.h:5860
static const int kNodeFlagsOffset
Definition: v8.h:5854
static const int kNodeClassIdOffset
Definition: v8.h:5853
Counters * counters()
Definition: isolate.h:857
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
Definition: list-inl.h:17
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define CHECK(condition)
Definition: logging.h:36
#define DCHECK_NE(v1, v2)
Definition: logging.h:207
#define DCHECK(condition)
Definition: logging.h:205
#define DCHECK_EQ(v1, v2)
Definition: logging.h:206
#define V8_PTR_PREFIX
Definition: macros.h:360
#define OFFSET_OF(type, field)
Definition: macros.h:22
#define arraysize(array)
Definition: macros.h:86
unsigned short uint16_t
Definition: unicode.cc:23
bool(* WeakSlotCallback)(Object **pointer)
Definition: globals.h:349
bool(* WeakSlotCallbackWithHeap)(Heap *heap, Object **pointer)
Definition: globals.h:351
static LifetimePosition Min(LifetimePosition a, LifetimePosition b)
const Address kGlobalHandleZapValue
Definition: globals.h:271
void MemsetPointer(T **dest, U *value, int counter)
Definition: utils.h:1183
Handle< T > handle(T *t, Isolate *isolate)
Definition: handles.h:146
void PrintF(const char *format,...)
Definition: utils.cc:80
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
Debugger support for the V8 JavaScript engine.
Definition: accessors.cc:20
v8::RetainedObjectInfo * info