V8 Project
objects-visiting-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

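// StaticNewSpaceVisitor: Initialize() below populates the static dispatch
// table that maps each visitor id to the routine used to visit the body of
// that kind of object when iterating new-space objects.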
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


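// StaticMarkingVisitor: Initialize() fills the dispatch table used during
// marking. Several entries point at the visitors below, which treat selected
// fields (code, transitions, dependent code, weak collection tables) as weak.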
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC ||
       (heap->flush_monomorphic_ics() && !target->is_weak_stub()) ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark property cell dependent codes array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // codes when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark allocation site dependent codes array but do not push it onto
    // marking stack, this will make references from it weak. We will clean
    // dead codes when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark prototype dependent codes array but do not push it onto marking
  // stack, this will make references from it weak. We will clean dead
  // codes when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


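// The helpers below back the code-flushing heuristic: IsFlushable() decides
// whether a function's unoptimized code may be discarded and recompiled
// lazily from source later.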
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


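// Code::CodeIterateBody visits the pointer-valued header fields of a Code
// object and every relocation entry that can reference the heap. The
// non-templated and templated versions must be kept in sync.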
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));


  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_