#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_
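
// Inline dispatch-table setup and body visitors for the static object
// visitors: StaticNewSpaceVisitor (used by the scavenger) and
// StaticMarkingVisitor (used by the mark-compact marker).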

template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);
  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  table_.Register(kVisitFreeSpace, &VisitFreeSpace);
  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);
  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
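
// The typed-array and data-view visitors below return the visited object's
// size and visit its pointer fields on both sides of the weak-next field,
// which links live array buffer views and is skipped here.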

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}
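
// Dispatch-table setup for the mark-compact marking visitor. Unlike the
// scavenger table above, these visitors return void, and several of them
// treat selected fields weakly (maps, property cells, code, functions).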

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);
  table_.Register(kVisitNativeContext, &VisitNativeContext);
  table_.Register(kVisitAllocationSite, &VisitAllocationSite);
  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);
  table_.Register(kVisitMap, &VisitMap);
  table_.Register(kVisitCode, &VisitCode);
  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
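
// The helpers below visit pointers embedded in a Code object's relocation
// info: each records the slot with the mark-compact collector (so it can be
// updated if the target moves) and then marks the target object.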

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::GetObjectFromEntryAddress(entry_address);
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // Mark the embedded object unless the host code object holds it weakly.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  Cell* cell = rinfo->target_cell();
  // No need to record the slot: cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // IC stubs that must not survive this GC are cleared here before marking.
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  Code* target = rinfo->code_age_stub();
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
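
// For native contexts the fixed-body visitor only covers the strong slots;
// the weak slots past FIRST_WEAK_SLOT are merely recorded so the collector
// can update or clear them later.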

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clear the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled, mark through the map's transitions and
  // back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}
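
// Property cells and allocation sites keep a "dependent code" array; when map
// collection is enabled it is marked without being pushed, so the references
// it holds stay weak and dead code can be cleared after marking.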

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the dependent code array weakly (without pushing it).
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the dependent code array weakly (without pushing it).
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the collection for post-processing after marking.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Visit the strong fields, skipping the backing table and the next link.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));

  // A partially initialized collection has no hash table to mark yet.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing table without pushing it, keeping its references weak.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  // Code aging (MakeOlder) happens here when --age-code is enabled.
  code->CodeIterateBody<StaticVisitor>(heap);
}
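
// Code flushing: when enabled, the SharedFunctionInfo and JSFunction visitors
// register flushing candidates with the collector's code flusher and treat
// their code references weakly instead of marking them.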

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Hand the optimized code map to the code flusher for processing after
      // marking, and treat the code map itself weakly by marking it without
      // pushing it onto the marking deque.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // The code looks flushable; postpone the final decision until every
      // function pointing to this SharedFunctionInfo has been seen, and treat
      // the reference to the code object weakly in the meantime.
      collector->code_flusher()->AddCandidate(shared);
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing,
      // since cached code does not contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);

  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // The code looks flushable; postpone the final decision until every
      // function pointing to the same SharedFunctionInfo has been seen.
      collector->code_flusher()->AddCandidate(function);
      // Visit the shared function info immediately to avoid checking its
      // flushability again later.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  // Skip the weak-next and weak-first-view fields linking live array buffers.
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object,
                           JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Mark the back pointer; its slot is not recorded since map space is not
  // compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat the transitions array weakly by marking it without pushing it.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Descriptor arrays may be shared; mark only this map's own descriptors.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorStartSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the dependent code array weakly, then visit the map's pointer fields.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array weakly, without pushing it.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // For optimized functions, retain the non-optimized code of every inlined
  // function so that bailing out from inlined code remains possible.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}

inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}

inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}
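
// IsFlushable decides whether a function's unoptimized code may be flushed;
// both overloads bail out as soon as any condition requires keeping the code.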

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code that is on the stack, in the compilation cache, or referenced by an
  // optimized version of the function is not flushed.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) return false;

  // The function must have a valid, non-builtin context.
  if (!IsValidNonBuiltinContext(function->context())) return false;

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) return false;

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) return false;

  return IsFlushable(heap, shared_info);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code that is on the stack, in the compilation cache, or referenced by an
  // optimized version of the function is not flushed.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) return false;
  // The function must be compiled and have its source available so it can be
  // recompiled on demand later.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }
  // Never flush code of API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) return false;
  // Only unoptimized function code is flushed.
  if (shared_info->code()->kind() != Code::FUNCTION) return false;
  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) return false;
  // Generators may still have live activations on the heap.
  if (shared_info->is_generator()) return false;
  // Full scripts wrapped in a function are not flushed.
  if (shared_info->is_toplevel()) return false;
  // %SetCode breaks the one-to-one relation between shared info and code.
  if (shared_info->dont_flush()) return false;
  // Flush only when code aging is enabled and the code is old.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) return false;
  return true;
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
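
// Code::CodeIterateBody<StaticVisitor> visits the pointer fields in a Code
// object's header and then every relocation entry selected by the mode mask.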

template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}

#endif  // V8_OBJECTS_VISITING_INL_H_