59 #ifdef ENABLE_HANDLE_ZAPPING
75 index_ = static_cast<uint8_t>(index);
239 ExternalOneByteString::cast(object_)->resource() != NULL);
241 ExternalTwoByteString::cast(object_)->resource() != NULL);
313 for (int i = kSize - 1; i >= 0; --i) {
330 if (old_first == NULL) return;
371 intptr_t ptr = reinterpret_cast<intptr_t>(this);
372 ptr = ptr - index_ * sizeof(Node);
374 DCHECK(block->node_at(index_) == this);
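Lines 371-374 recover a node's owning block with nothing but pointer arithmetic: because the node array is the first member of its block, stepping back index_ * sizeof(Node) from a node lands on the block itself. A minimal, self-contained sketch of the idea (the Node/NodeBlock shapes here are assumptions, not the real V8 layout):

```cpp
#include <cassert>
#include <cstdint>

struct NodeBlock;

struct Node {
  void* object_;   // first field: the slot handed out as a location
  uint8_t index_;  // this node's position inside its block

  NodeBlock* FindBlock();
};

struct NodeBlock {
  static const int kSize = 256;
  Node nodes_[kSize];  // must remain the first member for the trick to work
  Node* node_at(int i) { return &nodes_[i]; }
};

NodeBlock* Node::FindBlock() {
  intptr_t ptr = reinterpret_cast<intptr_t>(this);
  ptr = ptr - index_ * sizeof(Node);  // step back to nodes_[0]
  NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
  assert(block->node_at(index_) == this);  // mirrors the DCHECK at line 374
  return block;
}

int main() {
  NodeBlock block;
  for (int i = 0; i < NodeBlock::kSize; ++i) {
    block.nodes_[i].index_ = static_cast<uint8_t>(i);
  }
  assert(block.node_at(7)->FindBlock() == &block);
  return 0;
}
```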
391 parameter_or_next_free_.next_free = global_handles->first_free_;
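Line 391 pushes a released node onto an intrusive free list. A freed node no longer carries an embedder parameter, so a single union word can hold either the parameter or the next-free pointer. A sketch under that assumption (hypothetical shapes, not the V8 sources):

```cpp
struct Node {
  union {
    void* parameter;  // meaningful while the node is live
    Node* next_free;  // meaningful while the node is on the free list
  } parameter_or_next_free_;
};

struct GlobalHandlesSketch {
  Node* first_free_ = nullptr;

  // Mirrors line 391: the freed node becomes the new list head.
  void Release(Node* node) {
    node->parameter_or_next_free_.next_free = first_free_;
    first_free_ = node;
  }

  Node* Acquire() {
    Node* node = first_free_;
    if (node != nullptr) first_free_ = node->parameter_or_next_free_.next_free;
    return node;
  }
};

int main() {
  GlobalHandlesSketch handles;
  Node a, b;
  handles.Release(&a);
  handles.Release(&b);
  // b was freed last, so it is acquired first (LIFO free list).
  return handles.Acquire() == &b ? 0 : 1;
}
```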
409 return block_->node_at(index_);
416 block_ = block_->next_used();
439 while (block != NULL) {
517 if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
524 if (it.node()->IsWeak() && f(it.node()->location())) {
525 it.node()->MarkPending();
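Lines 524-525 show the weak-identification step: a weak node whose slot the callback selects is marked pending, so the post-GC pass can fire its callback later. A sketch of that single state transition (the State enum and its values here are assumptions, not V8's):

```cpp
enum State { NORMAL, WEAK, PENDING, NEAR_DEATH, FREE };

struct Object;
typedef bool (*WeakSlotCallback)(Object** pointer);

struct Node {
  State state = NORMAL;
  Object* object = nullptr;
  Object** location() { return &object; }
  bool IsWeak() const { return state == WEAK; }
  void MarkPending() { state = PENDING; }
};

// Mirrors the per-node step at lines 524-525.
void IdentifyWeakHandle(Node* node, WeakSlotCallback f) {
  if (node->IsWeak() && f(node->location())) {
    node->MarkPending();
  }
}
```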
572 bool any_group_was_visited = false;
578 bool group_should_be_visited = false;
579 for (size_t j = 0; j < entry->length; j++) {
580 Object* object = *objects[j];
581 if (object->IsHeapObject()) {
583 group_should_be_visited = true;
589 if (!group_should_be_visited) {
596 for (size_t j = 0; j < entry->length; ++j) {
597 Object* object = *objects[j];
598 if (object->IsHeapObject()) {
599 v->VisitPointer(&object);
600 any_group_was_visited = true;
610 return any_group_was_visited;
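The loop that returns at line 610 treats each object group as a unit: a first pass decides whether any member still requires visiting, and only then does a second pass visit every member, so the group is retained or released atomically. A compact sketch of the same two-pass shape (Group, Visitor, and CanSkip are stand-ins, not the V8 types):

```cpp
#include <vector>

struct Object;
typedef bool (*CanSkip)(Object** slot);  // stand-in for the can_skip callback

struct Group {
  std::vector<Object**> objects;  // the group's handle slots
};

struct Visitor {
  virtual void VisitPointer(Object** slot) = 0;
  virtual ~Visitor() {}
};

bool IterateGroups(const std::vector<Group>& groups, Visitor* v,
                   CanSkip can_skip) {
  bool any_group_was_visited = false;
  for (const Group& entry : groups) {
    // Pass 1: does any member force the whole group to be visited?
    bool group_should_be_visited = false;
    for (Object** slot : entry.objects) {
      if (!can_skip(slot)) {
        group_should_be_visited = true;
        break;
      }
    }
    if (!group_should_be_visited) continue;
    // Pass 2: visit every member, so the group survives or dies as a unit.
    for (Object** slot : entry.objects) {
      v->VisitPointer(slot);
      any_group_was_visited = true;
    }
  }
  return any_group_was_visited;
}
```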
653 if (!it.node()->IsRetainer()) {
658 it.node()->clear_partially_dependent();
659 if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
665 if (!it.node()->IsRetainer()) {
695 if (it.node()->IsStrongRetainer()) {
696 v->VisitPointer(it.node()->location());
704 if (it.node()->IsRetainer()) {
705 v->VisitPointer(it.node()->location());
713 if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
714 v->VisitEmbedderReference(it.node()->location(),
715 it.node()->wrapper_class_id());
725 v->VisitEmbedderReference(node->location(),
735 if (it.node()->IsWeakRetainer()) {
746 if (it.node()->IsWeakRetainer() &&
747 it.node()->object()->IsJSGlobalObject()) {
769 } else if (it.node()->state() == Node::FREE) {
777 void GlobalHandles::PrintStats() {
784 for (NodeIterator it(this); !it.done(); it.Advance()) {
789 if (it.node()->state() == Node::FREE) destroyed++;
792 PrintF("Global Handle Statistics:\n");
794 PrintF(" # weak = %d\n", weak);
795 PrintF(" # pending = %d\n", pending);
796 PrintF(" # near_death = %d\n", near_death);
797 PrintF(" # free = %d\n", destroyed);
798 PrintF(" # total = %d\n", total);
802 void GlobalHandles::Print() {
803 PrintF("Global handles:\n");
804 for (NodeIterator it(this); !it.done(); it.Advance()) {
805 PrintF(" handle %p to %p%s\n",
806 reinterpret_cast<void*>(it.node()->location()),
807 reinterpret_cast<void*>(it.node()->object()),
808 it.node()->IsWeak() ? " (weak)" : "");
820 for (size_t i = 0; i < length; ++i) {
829 for (size_t i = 0; i < length; ++i)
903 int current_group_start = 0;
905 int current_implicit_refs_start = 0;
906 int current_implicit_refs_end = 0;
920 ++current_implicit_refs_start;
921 current_implicit_refs_end = current_implicit_refs_start;
925 ++current_implicit_refs_end;
927 if (current_implicit_refs_end > current_implicit_refs_start) {
930 for (int j = current_group_start; j < i; ++j) {
932 if ((*object)->IsHeapObject()) {
933 representative = reinterpret_cast<HeapObject**>(object);
937 if (representative) {
940 current_implicit_refs_end - current_implicit_refs_start);
941 for (int j = current_implicit_refs_start;
942 j < current_implicit_refs_end;
944 group->children[j - current_implicit_refs_start] =
949 current_implicit_refs_start = current_implicit_refs_end;
967 if (i > current_group_start + 1) {
969 for (int j = current_group_start; j < i; ++j) {
970 group->objects[j - current_group_start] =
981 current_group_start = i;
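Lines 903-981 slice a list of connections, pre-sorted by id, into groups in one left-to-right pass; a run only becomes a group when it has at least two entries (the `i > current_group_start + 1` test at line 967). A sketch of that grouping scan with simplified stand-in types:

```cpp
#include <cstddef>
#include <vector>

struct Object;

struct Connection {
  int id;          // stands in for UniqueId
  Object** object;
};

struct Group {
  std::vector<Object**> objects;
};

std::vector<Group> ComputeGroups(const std::vector<Connection>& conns) {
  std::vector<Group> groups;
  std::size_t current_group_start = 0;
  for (std::size_t i = 0; i <= conns.size(); ++i) {
    // A run ends when the id changes or the input is exhausted.
    if (i == conns.size() || conns[i].id != conns[current_group_start].id) {
      if (i > current_group_start + 1) {  // singletons form no group
        Group group;
        for (std::size_t j = current_group_start; j < i; ++j) {
          group.objects.push_back(conns[j].object);
        }
        groups.push_back(group);
      }
      current_group_start = i;  // line 981: start the next run here
    }
  }
  return groups;
}
```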
1009 visitor->VisitPointers(block, block + Min(limit, kSize));
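Line 1009 visits handles block by block; every block is full except possibly the last, so each visit stops at Min(limit, kSize) and limit shrinks by kSize per block. A sketch of the same walk (the container shape and kSize value are assumptions):

```cpp
#include <algorithm>
#include <vector>

struct Object;

constexpr int kSize = 256;  // handles per block (an assumption here)

struct Visitor {
  virtual void VisitPointers(Object** start, Object** end) = 0;
  virtual ~Visitor() {}
};

struct EternalHandlesSketch {
  std::vector<Object**> blocks_;  // each entry is an Object*[kSize] array
  int size_ = 0;                  // total live handles across all blocks

  // Mirrors line 1009: only the tail of the last block is unused, so each
  // block is visited up to min(limit, kSize) slots.
  void IterateAllRoots(Visitor* visitor) {
    int limit = size_;
    for (Object** block : blocks_) {
      visitor->VisitPointers(block, block + std::min(limit, kSize));
      limit -= kSize;
      if (limit <= 0) break;
    }
  }
};
```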
1036 if (object == NULL) return;
1043 Object* the_hole = isolate->heap()->the_hole_value();
1048 blocks_[block][offset] = object;
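Lines 1036-1048 outline how an eternal handle is created: a null object is ignored, storage grows in fixed-size blocks pre-filled with a hole sentinel, and a single integer index encodes the (block, offset) pair. A sketch of that bookkeeping (the names and block size are assumptions, not the V8 implementation):

```cpp
#include <vector>

struct Object;

constexpr int kShift = 8;
constexpr int kBlockSize = 1 << kShift;  // 256 handles per block (assumed)
constexpr int kMask = kBlockSize - 1;

struct EternalStoreSketch {
  std::vector<Object**> blocks_;
  int size_ = 0;
  Object* hole_;  // sentinel for unused slots, like the_hole at line 1043

  explicit EternalStoreSketch(Object* hole) : hole_(hole) {}
  ~EternalStoreSketch() {
    for (Object** block : blocks_) delete[] block;
  }

  // An integer index encodes (block, offset); new blocks are allocated on
  // demand and initialized to the hole so unused slots are recognizable.
  void Create(Object* object, int* index) {
    if (object == nullptr) return;  // line 1036's early-out
    int block = size_ >> kShift;
    int offset = size_ & kMask;
    if (offset == 0) {  // grow by one hole-filled block
      Object** data = new Object*[kBlockSize];
      for (int i = 0; i < kBlockSize; ++i) data[i] = hole_;
      blocks_.push_back(data);
    }
    blocks_[block][offset] = object;  // the store at line 1048
    *index = size_++;
  }
};
```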