#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_
*(--rear_) = reinterpret_cast<intptr_t>(target);
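
// The promotion queue stores entries below rear_, i.e. it grows downward in
// memory, so an insert pre-decrements the write cursor. A minimal standalone
// sketch of a downward-growing queue (names are illustrative, not the V8
// PromotionQueue API):
#include <cstdint>
struct DownwardQueue {
  intptr_t* front_;  // read cursor, starts at the high end
  intptr_t* rear_;   // write cursor, moves toward lower addresses
  void insert(void* target) {
    *(--rear_) = reinterpret_cast<intptr_t>(target);  // grow downward
  }
  bool is_empty() const { return front_ == rear_; }
};
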
return chars == str.length();
return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
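
// The bool template argument fixes the string representation at compile time
// (true = one-byte, false = two-byte); the runtime IsOneByte answer is turned
// into a template argument exactly once. Sketch of the dispatch pattern
// (hypothetical names, not the V8 signatures):
#include <cstdio>
template <bool is_one_byte>
void AllocateStringImpl(int chars) {
  const int char_size = is_one_byte ? 1 : 2;  // resolved at compile time
  std::printf("need %d bytes\n", chars * char_size);
}
void AllocateString(bool one_byte, int chars) {
  if (one_byte) {
    AllocateStringImpl<true>(chars);
  } else {
    AllocateStringImpl<false>(chars);
  }
}
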
Map* map = one_byte_internalized_string_map();
if (!allocation.To(&result)) return allocation;
String* answer = String::cast(result);
Map* map = internalized_string_map();
if (!allocation.To(&result)) return allocation;
String* answer = String::cast(result);
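
// Both allocation paths share the idiom above: AllocationResult carries
// either a heap object or a retry marker naming the space that failed, and
// To() yields the object only on success. A simplified standalone model of
// that contract (not the real V8 type):
struct AllocationResultSketch {
  void* object_;  // nullptr stands in for "retry"
  bool To(void** out) const {
    if (object_ == nullptr) return false;  // caller must GC and retry
    *out = object_;
    return true;
  }
};
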
if (src->length() == 0) return src;
if (src->length() == 0) return src;
if (src->length() == 0) return src;
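
// These three identical guards are the fast paths of the CopyFixedArray /
// CopyFixedDoubleArray / CopyConstantPoolArray family: copying a zero-length
// array is a no-op, so the source object itself is returned unchanged.
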
DCHECK(AllowHandleAllocation::IsAllowed());
DCHECK(AllowHeapAllocation::IsAllowed());
if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
    Heap::allocation_timeout_-- <= 0) {
allocation = new_space_.AllocateRaw(size_in_bytes);
if (allocation.To(&object)) {
if (size_in_bytes <= code_space()->AreaSize()) {
if (allocation.To(&object)) {
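
// --gc-interval=n gives allocation_timeout_ its starting value; the countdown
// above makes AllocateRaw report a retryable failure every n allocations so
// GC-triggering paths get exercised in tests. Standalone sketch of the idea
// (illustrative, not the V8 allocator):
#include <cstdlib>
struct CountdownAllocator {
  int timeout_;  // decremented on every request once the flag is set
  void* AllocateRaw(std::size_t bytes) {
    if (timeout_-- <= 0) return nullptr;  // simulated failure: caller GCs
    return std::malloc(bytes);
  }
};
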
if (profiler->is_tracking_allocations()) {
if (FLAG_verify_predictable) {
if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
if (target->IsSharedFunctionInfo()) {
if (FLAG_verify_predictable) {
if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
Address object_address = object->address();
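
// Under --verify-predictable, OnAllocationEvent and OnMoveEvent fold every
// (address, size) pair into raw_allocations_hash_ so two runs of the same
// workload can be compared, and --dump-allocations-digest-at-alloc prints the
// digest periodically. Sketch of such a running digest (the hash mixing is
// illustrative, not V8's UpdateAllocationsHash):
#include <cstdint>
struct AllocationsDigest {
  uint32_t hash_ = 0;
  uint32_t count_ = 0;
  void Update(uintptr_t address, int size) {
    ++count_;
    hash_ = hash_ * 31u + static_cast<uint32_t>(address) + size;
  }
};
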
DCHECK(string->IsExternalString());
if (*resource_addr != NULL) {
  (*resource_addr)->Dispose();
  *resource_addr = NULL;
}
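
// FinalizeExternalString reads the resource pointer stored at kResourceOffset
// inside the ExternalString, disposes it, and nulls the slot so a resource is
// never disposed twice. The shape of the idiom (simplified; Dispose mirrors
// the v8::String external resource contract):
struct ExternalResource {
  virtual void Dispose() = 0;
  virtual ~ExternalResource() = default;
};
inline void FinalizeResourceSlot(ExternalResource** resource_addr) {
  if (*resource_addr != nullptr) {
    (*resource_addr)->Dispose();  // release the externally owned payload
    *resource_addr = nullptr;     // guard against double finalization
  }
}
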
void Heap::RecordWrite(Address address, int offset) {
void Heap::RecordWrites(Address address, int start, int len) {
  for (int i = 0; i < len; i++) {
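
// RecordWrite is the store-buffer half of the write barrier: when a pointer
// is stored into an object outside new space, the slot's address is recorded
// so the scavenger can find old-to-new references without scanning all of old
// space. RecordWrites just records every slot of a contiguous range, as the
// loop above does. Standalone sketch (illustrative, not the V8 StoreBuffer):
#include <cstdint>
#include <vector>
struct StoreBufferSketch {
  std::vector<uintptr_t> slots_;
  void Mark(uintptr_t slot) { slots_.push_back(slot); }
  void RecordWrites(uintptr_t object_address, int start, int len) {
    const int kPointerSize = static_cast<int>(sizeof(void*));
    for (int i = 0; i < len; i++) {
      Mark(object_address + start + i * kPointerSize);  // one entry per slot
    }
  }
};
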
if (obj->map() == one_pointer_filler_map()) return false;
return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
                      obj->IsExternalString());
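
// AllowedToBeMigrated is a sanity predicate used in DCHECKs: a one-pointer
// filler map marks a partially initialized object and may never migrate;
// otherwise an object may only move to a space compatible with its instance
// type (TargetSpaceId), with fillers and external strings as the listed
// exceptions.
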
if ((dst < src) || (dst >= (src + byte_size))) {
  Object** end_slot = src_slot + size_in_words;
  while (src_slot != end_slot) {
    *dst_slot++ = *src_slot++;
  }
} else {
  MemMove(dst, src, static_cast<size_t>(byte_size));
}
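
// The guard (dst < src) || (dst >= src + byte_size) means dst does not start
// inside the source block, so a simple forward word copy never overwrites
// bytes it still has to read; otherwise MoveBlock falls back to MemMove,
// which has memmove's overlap-safe semantics. Standalone illustration:
#include <cstring>
inline void MoveBlockSketch(char* dst, const char* src,
                            std::size_t byte_size) {
  if (dst < src || dst >= src + byte_size) {
    for (std::size_t i = 0; i < byte_size; i++) dst[i] = src[i];  // forward
  } else {
    std::memmove(dst, src, byte_size);  // overlapping case
  }
}
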
Address memento_address = object_address + object->Size();
Map* candidate_map = candidate->map();
if (candidate_map != allocation_memento_map()) return NULL;
DCHECK(memento_address == top ||
       memento_address + HeapObject::kHeaderSize <= top ||
       !NewSpacePage::OnSamePage(memento_address, top - 1));
if (memento_address == top) return NULL;
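
// FindAllocationMemento inspects the word immediately following the object
// (object_address + object->Size()): it is a real AllocationMemento only if
// its map word is allocation_memento_map() and it lies strictly below the
// current new-space allocation top. A candidate exactly at top is just
// unallocated space, hence the NULL returns above.
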
Heap* heap = object->GetHeap();
if (!FLAG_allocation_site_pretenuring ||
    !AllocationSite::CanTrack(object->map()->instance_type()))
  return;
if (memento == NULL) return;
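
// If a memento is present, its AllocationSite receives pretenuring feedback
// (IncrementMementoFoundCount); sites whose objects keep surviving scavenges
// can later be switched to pretenured (old-space) allocation.
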
MapWord first_word = object->map_word();
if (first_word.IsForwardingAddress()) {
  HeapObject* dest = first_word.ToForwardingAddress();
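
// During a scavenge the map word of an already-evacuated object is replaced
// by a forwarding pointer, so ScavengeObject can redirect references instead
// of copying twice. Minimal sketch of such a tagged map word (the encoding
// here is illustrative, not V8's MapWord layout):
#include <cstdint>
struct MapWordSketch {
  uintptr_t value_;
  bool IsForwardingAddress() const { return (value_ & 1u) != 0; }
  uintptr_t ToForwardingAddress() const { return value_ & ~uintptr_t{1}; }
  static MapWordSketch FromForwardingAddress(uintptr_t target) {
    return MapWordSketch{target | 1u};  // tag the low bit
  }
};
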
const char* collector_reason = NULL;
return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
return reinterpret_cast<Isolate*>(
    reinterpret_cast<intptr_t>(this) -
    reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
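
// Heap is embedded by value inside Isolate, so the enclosing Isolate* is
// recovered by subtracting the byte offset of the heap field from `this`.
// reinterpret_cast<Isolate*>(4)->heap() computes that offset plus 4 without
// dereferencing anything; 4 is used instead of 0 to avoid arithmetic on a
// null pointer. The same idea expressed with offsetof (standalone sketch):
#include <cstddef>
#include <cstdint>
struct Owner;
struct Embedded {
  Owner* owner();  // recover the enclosing object from a member
};
struct Owner {
  int before;
  Embedded embedded;
};
inline Owner* Embedded::owner() {
  return reinterpret_cast<Owner*>(reinterpret_cast<uintptr_t>(this) -
                                  offsetof(Owner, embedded));
}
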
#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) {                   \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE;                                         \
  }
#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)   \
  do {                                                                       \
    AllocationResult __allocation__ = FUNCTION_CALL;                         \
    Object* __object__ = NULL;                                               \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                        \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),           \
                                      "allocation failure");                 \
    __allocation__ = FUNCTION_CALL;                                          \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                        \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();       \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");         \
    {                                                                        \
      AlwaysAllocateScope __scope__(ISOLATE);                                \
      __allocation__ = FUNCTION_CALL;                                        \
    }                                                                        \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                        \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                            \
  } while (false)
#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY)                         \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())

#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
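
// Retry protocol, in order: run FUNCTION_CALL once; if it asks for a retry,
// collect the failing space and run it again; then collect all available
// garbage ("last resort gc") and try once more inside AlwaysAllocateScope;
// only if that still fails does V8 abort via FatalProcessOutOfMemory.
// Typical use wraps a raw allocator in a handle-returning helper, e.g. this
// hypothetical sketch built on the CopyFixedArray allocator:
//
//   Handle<FixedArray> CopyFixedArrayToHandle(Isolate* isolate,
//                                             FixedArray* src) {
//     CALL_HEAP_FUNCTION(isolate, isolate->heap()->CopyFixedArray(src),
//                        FixedArray);
//   }
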
DCHECK(string->IsExternalString());
DCHECK(string->IsExternalString());
if (FLAG_verify_heap) {
set_instanceof_cache_function(the_hole_value());
return condition ? true_value() : false_value();
set_instanceof_cache_map(the_hole_value());
set_instanceof_cache_function(the_hole_value());
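
// ToBoolean maps a C++ bool onto the canonical true/false heap values.
// ClearInstanceofCache resets only the cached function, while
// CompletelyClearInstanceofCache (above) resets both the cached map and the
// cached function to the hole value.
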
AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
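
// AlwaysAllocateScope is RAII: while at least one scope is live, allocation
// may not report retryable failures (the CALL_AND_RETRY macro above relies
// on this for its final attempt). Minimal sketch of the depth-counter
// pattern (illustrative, not the V8 class):
struct SketchHeap {
  int always_allocate_scope_depth_ = 0;
  bool always_allocate() const { return always_allocate_scope_depth_ != 0; }
};
struct SketchAlwaysAllocateScope {
  explicit SketchAlwaysAllocateScope(SketchHeap* heap) : heap_(heap) {
    heap_->always_allocate_scope_depth_++;  // enter: allocation must succeed
  }
  ~SketchAlwaysAllocateScope() { heap_->always_allocate_scope_depth_--; }
  SketchHeap* heap_;
};
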
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}

NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}

void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
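
// These two loops belong to the heap-verification visitors:
// VerifyPointersVisitor checks that every heap pointer in [start, end)
// refers to an object the heap contains with a valid map, while
// VerifySmisVisitor requires every slot in the range to hold a Smi. Both are
// used only by heap verification (--verify-heap / debug builds).
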
#endif  // V8_HEAP_HEAP_INL_H_