23 if (probe->IsCode()) return handle(Code::cast(probe));
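Pieced together, the probe above is the whole code-cache lookup. A minimal sketch, assuming the Find() signature and ComputeHandlerFlags() listed in the references below and that Map::FindInCodeCache returns a non-Code sentinel on a miss:

Handle<Code> PropertyHandlerCompiler::Find(Handle<Name> name,
                                           Handle<Map> stub_holder, Code::Kind kind,
                                           CacheHolderFlag cache_holder,
                                           Code::StubType type) {
  // Sketch only; relies on the flags/code-cache API named in the references.
  Code::Flags flags = Code::ComputeHandlerFlags(kind, type, cache_holder);
  Object* probe = stub_holder->FindInCodeCache(*name, flags);
  if (probe->IsCode()) return handle(Code::cast(probe));  // reuse a cached handler
  return Handle<Code>::null();                            // miss: caller compiles a new one
}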
32 if (receiver_map->prototype()->IsNull()) {
48 receiver_map->is_dictionary_map()
54 if (current_map->is_dictionary_map()) cache_name = name;
55 if (current_map->prototype()->IsNull()) break;
56 last = handle(JSObject::cast(current_map->prototype()));
57 current_map = handle(last->map());
63 if (!handler.is_null()) return handler;
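Taken together, lines 54-63 walk the prototype chain to decide how specific the nonexistent-load stub must be, then consult the code cache before compiling anything. A sketch of that logic, assuming the surrounding ComputeLoadNonexistent context (stub_holder_map and flag come from earlier in the function; shared_sentinel_name is a hypothetical stand-in for the shared cache key used when no dictionary-mode map is involved):

// Sketch (assumed context: ComputeLoadNonexistent). A dictionary-mode map
// anywhere on the chain forces a name-specific stub, because the generated
// code must perform negative lookups against that concrete name.
Handle<Name> cache_name =
    receiver_map->is_dictionary_map() ? name : shared_sentinel_name;  // hypothetical sentinel
Handle<Map> current_map = stub_holder_map;
Handle<JSObject> last;
while (true) {
  if (current_map->is_dictionary_map()) cache_name = name;
  if (current_map->prototype()->IsNull()) break;            // reached the end of the chain
  last = handle(JSObject::cast(current_map->prototype()));
  current_map = handle(last->map());
}
// Reuse a previously compiled handler for this map/name pair if one is cached.
Handle<Code> handler = PropertyHandlerCompiler::Find(
    cache_name, stub_holder_map, Code::LOAD_IC, flag, Code::FAST);
if (!handler.is_null()) return handler;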
77 PROFILE(isolate(), CodeCreateEvent(Logger::STUB_TAG, *code, *name));
87 #define __ ACCESS_MASM(masm())
94 int function_index = -1;
95 if (type()->Is(HeapType::String())) {
99 } else if (type()->Is(HeapType::Number())) {
101 } else if (type()->Is(HeapType::Boolean())) {
110 Object* function = isolate()->native_context()->get(function_index);
111 Object* prototype = JSFunction::cast(function)->instance_prototype();
113 object_reg = scratch1();
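The String/Number/Boolean branches above pick a native-context index so that, for primitive receivers, the prototype checks start at the corresponding wrapper function's instance prototype rather than at the value itself. A sketch, assuming Context's *_FUNCTION_INDEX constants, the miss label from the enclosing FrontendHeader, and the helpers listed in the references:

// Sketch (assumed context: FrontendHeader for a primitive receiver type).
int function_index = -1;
if (type()->Is(HeapType::String())) {
  function_index = Context::STRING_FUNCTION_INDEX;
} else if (type()->Is(HeapType::Number())) {
  function_index = Context::NUMBER_FUNCTION_INDEX;
} else if (type()->Is(HeapType::Boolean())) {
  function_index = Context::BOOLEAN_FUNCTION_INDEX;
}
if (function_index >= 0) {
  // Load the wrapper function's prototype into scratch1() and continue the
  // map checks from there instead of from the primitive value.
  GenerateDirectLoadGlobalFunctionPrototype(masm(), function_index, scratch1(), miss);
  Object* function = isolate()->native_context()->get(function_index);
  Object* prototype = JSFunction::cast(function)->instance_prototype();
  set_type_for_object(handle(prototype, isolate()));
  object_reg = scratch1();
}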
148 holder_reg = receiver();
152 DCHECK(last_map->prototype() == isolate()->heap()->null_value());
158 if (last_map->is_dictionary_map()) {
159 if (last_map->IsJSGlobalObjectMap()) {
166 if (!name->IsUniqueName()) {
171 holder()->property_dictionary()->FindEntry(name) ==
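Lines 148-171 belong to the negative-lookup part of the frontend: once the chain walk has reached the last object, the stub must prove the property is absent there. A sketch of that step under the assumed NonexistentFrontendHeader context (miss, holder_reg, last_map and the scratch registers come from the surrounding code; global-object receivers are handled separately via GenerateCheckPropertyCell, see the references):

// Sketch (assumed context: NonexistentFrontendHeader). Dictionary-mode,
// non-global last objects get an inlined negative dictionary lookup, which
// requires a unique (internalized) name.
if (last_map->is_dictionary_map() && !last_map->IsJSGlobalObjectMap()) {
  if (!name->IsUniqueName()) {
    DCHECK(name->IsString());
    name = isolate()->factory()->InternalizeString(Handle<String>::cast(name));
  }
  DCHECK(holder().is_null() ||
         holder()->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound);
  GenerateDictionaryNegativeLookup(masm(), miss, holder_reg, name, scratch1(), scratch2());
}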
183 __ Move(receiver(), reg);
185 GenerateTailCall(masm(), stub.GetCode());
191 int constant_index) {
193 __ Move(receiver(), reg);
195 GenerateTailCall(masm(), stub.GetCode());
220 DCHECK(call_optimization.is_simple_api_call());
224 scratch1(), false, 0, NULL);
230 LookupIterator* it) {
234 bool inline_followup = false;
235 switch (it->state()) {
236 case LookupIterator::TRANSITION:
238 case LookupIterator::ACCESS_CHECK:
239 case LookupIterator::INTERCEPTOR:
240 case LookupIterator::JSPROXY:
241 case LookupIterator::NOT_FOUND:
243 case LookupIterator::DATA:
244 inline_followup = it->property_details().type() == FIELD;
248 inline_followup = accessors->IsExecutableAccessorInfo();
249 if (!inline_followup) break;
252 inline_followup = info->getter() != NULL &&
254 isolate(), info, type());
259 if (inline_followup) {
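The switch above (lines 234-259) decides whether the interceptor stub may inline the follow-up property access once the interceptor declines to handle the load. A sketch of the whole decision, assuming LookupIterator's ACCESSOR state and that the IsCompatibleReceiverType helper listed in the references is a static on ExecutableAccessorInfo:

// Sketch (assumed context: CompileLoadInterceptor). Only plain fields and
// compatible ExecutableAccessorInfo getters are cheap enough to inline after
// the interceptor; everything else falls back to the generic path.
bool inline_followup = false;
switch (it->state()) {
  case LookupIterator::TRANSITION:
    UNREACHABLE();
  case LookupIterator::ACCESS_CHECK:
  case LookupIterator::INTERCEPTOR:
  case LookupIterator::JSPROXY:
  case LookupIterator::NOT_FOUND:
    break;
  case LookupIterator::DATA:
    inline_followup = it->property_details().type() == FIELD;
    break;
  case LookupIterator::ACCESSOR: {
    Handle<Object> accessors = it->GetAccessors();
    inline_followup = accessors->IsExecutableAccessorInfo();
    if (!inline_followup) break;
    Handle<ExecutableAccessorInfo> info =
        Handle<ExecutableAccessorInfo>::cast(accessors);
    inline_followup =
        info->getter() != NULL &&
        ExecutableAccessorInfo::IsCompatibleReceiverType(isolate(), info, type());
    break;
  }
}
Register reg = Frontend(receiver(), it->name());
if (inline_followup) {
  GenerateLoadInterceptorWithFollowup(it, reg);  // interceptor + inlined follow-up
} else {
  GenerateLoadInterceptor(reg);                  // interceptor only
}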
271 LookupIterator* it, Register interceptor_reg) {
278 switch (it->state()) {
279 case LookupIterator::ACCESS_CHECK:
280 case LookupIterator::INTERCEPTOR:
281 case LookupIterator::JSPROXY:
282 case LookupIterator::NOT_FOUND:
283 case LookupIterator::TRANSITION:
285 case LookupIterator::DATA: {
287 __ Move(receiver(), reg);
289 GenerateTailCall(masm(), stub.GetCode());
315 __ CheckMapDeprecated(transition, scratch1(), &miss);
318 bool is_nonexistent = holder()->map() == transition->GetBackPointer();
319 if (is_nonexistent) {
335 scratch1(), scratch2(), scratch3(), &miss, &slow);
338 TailCallBuiltin(masm(), MissBuiltin(kind()));
350 TailCallBuiltin(masm(), MissBuiltin(kind()));
366 const CallOptimization& call_optimization) {
370 receiver(), scratch1(), true, 1, values);
380 for (int i = 0; i < receiver_maps->length(); ++i) {
385 cached_stub = isolate()->builtins()->KeyedLoadIC_String();
387 cached_stub = isolate()->builtins()->KeyedLoadIC_Slow();
389 bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
390 ElementsKind elements_kind = receiver_map->elements_kind();
391 if (receiver_map->has_indexed_interceptor()) {
406 handlers->Add(cached_stub);
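Lines 380-406 are the element-handler selection loop: for every receiver map in the polymorphic set, pick either a shared builtin or compile a map-specific elements stub, and collect the result in handlers. A sketch of that loop; the stub class names are recalled from V8 of this vintage and should be treated as assumptions:

// Sketch (assumed context: ElementHandlerCompiler::CompileElementHandlers).
for (int i = 0; i < receiver_maps->length(); ++i) {
  Handle<Map> receiver_map = receiver_maps->at(i);
  Handle<Code> cached_stub;
  if ((receiver_map->instance_type() & kNotStringTag) == 0) {
    cached_stub = isolate()->builtins()->KeyedLoadIC_String();   // string receiver
  } else if (receiver_map->instance_type() < FIRST_JS_RECEIVER_TYPE) {
    cached_stub = isolate()->builtins()->KeyedLoadIC_Slow();     // other primitives
  } else {
    bool is_js_array = receiver_map->instance_type() == JS_ARRAY_TYPE;
    ElementsKind elements_kind = receiver_map->elements_kind();
    if (receiver_map->has_indexed_interceptor()) {
      cached_stub = LoadIndexedInterceptorStub(isolate()).GetCode();    // assumed stub name
    } else if (IsSloppyArgumentsElements(elements_kind)) {
      cached_stub = KeyedLoadSloppyArgumentsStub(isolate()).GetCode();  // assumed stub name
    } else if (IsFastElementsKind(elements_kind) ||
               IsExternalArrayElementsKind(elements_kind) ||
               IsFixedTypedArrayElementsKind(elements_kind)) {
      cached_stub = LoadFastElementStub(isolate(), is_js_array, elements_kind).GetCode();
    } else {
      cached_stub = LoadDictionaryElementStub(isolate()).GetCode();     // assumed stub name
    }
  }
  handlers->Add(cached_stub);
}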
The superclass of all JavaScript values and objects.
static bool IsCompatibleReceiverType(Isolate *isolate, Handle< AccessorInfo > info, Handle< HeapType > type)
static Flags ComputeHandlerFlags(Kind handler_kind, StubType type=NORMAL, CacheHolderFlag holder=kCacheOnReceiver)
void CompileElementHandlers(MapHandleList *receiver_maps, CodeHandleList *handlers)
static Handle< T > cast(Handle< S > that)
static Handle< T > null()
static const int kNotFound
static Handle< Map > TypeToMap(HeapType *type, Isolate *isolate)
static Handle< HeapType > CurrentTypeOf(Handle< Object > object, Isolate *isolate)
static Handle< Map > GetHandlerCacheHolder(HeapType *type, bool receiver_is_holder, Isolate *isolate, CacheHolderFlag *flag)
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static void UpdateCodeCache(Handle< Map > map, Handle< Name > name, Handle< Code > code)
static Handle< Code > ComputeLoadNonexistent(Handle< Name > name, Handle< HeapType > type)
Handle< Code > CompileLoadCallback(Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
virtual void FrontendFooter(Handle< Name > name, Label *miss)
Handle< Code > CompileLoadField(Handle< Name > name, FieldIndex index)
Handle< Code > CompileLoadNonexistent(Handle< Name > name)
static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler *masm, int index, Register prototype, Label *miss)
void GenerateLoadInterceptorWithFollowup(LookupIterator *it, Register holder_reg)
Handle< Code > CompileLoadInterceptor(LookupIterator *it)
virtual Register FrontendHeader(Register object_reg, Handle< Name > name, Label *miss)
void GenerateLoadConstant(Handle< Object > value)
Handle< Code > CompileLoadViaGetter(Handle< Name > name, Handle< JSFunction > getter)
void GenerateLoadInterceptor(Register holder_reg)
Handle< Code > CompileLoadConstant(Handle< Name > name, int constant_index)
void GenerateLoadPostInterceptor(LookupIterator *it, Register reg)
static void GenerateLoadViaGetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > getter)
void GenerateLoadCallback(Register reg, Handle< ExecutableAccessorInfo > callback)
Handle< Code > CompileStoreViaSetter(Handle< JSObject > object, Handle< Name > name, Handle< JSFunction > setter)
void GenerateStoreField(LookupIterator *lookup, Register value_reg, Label *miss_label)
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
void GenerateRestoreName(Label *label, Handle< Name > name)
Handle< Code > CompileStoreField(LookupIterator *it)
static Builtins::Name SlowBuiltin(Code::Kind kind)
void GenerateStoreTransition(Handle< Map > transition, Handle< Name > name, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Register scratch3, Label *miss_label, Label *slow)
static void GenerateStoreViaSetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > setter)
virtual Register FrontendHeader(Register object_reg, Handle< Name > name, Label *miss)
Handle< Code > CompileStoreTransition(Handle< Map > transition, Handle< Name > name)
Handle< JSObject > holder() const
void NonexistentFrontendHeader(Handle< Name > name, Label *miss, Register scratch1, Register scratch2)
void set_type_for_object(Handle< Object > object)
virtual void FrontendFooter(Handle< Name > name, Label *miss)
Register CheckPrototypes(Register object_reg, Register holder_reg, Register scratch1, Register scratch2, Handle< Name > name, Label *miss, PrototypeCheckType check=CHECK_ALL_MAPS)
Handle< Code > GetCode(Code::Kind kind, Code::StubType type, Handle< Name > name)
Handle< HeapType > type() const
static Handle< Code > Find(Handle< Name > name, Handle< Map > map, Code::Kind kind, CacheHolderFlag cache_holder, Code::StubType type)
static void GenerateDictionaryNegativeLookup(MacroAssembler *masm, Label *miss_label, Register receiver, Handle< Name > name, Register r0, Register r1)
static void GenerateFastApiCall(MacroAssembler *masm, const CallOptimization &optimization, Handle< Map > receiver_map, Register receiver, Register scratch, bool is_store, int argc, Register *values)
Register Frontend(Register object_reg, Handle< Name > name)
static void GenerateCheckPropertyCell(MacroAssembler *masm, Handle< JSGlobalObject > global, Handle< Name > name, Register scratch, Label *miss)
virtual Register FrontendHeader(Register object_reg, Handle< Name > name, Label *miss)
void set_holder(Handle< JSObject > holder)
A class to uniformly access the prototype of any Object and walk its prototype chain; a usage sketch follows this reference list.
bool IsAtEnd(WhereToEnd where_to_end=END_AT_NULL) const
Object * GetCurrent() const
#define PROFILE(IsolateGetter, Call)
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
bool IsSloppyArgumentsElements(ElementsKind kind)
bool IsExternalArrayElementsKind(ElementsKind kind)
const uint32_t kNotStringTag
Handle< T > handle(T *t, Isolate *isolate)
bool IsFastElementsKind(ElementsKind kind)
bool IsFixedTypedArrayElementsKind(ElementsKind kind)
Debugger support for the V8 JavaScript engine.
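For context on the PrototypeIterator entries above, a minimal usage sketch; IsAtEnd() and GetCurrent() are listed in the references, while the constructor form and Advance() are assumed from V8's internal API:

// Sketch: walking a receiver's prototype chain with PrototypeIterator,
// stopping at the null prototype (the default END_AT_NULL behaviour above).
for (PrototypeIterator iter(isolate, receiver); !iter.IsAtEnd(); iter.Advance()) {
  Object* current = iter.GetCurrent();
  if (current->IsJSGlobalObject()) break;  // example: stop at the global object
}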