16 #define __ ACCESS_MASM(masm)
// Fragment of NamedLoadHandlerCompiler::GenerateLoadViaGetter (ia32).
// NOTE(review): lossy extraction — the leading integers are original-file line
// numbers, and interior lines are missing; do not edit logic from this view.
20 MacroAssembler* masm, Handle<HeapType> type, Register receiver,
21 Handle<JSFunction> getter) {
// Only emit the call when a concrete getter function was supplied.
25 if (!getter.is_null()) {
// Zero actual arguments; expected count comes from the getter's metadata.
33 ParameterCount actual(0);
34 ParameterCount expected(getter);
// Record the pc offset right after the getter invocation so the
// deoptimizer can locate the return site inside this stub.
40 masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
// Fragment of PropertyHandlerCompiler::GenerateDictionaryNegativeLookup
// (ia32). Proves a name is absent from a dictionary-mode receiver.
// NOTE(review): lossy extraction — interior lines missing.
51 MacroAssembler* masm, Label* miss_label, Register receiver,
52 Handle<Name>
name, Register scratch0, Register scratch1) {
// receiver must survive while scratch0 is clobbered below.
54 DCHECK(!receiver.is(scratch0));
55 Counters* counters = masm->isolate()->counters();
56 __ IncrementCounter(counters->negative_lookups(), 1);
// Pessimistically bump the miss counter; decremented again on success.
57 __ IncrementCounter(counters->negative_lookups_miss(), 1);
// Bail out (elsewhere, lines not shown) if the receiver's map has an
// interceptor or requires an access check.
61 const int kInterceptorOrAccessCheckNeededMask =
66 kInterceptorOrAccessCheckNeededMask);
// Reuse scratch0 for the receiver's properties backing store.
74 Register properties = scratch0;
// Properties must be a hash table (dictionary mode) for this path.
79 Immediate(masm->isolate()->factory()->hash_table_map()));
84 properties,
name, scratch1);
// Lookup proved the name absent: undo the pessimistic miss count.
86 __ DecrementCounter(counters->negative_lookups_miss(), 1);
// Fragment of GenerateDirectLoadGlobalFunctionPrototype (ia32): loads the
// initial-map prototype of a native-context global function into 'prototype'.
// NOTE(review): lossy extraction — interior lines missing.
91 MacroAssembler* masm,
int index, Register prototype, Label* miss) {
// Resolve the function at 'index' in the native context at compile time.
93 Handle<JSFunction>
function(
94 JSFunction::cast(masm->isolate()->native_context()->get(index)));
// 'prototype' doubles as a scratch register until the final Move below.
96 Register scratch = prototype;
// Load from the context register (esi on ia32) at the computed offset.
98 __ mov(scratch, Operand(
esi, offset));
// Embed the function's initial map directly; presumably guarded by the
// context check above — TODO confirm against the full source.
104 __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
// Fragment of GenerateLoadFunctionPrototype (ia32): fetches the receiver
// function's prototype and returns it in eax (the ia32 result register).
// NOTE(review): lossy extraction — interior lines missing.
111 MacroAssembler* masm, Register receiver, Register scratch1,
112 Register scratch2, Label* miss_label) {
// Jumps to miss_label when the receiver is not a function with a prototype.
113 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
114 __ mov(
eax, scratch1);
// Fragment of PropertyHandlerCompiler::GenerateFastApiCall (ia32): sets up
// registers and tail-calls CallApiFunctionStub for a simple API callback.
// NOTE(review): lossy extraction — interior lines missing; leading integers
// are original-file line numbers.
124 MacroAssembler* masm,
const CallOptimization& optimization,
125 Handle<Map> receiver_map, Register receiver, Register scratch_in,
126 bool is_store,
int argc, Register* values) {
// Push the call arguments (values[] is ordered last-to-first here);
// none of them may alias the receiver or the scratch register.
132 for (
int i = 0;
i < argc;
i++) {
133 Register arg = values[argc - 1 -
i];
134 DCHECK(!receiver.is(arg));
135 DCHECK(!scratch_in.is(arg));
// Only simple API calls (no complex interceptor chains) take this path.
140 DCHECK(optimization.is_simple_api_call());
// Fixed register assignment expected by CallApiFunctionStub on ia32.
143 Register callee =
eax;
144 Register call_data =
ebx;
146 Register api_function_address =
edx;
147 Register scratch =
edi;
// Determine which object in the prototype chain is the expected holder.
150 CallOptimization::HolderLookup holder_lookup;
151 Handle<JSObject> api_holder =
152 optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
153 switch (holder_lookup) {
154 case CallOptimization::kHolderIsReceiver:
157 case CallOptimization::kHolderFound:
158 __ LoadHeapObject(
holder, api_holder);
160 case CallOptimization::kHolderNotFound:
165 Isolate* isolate = masm->isolate();
166 Handle<JSFunction>
function = optimization.constant_function();
167 Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
168 Handle<Object> call_data_obj(api_call_info->data(), isolate);
// Materialize the callee JSFunction in eax.
171 __ LoadHeapObject(callee,
function);
173 bool call_data_undefined =
false;
// call_data in new space cannot be embedded directly (it may move);
// load it indirectly through the CallHandlerInfo instead.
175 if (isolate->heap()->InNewSpace(*call_data_obj)) {
176 __ mov(scratch, api_call_info);
178 }
else if (call_data_obj->IsUndefined()) {
179 call_data_undefined =
true;
180 __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
// Old-space call data can be embedded as an immediate.
182 __ mov(call_data, call_data_obj);
// Raw C entry point of the API callback, placed in edx for the stub.
186 Address function_address = v8::ToCData<Address>(api_call_info->callback());
187 __ mov(api_function_address, Immediate(function_address));
// Tail call: the stub performs the actual API invocation and return.
190 CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
191 __ TailCallStub(&stub);
// Fragment of PropertyHandlerCompiler::GenerateCheckPropertyCell (ia32):
// verifies a global property cell still holds the hole (property absent).
// NOTE(review): lossy extraction — interior lines missing.
199 MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name>
name,
200 Register scratch, Label* miss) {
// The cell must hold the hole at compile time or this check is moot.
202 DCHECK(cell->value()->IsTheHole());
203 Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
// With the serializer enabled, cell addresses cannot be embedded as raw
// operands; load the cell handle into scratch and compare through it.
204 if (masm->serializer_enabled()) {
205 __ mov(scratch, Immediate(cell));
207 Immediate(the_hole));
// Otherwise compare the cell's value slot directly against the hole.
209 __ cmp(Operand::ForCell(cell), Immediate(the_hole));
// Fragment of NamedStoreHandlerCompiler::GenerateStoreViaSetter (ia32):
// mirror of GenerateLoadViaGetter, invoking a JS setter with one argument.
// NOTE(review): lossy extraction — interior lines missing.
216 MacroAssembler* masm, Handle<HeapType> type, Register receiver,
217 Handle<JSFunction> setter) {
// Only emit the call when a concrete setter function was supplied.
227 if (!setter.is_null()) {
// Setters take exactly one actual argument (the stored value).
236 ParameterCount actual(1);
237 ParameterCount expected(setter);
// Record the return-site pc offset for the deoptimizer.
243 masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
// Fragment of PushInterceptorArguments (ia32): pushes the argument block
// consumed by the named-interceptor runtime entries.
// NOTE(review): lossy extraction — interior lines missing.
256 static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
257 Register holder, Register
name,
258 Handle<JSObject> holder_obj) {
265 Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
// InterceptorInfo must be in old space so it can be embedded as an
// immediate below without being moved by a scavenge.
266 DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
// 'name' has already been pushed (not shown); reuse its register.
267 Register scratch =
name;
268 __ mov(scratch, Immediate(interceptor));
// Fragment of CompileCallLoadPropertyWithInterceptor (ia32): pushes the
// interceptor argument block, then calls the IC runtime entry named by 'id'.
// NOTE(review): lossy extraction — interior lines missing.
275 static void CompileCallLoadPropertyWithInterceptor(
276 MacroAssembler* masm, Register receiver, Register holder, Register
name,
278 PushInterceptorArguments(masm, receiver, holder,
name, holder_obj);
279 __ CallExternalReference(ExternalReference(IC_Utility(
id), masm->isolate()),
// Fragments: StoreIC_PushArgs (body not shown) plus the slow paths of the
// store handler compilers, each pushing receiver/name/value (3 args) and
// tail-calling the matching runtime entry.
// NOTE(review): lossy extraction — these are three separate functions with
// their interior lines missing.
284 static void StoreIC_PushArgs(MacroAssembler* masm) {
// --- NamedStoreHandlerCompiler::GenerateSlow (per runtime id below) ---
301 StoreIC_PushArgs(masm);
304 ExternalReference ref(IC_Utility(IC::kStoreIC_Slow), masm->isolate());
// 3 arguments pushed, 1 result expected.
305 __ TailCallExternalReference(ref, 3, 1);
// --- ElementHandlerCompiler::GenerateStoreSlow (keyed variant) ---
311 StoreIC_PushArgs(masm);
314 ExternalReference ref(IC_Utility(IC::kKeyedStoreIC_Slow), masm->isolate());
315 __ TailCallExternalReference(ref, 3, 1);
320 #define __ ACCESS_MASM(masm())
// Fragment of PropertyAccessCompiler::GenerateRestoreName (ia32): only
// rebinds/restores when the label was actually used — TODO confirm, the
// body is missing from this extraction.
325 if (!label->is_unused()) {
// Fragment of NamedStoreHandlerCompiler::GenerateStoreTransition (ia32):
// validates the stored value against the transition target's descriptor
// (constant / smi / heap-object / double), grows the property backing store
// via the runtime when needed, then writes the field with a write barrier.
// NOTE(review): lossy extraction — interior lines missing; leading integers
// are original-file line numbers.
335 Handle<Map> transition, Handle<Name>
name, Register receiver_reg,
336 Register storage_reg, Register value_reg, Register scratch1,
337 Register scratch2, Register unused, Label* miss_label, Label* slow) {
// The transitioned-to descriptor dictates the required representation.
338 int descriptor = transition->LastAdded();
339 DescriptorArray* descriptors = transition->instance_descriptors();
340 PropertyDetails details = descriptors->GetDetails(descriptor);
341 Representation representation = details.representation();
342 DCHECK(!representation.IsNone());
// Constant fields: value must equal the descriptor's constant exactly.
345 Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
346 __ CmpObject(value_reg, constant);
348 }
else if (representation.IsSmi()) {
349 __ JumpIfNotSmi(value_reg, miss_label);
350 }
else if (representation.IsHeapObject()) {
351 __ JumpIfSmi(value_reg, miss_label);
// Heap-object fields: value's map must be in the field type's class set.
352 HeapType* field_type = descriptors->GetFieldType(descriptor);
353 HeapType::Iterator<Map> it = field_type->Classes();
357 __ CompareMap(value_reg, it.Current());
363 __ j(
equal, &do_store, Label::kNear);
367 }
else if (representation.IsDouble()) {
// Double fields: box the value into a fresh mutable HeapNumber.
368 Label do_store, heap_number;
369 __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow,
MUTABLE);
371 __ JumpIfNotSmi(value_reg, &heap_number);
// Smi path: untag, convert via x87 fild (integer load), retag.
372 __ SmiUntag(value_reg);
374 __ fild_s(Operand(
esp, 0));
376 __ SmiTag(value_reg);
// Heap-number path: must already be a HeapNumber, else miss.
379 __ bind(&heap_number);
380 __ CheckMap(value_reg, isolate()->factory()->heap_number_map(), miss_label,
// Stub store does not handle access-checked objects.
389 DCHECK(!transition->is_access_check_needed());
// Out of slack: call the shared ExtendStorage runtime entry to grow
// the properties array before storing.
392 if (details.type() ==
FIELD &&
393 Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
397 __ push(receiver_reg);
398 __ push(Immediate(transition));
401 __ TailCallExternalReference(
402 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
// Fast path: install the new map directly.
409 __ mov(scratch1, Immediate(transition));
422 int index = transition->instance_descriptors()->GetFieldIndex(
423 transition->LastAdded());
// Negative index means in-object; non-negative indexes the
// out-of-object properties array.
428 index -= transition->inobject_properties();
// In-object store: offset is relative to the object start.
435 int offset = transition->instance_size() + (index *
kPointerSize);
436 if (representation.IsDouble()) {
// Smis need no write barrier; heap values do.
442 if (!representation.IsSmi()) {
444 if (!representation.IsDouble()) {
445 __ mov(storage_reg, value_reg);
447 __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
// Out-of-object store: same shape, but through the properties array
// (held in scratch1), so the barrier's object register differs.
455 if (representation.IsDouble()) {
461 if (!representation.IsSmi()) {
463 if (!representation.IsDouble()) {
464 __ mov(storage_reg, value_reg);
466 __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
// Fragment of NamedStoreHandlerCompiler::GenerateStoreField (ia32): checks
// the value against the field's heap-object type set, then delegates the
// actual store to StoreFieldStub.
// NOTE(review): lossy extraction — interior lines missing.
480 DCHECK(lookup->representation().IsHeapObject());
481 __ JumpIfSmi(value_reg, miss_label);
// Value's map must match one of the field type's allowed classes.
482 HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
485 __ CompareMap(value_reg, it.Current());
491 __ j(
equal, &do_store, Label::kNear);
// Tail call into the shared field-store stub for this index/representation.
495 StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
496 lookup->representation());
497 GenerateTailCall(masm(), stub.GetCode());
// Fragment of PropertyHandlerCompiler::CheckPrototypes / FrontendHeader
// (ia32): walks the prototype chain from the receiver's map to the holder's
// map, emitting map checks, global-proxy access checks and dictionary-mode
// negative lookups along the way; returns the register holding the holder.
// NOTE(review): lossy extraction — interior lines missing; leading integers
// are original-file line numbers.
502 Register object_reg, Register holder_reg, Register scratch1,
503 Register scratch2, Handle<Name>
name, Label* miss,
// Scratch registers must not alias the object/holder registers.
508 DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
509 DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
510 !scratch2.is(scratch1));
// 'reg' tracks the current object in the chain as we walk.
513 Register reg = object_reg;
517 if (
type()->IsConstant())
520 Handle<Map> current_map = receiver_map;
// Walk until the holder's map is reached.
524 while (!current_map.is_identical_to(holder_map)) {
// Only global proxies may need access checks on this path.
529 DCHECK(current_map->IsJSGlobalProxyMap() ||
530 !current_map->is_access_check_needed());
532 prototype =
handle(JSObject::cast(current_map->prototype()));
// Dictionary-mode (non-global) objects get a compiled negative
// lookup to prove the name is absent at this level.
533 if (current_map->is_dictionary_map() &&
534 !current_map->IsJSGlobalObjectMap()) {
535 DCHECK(!current_map->IsJSGlobalProxyMap());
// Negative lookups require unique (internalized/symbol) names.
536 if (!
name->IsUniqueName()) {
540 DCHECK(current.is_null() ||
541 current->property_dictionary()->FindEntry(
name) ==
// New-space prototypes may move, so they cannot be embedded as
// immediates; load them from the map instead (also at depth 1).
551 bool in_new_space = heap()->InNewSpace(*prototype);
556 bool load_prototype_from_map = in_new_space || depth == 1;
// Global proxies need a security check; global objects need a
// property-cell hole check for the name.
567 if (current_map->IsJSGlobalProxyMap()) {
568 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
569 }
else if (current_map->IsJSGlobalObjectMap()) {
571 name, scratch2, miss);
574 if (load_prototype_from_map) {
581 if (load_prototype_from_map) {
584 __ mov(reg, prototype);
// Advance to the next link in the chain.
590 current_map =
handle(current->map());
// Log chain depth for profiling map-check cost.
594 LOG(isolate(), IntEvent(
"check-maps-depth", depth + 1));
// Final holder-level checks after the walk.
602 DCHECK(current_map->IsJSGlobalProxyMap() ||
603 !current_map->is_access_check_needed());
604 if (current_map->IsJSGlobalProxyMap()) {
605 __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
// Fragments of the load/store handler compilers' FrontendFooter (ia32):
// when the miss label was bound, emit the miss-builtin tail call.
// NOTE(review): lossy extraction — two separate functions, bodies missing.
614 if (!miss->is_unused()) {
618 TailCallBuiltin(masm(), MissBuiltin(kind()));
625 if (!miss->is_unused()) {
629 TailCallBuiltin(masm(), MissBuiltin(kind()));
// Fragment of NamedLoadHandlerCompiler::GenerateLoadCallback (ia32): builds
// the PropertyCallbackArguments block on the stack and tail-calls
// CallApiGetterStub to invoke the ExecutableAccessorInfo getter.
// NOTE(review): lossy extraction — interior lines missing.
636 Register reg, Handle<ExecutableAccessorInfo> callback) {
638 DCHECK(!scratch3().is(reg));
// callback->data() in new space cannot be pushed as an immediate (it may
// move); load the AccessorInfo into scratch2 and push through it.
649 if (isolate()->heap()->InNewSpace(callback->data())) {
650 DCHECK(!scratch2().is(reg));
651 __ mov(scratch2(), Immediate(callback));
654 __ push(Immediate(Handle<Object>(callback->data(), isolate())));
// Return-value and return-value-default slots, pre-filled with undefined.
656 __ push(Immediate(isolate()->factory()->undefined_value()));
658 __ push(Immediate(isolate()->factory()->undefined_value()));
// Isolate pointer slot (raw address pushed as an int on ia32).
659 __ push(Immediate(
reinterpret_cast<int>(isolate())));
// Raw C entry point of the accessor getter.
672 Address function_address = v8::ToCData<Address>(callback->getter());
673 __ mov(getter_address, Immediate(function_address));
675 CallApiGetterStub stub(isolate());
676 __ TailCallStub(&stub);
// Fragment of NamedLoadHandlerCompiler::GenerateLoadConstant (ia32):
// materializes the constant in eax, the ia32 load-IC result register.
682 __ LoadObject(
eax, value);
// Fragment of NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup
// (ia32): calls the interceptor "only" runtime entry inside an internal
// frame; if it returns the no-result sentinel, falls through to the regular
// lookup continuation.
// NOTE(review): lossy extraction — interior lines missing.
688 LookupIterator* it, Register holder_reg) {
// An interceptor getter must actually exist for this path.
690 DCHECK(!
holder()->GetNamedInterceptor()->getter()->IsUndefined());
695 DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
// A prototype check is still needed when the interceptor holder differs
// from the lookup's final holder.
701 bool must_perform_prototype_check =
702 !
holder().is_identical_to(it->GetHolder<JSObject>());
// Preserve the receiver register across the call when the follow-up
// still needs it (and it differs from holder_reg).
703 bool must_preserve_receiver_reg =
704 !receiver().is(holder_reg) &&
712 if (must_preserve_receiver_reg) {
// Probe the interceptor without performing the full lookup.
721 CompileCallLoadPropertyWithInterceptor(
722 masm(), receiver(), holder_reg, this->
name(),
holder(),
723 IC::kLoadPropertyWithInterceptorOnly)
// The sentinel in eax means the interceptor produced no result.
727 Label interceptor_failed;
728 __ cmp(
eax, factory()->no_interceptor_result_sentinel());
729 __ j(
equal, &interceptor_failed);
// Interceptor produced a value: leave the frame and return it.
730 frame_scope.GenerateLeaveFrame();
734 __ bind(&interceptor_failed);
// Debug builds poison clobbered registers with kZapValue to surface
// accidental reuse after the frame is torn down.
735 if (FLAG_debug_code) {
736 __ mov(receiver(), Immediate(bit_cast<int32_t>(
kZapValue)));
737 __ mov(holder_reg, Immediate(bit_cast<int32_t>(
kZapValue)));
// Restore the receiver saved above before the follow-up lookup.
743 if (must_preserve_receiver_reg) {
// Fragment of NamedLoadHandlerCompiler::GenerateLoadInterceptor (ia32):
// pushes the interceptor arguments and tail-calls the full
// LoadPropertyWithInterceptor runtime entry.
// NOTE(review): lossy extraction — interior lines missing.
756 DCHECK(!
holder()->GetNamedInterceptor()->getter()->IsUndefined());
759 PushInterceptorArguments(masm(), receiver(), holder_reg, this->
name(),
763 ExternalReference ref = ExternalReference(
764 IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
765 __ TailCallExternalReference(
// Fragment of NamedStoreHandlerCompiler::CompileStoreCallback (ia32):
// tail-calls the StoreCallbackProperty runtime entry with 5 pushed args.
// NOTE(review): lossy extraction — interior lines missing.
771 Handle<JSObject>
object, Handle<Name>
name,
772 Handle<ExecutableAccessorInfo> callback) {
784 ExternalReference store_callback_property =
785 ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
// 5 arguments pushed (not all shown here), 1 result expected.
786 __ TailCallExternalReference(store_callback_property, 5, 1);
// Fragment of NamedStoreHandlerCompiler::CompileStoreInterceptor (ia32):
// tail-calls the StorePropertyWithInterceptor runtime entry (3 args pushed
// elsewhere, 1 result expected).
802 ExternalReference store_ic_property = ExternalReference(
803 IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
804 __ TailCallExternalReference(store_ic_property, 3, 1);
// Fragment of NamedLoadHandlerCompiler::CompileLoadGlobal (ia32): loads a
// global property's value out of its PropertyCell, guarding against a
// deleted (hole-valued) property when the property is configurable.
// NOTE(review): lossy extraction — interior lines missing.
817 Handle<PropertyCell> cell, Handle<Name>
name,
bool is_configurable) {
// With the serializer enabled the cell address cannot be used as a raw
// operand; load the cell handle first and read its value slot via it.
823 if (masm()->serializer_enabled()) {
824 __ mov(result, Immediate(cell));
827 __ mov(result, Operand::ForCell(cell));
// Configurable properties can be deleted, leaving the hole: check and
// miss (branch not shown) in that case.
831 if (is_configurable) {
832 __ cmp(result, factory()->the_hole_value());
834 }
else if (FLAG_debug_code) {
// Non-configurable cells must never contain the hole; assert in
// debug builds.
835 __ cmp(result, factory()->the_hole_value());
836 __ Check(
not_equal, kDontDeleteCellsCannotContainTheHole);
839 Counters* counters = isolate()->counters();
840 __ IncrementCounter(counters->named_load_global_stub(), 1);
static const Register function_address()
static const int kDataOffset
static const int kValueOffset
static int SlotOffset(int index)
static const int kReturnValueOffset
static void GenerateStoreSlow(MacroAssembler *masm)
static const int kDataOffset
static const int kHeaderSize
static const int kNativeContextOffset
static const int kGlobalProxyOffset
static Handle< T > cast(Handle< S > that)
static Handle< T > null()
static const int kNotFound
static const int kValueOffset
static const int kMapOffset
static Handle< Map > TypeToMap(HeapType *type, Isolate *isolate)
static Handle< PropertyCell > EnsurePropertyCell(Handle< JSGlobalObject > global, Handle< Name > name)
static const int kPropertiesOffset
static const int kBitFieldOffset
static const int kIsAccessCheckNeeded
static const int kHasNamedInterceptor
static const int kPrototypeOffset
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kInterceptorArgsNameIndex
static const int kInterceptorArgsInfoIndex
Handle< Code > CompileLoadGlobal(Handle< PropertyCell > cell, Handle< Name > name, bool is_configurable)
static const int kInterceptorArgsThisIndex
virtual void FrontendFooter(Handle< Name > name, Label *miss)
static void GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler *masm, int index, Register prototype, Label *miss)
void GenerateLoadInterceptorWithFollowup(LookupIterator *it, Register holder_reg)
virtual Register FrontendHeader(Register object_reg, Handle< Name > name, Label *miss)
static const int kInterceptorArgsHolderIndex
void GenerateLoadConstant(Handle< Object > value)
void GenerateLoadInterceptor(Register holder_reg)
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
static const int kInterceptorArgsLength
void GenerateLoadPostInterceptor(LookupIterator *it, Register reg)
static void GenerateLoadViaGetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > getter)
void GenerateLoadCallback(Register reg, Handle< ExecutableAccessorInfo > callback)
Handle< Code > CompileStoreInterceptor(Handle< Name > name)
virtual void FrontendFooter(Handle< Name > name, Label *miss)
void GenerateStoreField(LookupIterator *lookup, Register value_reg, Label *miss_label)
Handle< Code > CompileStoreCallback(Handle< JSObject > object, Handle< Name > name, Handle< ExecutableAccessorInfo > callback)
static void GenerateSlow(MacroAssembler *masm)
void GenerateRestoreName(Label *label, Handle< Name > name)
void GenerateStoreTransition(Handle< Map > transition, Handle< Name > name, Register receiver_reg, Register name_reg, Register value_reg, Register scratch1, Register scratch2, Register scratch3, Label *miss_label, Label *slow)
static void GenerateStoreViaSetter(MacroAssembler *masm, Handle< HeapType > type, Register receiver, Handle< JSFunction > setter)
static const int kHolderIndex
static const int kDataIndex
static const int kIsolateIndex
static const int kThisIndex
static const int kReturnValueDefaultValueIndex
Handle< JSObject > holder() const
Register CheckPrototypes(Register object_reg, Register holder_reg, Register scratch1, Register scratch2, Handle< Name > name, Label *miss, PrototypeCheckType check=CHECK_ALL_MAPS)
Handle< Code > GetCode(Code::Kind kind, Code::StubType type, Handle< Name > name)
Handle< HeapType > type() const
static void GenerateDictionaryNegativeLookup(MacroAssembler *masm, Label *miss_label, Register receiver, Handle< Name > name, Register r0, Register r1)
static void GenerateFastApiCall(MacroAssembler *masm, const CallOptimization &optimization, Handle< Map > receiver_map, Register receiver, Register scratch, bool is_store, int argc, Register *values)
Register Frontend(Register object_reg, Handle< Name > name)
static void GenerateCheckPropertyCell(MacroAssembler *masm, Handle< JSGlobalObject > global, Handle< Name > name, Register scratch, Label *miss)
static const int kContextOffset
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
#define LOG(isolate, Call)
#define DCHECK(condition)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
Operand FieldOperand(Register object, int offset)
TypeImpl< HeapTypeConfig > HeapType
Handle< T > handle(T *t, Isolate *isolate)
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
Debugger support for the V8 JavaScript engine.
bool is(Register reg) const