#if V8_TARGET_ARCH_ARM64

// ...

#define __ ACCESS_MASM(masm)
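// Throughout this file, `__` expands to ACCESS_MASM(masm), so assembler calls
// read as `__ Mov(...)`, `__ Push(...)`, and so on. Further down it is
// redefined as ACCESS_MASM(masm()) for the handler-compiler member functions,
// which reach their MacroAssembler through the masm() accessor.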
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  // ...
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access
  // checks.
  Register map = scratch1;
  // ...
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
  __ B(ne, miss_label);
  // ...
  Register properties = scratch0;
  // ...
  // Check that the properties array is a dictionary.
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ Bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(isolate->native_context()->get(index)));

  // Check that we are still in the same context.
  Register scratch = prototype;
  // ...
  __ Cmp(scratch, Operand(function));
  __ B(ne, miss);

  // Load its initial map and the prototype from it.
  __ Mov(prototype, Operand(Handle<Map>(function->initial_map())));
  __ Ldr(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
}

void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // ...
}

void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  __ Mov(scratch, Operand(cell));
  // ...
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
}

static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  // ...
  __ Push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  DCHECK(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ Mov(scratch, Operand(interceptor));
  __ Push(scratch, receiver, holder);
}

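// PushInterceptorArguments lays out the interceptor call arguments in the
// order assumed by the NamedLoadHandlerCompiler::kInterceptorArgs* indices:
// name, interceptor info, receiver (this) and holder, for a total of
// kInterceptorArgsLength stack entries.
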
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  __ CallExternalReference(ExternalReference(IC_Utility(id), masm->isolate()),
                           NamedLoadHandlerCompiler::kInterceptorArgsLength);
}

void PropertyHandlerCompiler::GenerateFastApiCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch,
    bool is_store, int argc, Register* values) {
  // ...
  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);
  // Write the arguments to the stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    // ...
    queue.Queue(arg);
  }
  queue.PushQueued();

  DCHECK(optimization.is_simple_api_call());

  // ABI for CallApiFunctionStub.
  Register callee = x0;
  Register call_data = x4;
  // ...
  Register api_function_address = x1;

  // Put the holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder =
      optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      // ...
      break;
    case CallOptimization::kHolderFound:
      // ...
      break;
    case CallOptimization::kHolderNotFound:
      // ...
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put the callee in place.
  __ LoadObject(callee, function);

  // Put the call data in place.
  bool call_data_undefined = false;
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ LoadObject(call_data, api_call_info);
    // ...
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
  } else {
    __ LoadObject(call_data, call_data_obj);
  }

  // Put the API function address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(
      &fun, ExternalReference::DIRECT_API_CALL, masm->isolate());
  __ Mov(api_function_address, ref);

  // Jump to the stub.
  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}

void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<HeapType> type, Register receiver,
    Handle<JSFunction> setter) {
  // ...
  if (!setter.is_null()) {
    // Call the JavaScript setter with the receiver and value on the stack.
    // ...
    ParameterCount actual(1);
    ParameterCount expected(setter);
    // ...
  } else {
    // Remember where to continue after deoptimization.
    masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
  }
  // ...
}

void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<HeapType> type, Register receiver,
    Handle<JSFunction> getter) {
  // ...
  if (!getter.is_null()) {
    // Call the JavaScript getter with the receiver on the stack.
    // ...
    ParameterCount actual(0);
    ParameterCount expected(getter);
    // ...
  } else {
    // Remember where to continue after deoptimization.
    masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
  }
  // ...
}

void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  // ...
  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  ASM_LOCATION("ElementHandlerCompiler::GenerateStoreSlow");
  // ...
  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}

#undef __
#define __ ACCESS_MASM(masm())

Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  // ...
  // Get the value from the cell.
  __ Mov(result, Operand(cell));
  // ...
  // Check for the hole if the property can actually be deleted.
  if (is_configurable) {
    __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, x1, x3);
  // ...
}

Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
    Handle<Name> name) {
  ASM_LOCATION("NamedStoreHandlerCompiler::CompileStoreInterceptor");
  // ...
  // Tail-call into the runtime system.
  ExternalReference store_ic_property = ExternalReference(
      IC_Utility(IC::kStorePropertyWithInterceptor), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);
  // ...
}

void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    // ...
  }
}

void NamedStoreHandlerCompiler::GenerateStoreTransition(
    Handle<Map> transition, Handle<Name> name, Register receiver_reg,
    Register storage_reg, Register value_reg, Register scratch1,
    Register scratch2, Register scratch3, Label* miss_label, Label* slow) {
  // ...
  DCHECK(!AreAliased(receiver_reg, storage_reg, value_reg, scratch1, scratch2,
                     scratch3));
  // ...
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  DCHECK(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), isolate());
    __ LoadObject(scratch1, constant);
    __ Cmp(value_reg, scratch1);
    __ B(ne, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    // ...
    __ CompareMap(scratch1, it.Current());
    // ...
    __ B(ne, miss_label);
    // ...
  } else if (representation.IsDouble()) {
    UseScratchRegisterScope temps(masm());
    DoubleRegister temp_double = temps.AcquireD();
    // ...
    Label do_store;
    __ JumpIfSmi(value_reg, &do_store);
    // ...
    __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex, miss_label,
                DONT_DO_SMI_CHECK);
    // ...
    __ Bind(&do_store);
    __ AllocateHeapNumber(storage_reg, slow, scratch1, scratch2, temp_double,
                          NoReg, MUTABLE);
  }

  // Stubs are never generated for objects that require access checks.
  DCHECK(!transition->is_access_check_needed());

  // Perform the map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      Map::cast(transition->GetBackPointer())->unused_property_fields() == 0) {
    // The properties must be extended before the value can be stored, so jump
    // into a runtime call that extends the properties array.
    __ Mov(scratch1, Operand(transition));
    __ Push(receiver_reg, scratch1, value_reg);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          isolate()),
        3, 1);
    return;
  }

  // Update the map of the object.
  __ Mov(scratch1, Operand(transition));
  // ...

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= transition->inobject_properties();

  SmiCheck smi_check =
      representation.IsTagged() ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  Register prop_reg = representation.IsDouble() ? storage_reg : value_reg;

  if (index < 0) {
    // Set the property straight into the object.
    int offset = transition->instance_size() + (index * kPointerSize);
    // ...
    if (!representation.IsSmi()) {
      // Update the write barrier for the object.
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg, offset, storage_reg, scratch1,
                          kLRHasNotBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // ...
    if (!representation.IsSmi()) {
      // Update the write barrier for the properties array.
      if (!representation.IsDouble()) {
        __ Mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1, offset, storage_reg, receiver_reg,
                          kLRHasNotBeenSaved, kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET, smi_check);
    }
  }
  // ...
}

void NamedStoreHandlerCompiler::GenerateStoreField(LookupIterator* lookup,
                                                   Register value_reg,
                                                   Label* miss_label) {
  DCHECK(lookup->representation().IsHeapObject());
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = lookup->GetFieldType()->Classes();
  // ...
  __ CompareMap(scratch1(), it.Current());
  // ...
  __ B(ne, miss_label);
  // ...
  StoreFieldStub stub(isolate(), lookup->GetFieldIndex(),
                      lookup->representation());
  GenerateTailCall(masm(), stub.GetCode());
}

Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss,
    PrototypeCheckType check) {
  // ...
  // Keep track of the current object in register reg.
  Register reg = object_reg;
  // ...
  if (type()->IsConstant()) {
    // ...
  }
  // ...
  Handle<Map> current_map = receiver_map;
  // ...
  // Traverse the prototype chain and check the maps in the prototype chain
  // for fast and global objects, or do a negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    // ...
    // Only global objects and objects that do not require access checks are
    // allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        // ...
      }
      DCHECK(current.is_null() || (current->property_dictionary()->FindEntry(
                                       name) == NameDictionary::kNotFound));
      // ...
    } else {
      bool load_prototype_from_map =
          heap()->InNewSpace(*prototype) || depth == 1;
      Register map_reg = scratch1;
      // ...
      // Check access rights to the global object. This has to happen after
      // the map check, so we know the object is actually a global object.
      if (current_map->IsJSGlobalProxyMap()) {
        UseScratchRegisterScope temps(masm());
        __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      }
      // ...
      if (load_prototype_from_map) {
        // ...
      } else {
        __ Mov(reg, Operand(prototype));
      }
    }

    // Go to the next object in the prototype chain.
    // ...
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
  // ...
  // Perform a security check for access to the global object.
  DCHECK(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}

void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ...
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    // ...
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    // ...
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    // ...
  }
}

void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(x0, value);
  // ...
}

void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  // ...
  if (heap()->InNewSpace(callback->data())) {
    __ Mov(scratch3(), Operand(callback));
    // ...
  } else {
    __ Mov(scratch3(), Operand(Handle<Object>(callback->data(), isolate())));
  }
  __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
  __ Mov(scratch2(), Operand(ExternalReference::isolate_address(isolate())));
  // ...
  Register args_addr = scratch2();
  // ...
  Register getter_address_reg = x2;

  // Set up the call to the getter.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ Mov(getter_address_reg, ref);

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  // ...
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // ...
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save the necessary data before invoking the interceptor; this requires a
  // frame so that the GC is aware of the pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    // ...
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        IC::kLoadPropertyWithInterceptorOnly);

    // If the interceptor provided a value for the property, return it
    // immediately.
    Label interceptor_failed;
    __ JumpIfRoot(x0, Heap::kNoInterceptorResultSentinelRootIndex,
                  &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    // ...
    __ Bind(&interceptor_failed);
    if (must_preserve_receiver_reg) {
      __ Pop(this->name(), holder_reg, receiver());
    } else {
      __ Pop(this->name(), holder_reg);
    }
    // Leave the internal frame.
  }
  // ...
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  // ...
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  ExternalReference ref = ExternalReference(
      IC_Utility(IC::kLoadPropertyWithInterceptor), isolate());
  __ TailCallExternalReference(
      ref, NamedLoadHandlerCompiler::kInterceptorArgsLength, 1);
}

Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  ASM_LOCATION("NamedStoreHandlerCompiler::CompileStoreCallback");
  Register holder_reg = Frontend(receiver(), name);
  // ...
  __ Mov(scratch1(), Operand(callback));
  __ Mov(scratch2(), Operand(name));
  __ Push(receiver(), holder_reg, scratch1(), scratch2(), value());

  // Tail-call into the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);
  // ...
}
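
// ...

#undef __

#endif  // V8_TARGET_ARCH_ARM64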