// --- Array constructor descriptor setup (fragment) ---------------------------
// NOTE(review): this excerpt elides interior lines (the embedded original line
// numbers skip, e.g. 26 -> 32), so the fragment is not compilable as shown.
// Code is kept byte-identical; only comments were added.
//
// Visible behavior: the deoptimization handler is taken from
// Runtime::kArrayConstructor (->entry); when constant_stack_parameter_count
// is 0 the descriptor is initialized without a count register, otherwise the
// argument-count register eax is passed to Initialize() as well.
24 static void InitializeArrayConstructorDescriptor(
25 Isolate* isolate, CodeStubDescriptor* descriptor,
26 int constant_stack_parameter_count) {
32 Runtime::kArrayConstructor)->
entry;
34 if (constant_stack_parameter_count == 0) {
35 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
38 descriptor->Initialize(
eax, deopt_handler, constant_stack_parameter_count,
// Same shape as the helper above, but the deopt handler is
// Runtime::kInternalArrayConstructor (for InternalArray stubs).
44 static void InitializeInternalArrayConstructorDescriptor(
45 Isolate* isolate, CodeStubDescriptor* descriptor,
46 int constant_stack_parameter_count) {
51 Runtime::kInternalArrayConstructor)->
entry;
53 if (constant_stack_parameter_count == 0) {
54 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
57 descriptor->Initialize(
eax, deopt_handler, constant_stack_parameter_count,
// --- InitializeDescriptor thunks (fragment) ----------------------------------
// Each stub forwards to the shared helper with its constant stack parameter
// count: 0 for the no-argument stubs, 1 for the single-argument stubs, and
// -1 for the N-argument stubs (presumably meaning "variable argument count"
// — confirm against CodeStubDescriptor::Initialize).
// NOTE(review): closing braces are elided in this excerpt; code kept
// byte-identical, comments only.
63 void ArrayNoArgumentConstructorStub::InitializeDescriptor(
64 CodeStubDescriptor* descriptor) {
65 InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
69 void ArraySingleArgumentConstructorStub::InitializeDescriptor(
70 CodeStubDescriptor* descriptor) {
71 InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
75 void ArrayNArgumentsConstructorStub::InitializeDescriptor(
76 CodeStubDescriptor* descriptor) {
77 InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
81 void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
82 CodeStubDescriptor* descriptor) {
83 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
87 void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
88 CodeStubDescriptor* descriptor) {
89 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
93 void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
94 CodeStubDescriptor* descriptor) {
95 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
99 #define __ ACCESS_MASM(masm)
// --- Stub miss handler (fragment) --------------------------------------------
// Visible part: bumps the code_stubs counter, then pushes every environment
// parameter register named by the call interface descriptor and calls the
// external "miss" reference with param_count arguments. The DCHECK asserts
// that when parameters exist the last one lives in eax (presumably so the
// stub's result-register convention holds — confirm). The function header
// before the `miss` parameter is elided in this excerpt.
103 ExternalReference miss) {
105 isolate()->counters()->code_stubs()->Increment();
107 CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
108 int param_count = descriptor.GetEnvironmentParameterCount();
112 DCHECK(param_count == 0 ||
113 eax.
is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
115 for (
int i = 0;
i < param_count; ++
i) {
116 __ push(descriptor.GetEnvironmentParameterRegister(
i));
118 __ CallExternalReference(miss, param_count);
// --- Store-buffer overflow stub (fragment) -----------------------------------
// Reserves 108 bytes of stack and saves the full x87 FPU state there with
// fnsave, makes a GC-safe C call to the isolate's store-buffer-overflow
// function (isolate address is the one visible argument), then restores the
// FPU state with frstor and releases the 108 bytes. 108 is the size of the
// fnsave image on ia32.
132 __ sub(
esp, Immediate(108));
133 __ fnsave(Operand(
esp, 0));
135 const int argument_count = 1;
137 AllowExternalCallThatCantCauseGC scope(masm);
138 __ PrepareCallCFunction(argument_count,
ecx);
140 Immediate(ExternalReference::isolate_address(isolate())));
142 ExternalReference::store_buffer_overflow_function(isolate()),
146 __ frstor(Operand(
esp, 0));
147 __ add(
esp, Immediate(108));
// --- FloatingPointHelper (fragment) ------------------------------------------
// Static-only utility class (AllStatic) used by the comparison code below:
// LoadFloatOperand loads a number register onto the x87 stack;
// CheckFloatOperands (signature truncated here) verifies operands are
// numbers before a floating-point compare.
154 class FloatingPointHelper :
public AllStatic {
165 static void LoadFloatOperand(MacroAssembler* masm, Register number);
170 static void CheckFloatOperands(MacroAssembler* masm,
// --- DoubleToIStub (fragment) ------------------------------------------------
// Truncating double-to-int32 conversion without SSE. The double's mantissa
// and exponent words are loaded from input_reg/double_offset; the mantissa
// is shifted right by the (adjusted) exponent via shrd + shr_cl, and the
// sign is checked at &check_negative (the cmp of exponent_operand against 0
// presumably tests the sign bit — confirm). Interior lines are elided in
// this excerpt; code kept byte-identical, comments only.
177 Register input_reg = this->
source();
181 Label check_negative, process_64_bits, done, done_no_stash;
183 int double_offset =
offset();
// Pick a scratch register that aliases neither the input nor the final
// result register.
194 Register scratch_candidates[3] = {
ebx,
edx,
edi };
195 for (
int i = 0;
i < 3;
i++) {
196 scratch1 = scratch_candidates[
i];
197 if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1))
break;
// ecx is needed as the variable shift count, so if the caller wants the
// result in ecx, compute into eax and move to ecx at the end.
202 Register result_reg = final_result_reg.
is(
ecx) ?
eax : final_result_reg;
206 Register save_reg = final_result_reg.
is(
ecx) ?
eax :
ecx;
// If the input is not already on the stack (input_reg != esp), a copy of
// the exponent word is stashed on the stack for the sign check later.
210 bool stash_exponent_copy = !input_reg.is(
esp);
211 __ mov(scratch1, mantissa_operand);
212 __ mov(
ecx, exponent_operand);
213 if (stash_exponent_copy)
__ push(
ecx);
223 __ sub(
ecx, Immediate(delta));
224 __ xor_(result_reg, result_reg);
225 __ cmp(
ecx, Immediate(31));
228 __ jmp(&check_negative);
230 __ bind(&process_64_bits);
232 __ sub(
ecx, Immediate(delta));
234 if (stash_exponent_copy) {
237 __ mov(result_reg, exponent_operand);
// 64-bit right shift across the register pair: shrd then shr by cl; the
// test of bit 32 of the count selects which half lands in the result.
243 __ shrd(result_reg, scratch1);
244 __ shr_cl(result_reg);
245 __ test(
ecx, Immediate(32));
248 __ j(
equal, &skip_mov, Label::kNear);
249 __ mov(scratch1, result_reg);
254 __ bind(&check_negative);
255 __ mov(result_reg, scratch1);
257 if (stash_exponent_copy) {
260 __ cmp(exponent_operand, Immediate(0));
265 __ mov(result_reg, scratch1);
271 if (stash_exponent_copy) {
274 __ bind(&done_no_stash);
// Copy into the caller's requested register if we computed elsewhere.
275 if (!final_result_reg.is(result_reg)) {
277 __ mov(final_result_reg, result_reg);
// --- FloatingPointHelper method bodies (fragment) ----------------------------
// LoadFloatOperand: loads `number` onto the x87 stack. Smis take the
// &load_smi path (fild_s of an integer pushed to a stack slot); heap
// numbers take the direct path (elided here).
285 void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
287 Label load_smi, done;
289 __ JumpIfSmi(number, &load_smi, Label::kNear);
291 __ jmp(&done, Label::kNear);
296 __ fild_s(Operand(
esp, 0));
// CheckFloatOperands: verifies edx and eax each hold a number — either a
// Smi or a heap object whose map equals the heap-number map; non-numbers
// go to the caller-provided label (signature elided in this excerpt).
303 void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
306 Label test_other, done;
309 __ JumpIfSmi(
edx, &test_other, Label::kNear);
311 Factory* factory = masm->isolate()->factory();
312 __ cmp(scratch, factory->heap_number_map());
315 __ bind(&test_other);
316 __ JumpIfSmi(
eax, &done, Label::kNear);
318 __ cmp(scratch, factory->heap_number_map());
// --- IC miss tail-calls and argument access (fragments) ----------------------
// Tail-call into the LOAD_IC miss builtin.
339 PropertyAccessCompiler::TailCallBuiltin(
340 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
// Indexed-interceptor load: scratch (eax) must alias neither receiver nor
// key; falls back to the LoadElementWithInterceptor IC utility, then to the
// KEYED_LOAD_IC miss builtin.
350 Register scratch =
eax;
351 DCHECK(!scratch.is(receiver) && !scratch.is(key));
364 ExternalReference ref = ExternalReference(
365 IC_Utility(IC::kLoadElementWithInterceptor), masm->isolate());
366 __ TailCallExternalReference(ref, 2, 1);
369 PropertyAccessCompiler::TailCallBuiltin(
370 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
// Arguments access: a non-Smi key goes to the slow path; the equal-jump to
// &adaptor presumably follows an arguments-adaptor-frame check (elided).
// Falls back to runtime for property reads and sloppy-arguments creation.
386 __ JumpIfNotSmi(
edx, &slow, Label::kNear);
393 __ j(
equal, &adaptor, Label::kNear);
431 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
456 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
// --- Sloppy-mode arguments object allocation (fragment) ----------------------
// Detects an arguments-adaptor frame, sizes the optional parameter map
// (kParameterMapHeaderSize) plus elements, instantiates the appropriate map
// (&has_mapped_parameters vs. the unmapped case), fills the parameter map
// writing the-hole for unmapped slots (&parameters_loop), then copies the
// remaining actual arguments (&arguments_loop). Bails out to
// Runtime::kNewSloppyArguments.
// NOTE(review): '¶meters_*' below is mojibake — '&param…' was HTML-entity
// decoded to '¶'; the original tokens were &parameters_test /
// &parameters_loop. Kept byte-identical here.
473 Label adaptor_frame, try_allocate;
477 __ j(
equal, &adaptor_frame, Label::kNear);
481 __ jmp(&try_allocate, Label::kNear);
484 __ bind(&adaptor_frame);
499 __ bind(&try_allocate);
506 const int kParameterMapHeaderSize =
508 Label no_parameter_map;
510 __ j(
zero, &no_parameter_map, Label::kNear);
512 __ bind(&no_parameter_map);
529 Label has_mapped_parameters, instantiate;
534 __ j(
not_zero, &has_mapped_parameters, Label::kNear);
538 __ jmp(&instantiate, Label::kNear);
540 __ bind(&has_mapped_parameters);
544 __ bind(&instantiate);
556 masm->isolate()->factory()->empty_fixed_array());
558 masm->isolate()->factory()->empty_fixed_array());
563 __ AssertNotSmi(
edx);
592 Label skip_parameter_map;
594 __ j(
zero, &skip_parameter_map);
597 Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
612 Label parameters_loop, parameters_test;
618 __ mov(
ecx, isolate()->factory()->the_hole_value());
631 __ jmp(¶meters_test, Label::kNear);
633 __ bind(¶meters_loop);
638 __ bind(¶meters_test);
640 __ j(
not_zero, ¶meters_loop, Label::kNear);
643 __ bind(&skip_parameter_map);
653 Immediate(isolate()->factory()->fixed_array_map()));
656 Label arguments_loop, arguments_test;
661 __ jmp(&arguments_test, Label::kNear);
663 __ bind(&arguments_loop);
669 __ bind(&arguments_test);
671 __ j(
less, &arguments_loop, Label::kNear);
684 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
// --- Strict-mode arguments object allocation (fragment) ----------------------
// Strict arguments have no parameter map: detect an adaptor frame, allocate
// the object (skipping the elements array when the count is zero —
// &add_arguments_object), copy the arguments, and fall back to
// Runtime::kNewStrictArguments on failure.
695 Label adaptor_frame, try_allocate, runtime;
699 __ j(
equal, &adaptor_frame, Label::kNear);
703 __ jmp(&try_allocate, Label::kNear);
706 __ bind(&adaptor_frame);
715 Label add_arguments_object;
716 __ bind(&try_allocate);
718 __ j(
zero, &add_arguments_object, Label::kNear);
720 __ bind(&add_arguments_object);
734 masm->isolate()->factory()->empty_fixed_array());
736 masm->isolate()->factory()->empty_fixed_array());
749 __ j(
zero, &done, Label::kNear);
759 Immediate(isolate()->factory()->fixed_array_map()));
781 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
// --- RegExp exec stub (fragment) ---------------------------------------------
// Under V8_INTERPRETED_REGEXP everything is delegated to
// Runtime::kRegExpExecRT. The native path below: validates the JSRegExp and
// subject from the stack, classifies the subject string (sequential
// one-byte / two-byte, sliced, cons, external — see the label set), calls
// the compiled irregexp code through an API exit frame with 9 arguments,
// handles the pending exception (including termination), then fills the
// last-match-info array from the static offsets vector (&next_capture
// loop). Interior lines are elided in this excerpt.
// NOTE(review): '¬_seq_nor_cons' / '¬_long_external' below are mojibake —
// '&not…' was HTML-entity decoded to '¬'. Kept byte-identical.
789 #ifdef V8_INTERPRETED_REGEXP
790 __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
// Stack-slot offsets for the stub's arguments.
800 static const int kLastMatchInfoOffset = 1 *
kPointerSize;
801 static const int kPreviousIndexOffset = 2 *
kPointerSize;
806 Factory* factory = isolate()->factory();
809 ExternalReference address_of_regexp_stack_memory_address =
810 ExternalReference::address_of_regexp_stack_memory_address(isolate());
811 ExternalReference address_of_regexp_stack_memory_size =
812 ExternalReference::address_of_regexp_stack_memory_size(isolate());
813 __ mov(
ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
818 __ mov(
eax, Operand(
esp, kJSRegExpOffset));
820 __ JumpIfSmi(
eax, &runtime);
826 if (FLAG_debug_code) {
828 __ Check(
not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
830 __ Check(
equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
852 __ Move(
edi, Immediate(0));
853 __ mov(
eax, Operand(
esp, kSubjectOffset));
854 __ JumpIfSmi(
eax, &runtime);
// Subject-string classification labels.
883 Label seq_one_byte_string , seq_two_byte_string ,
884 external_string , check_underlying ,
885 not_seq_nor_cons , check_code ,
894 __ j(
zero, &seq_two_byte_string);
901 __ j(
zero, &seq_one_byte_string, Label::kNear);
918 __ bind(&check_underlying);
925 __ j(
zero, &seq_two_byte_string);
// One-byte subject: ecx = 1 is the encoding flag passed to the regexp code
// (presumably 1 == one-byte; the two-byte path below uses 0 — confirm).
937 __ bind(&seq_one_byte_string);
941 __ mov(
ebx, Operand(
esp, kPreviousIndexOffset));
942 __ JumpIfNotSmi(
ebx, &runtime);
946 __ Move(
ecx, Immediate(1));
949 __ bind(&check_code);
954 __ JumpIfSmi(
edx, &runtime);
961 Counters* counters = isolate()->counters();
962 __ IncrementCounter(counters->regexp_entry_native(), 1);
// The native regexp entry takes 9 arguments through an API exit frame.
965 static const int kRegExpExecuteArguments = 9;
966 __ EnterApiExitFrame(kRegExpExecuteArguments);
970 Immediate(ExternalReference::isolate_address(isolate())));
976 __ mov(
esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
977 __ add(
esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
986 Immediate(ExternalReference::address_of_static_offsets_vector(
1017 Label setup_two_byte, setup_rest;
1019 __ j(
zero, &setup_two_byte, Label::kNear);
1025 __ jmp(&setup_rest, Label::kNear);
1027 __ bind(&setup_two_byte);
1035 __ bind(&setup_rest);
1042 __ LeaveApiExitFrame(
true);
// Exception handling: read and clear the pending exception, special-casing
// termination (uncatchable).
1060 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1062 __ mov(
edx, Immediate(isolate()->factory()->the_hole_value()));
1063 __ mov(
eax, Operand::StaticVariable(pending_exception));
1069 __ mov(Operand::StaticVariable(pending_exception),
edx);
1073 __ cmp(
eax, factory->termination_exception());
1074 Label throw_termination_exception;
1075 __ j(
equal, &throw_termination_exception, Label::kNear);
1080 __ bind(&throw_termination_exception);
1081 __ ThrowUncatchable(
eax);
// Failure (no match): return null.
1085 __ mov(
eax, factory->null_value());
1090 __ mov(
eax, Operand(
esp, kJSRegExpOffset));
1096 __ add(
edx, Immediate(2));
// Success: validate last-match-info (must be a fixed array) and store the
// captures from the static offsets vector.
1101 __ mov(
eax, Operand(
esp, kLastMatchInfoOffset));
1102 __ JumpIfSmi(
eax, &runtime);
1108 __ cmp(
eax, factory->fixed_array_map());
1125 __ mov(
eax, Operand(
esp, kSubjectOffset));
1136 ExternalReference address_of_static_offsets_vector =
1137 ExternalReference::address_of_static_offsets_vector(isolate());
1138 __ mov(
ecx, Immediate(address_of_static_offsets_vector));
1143 Label next_capture, done;
1146 __ bind(&next_capture);
1147 __ sub(
edx, Immediate(1));
1158 __ jmp(&next_capture);
1162 __ mov(
eax, Operand(
esp, kLastMatchInfoOffset));
1167 __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
// Non-sequential, non-cons subjects: external strings are unpacked (with a
// debug assert), then re-classified via &check_underlying.
1171 __ bind(¬_seq_nor_cons);
1173 __ j(
greater, ¬_long_external, Label::kNear);
1176 __ bind(&external_string);
1180 if (FLAG_debug_code) {
1184 __ Assert(
zero, kExternalStringExpectedButNotFound);
1199 __ bind(&seq_two_byte_string);
1203 __ mov(
ebx, Operand(
esp, kPreviousIndexOffset));
1204 __ JumpIfNotSmi(
ebx, &runtime);
1208 __ Move(
ecx, Immediate(0));
1209 __ jmp(&check_code);
1212 __ bind(¬_long_external);
1222 __ jmp(&check_underlying);
// --- Comparison helper statics (fragments) -----------------------------------
// NegativeComparisonResult: maps a condition code to a comparison result
// (body elided in this excerpt).
1227 static int NegativeComparisonResult(
Condition cc) {
// CheckInputType: in the Smi-expected case a non-Smi input bails to `fail`;
// in the number-expected case Smis pass and heap objects are checked
// against the heap-number map.
1235 static void CheckInputType(MacroAssembler* masm, Register input,
1239 __ JumpIfNotSmi(input, fail);
1241 __ JumpIfSmi(input, &ok);
1243 Immediate(masm->isolate()->factory()->heap_number_map()));
// BranchIfNotInternalizedString: Smis can never be internalized strings,
// so they branch to `label` immediately.
1252 static void BranchIfNotInternalizedString(MacroAssembler* masm,
1256 __ JumpIfSmi(
object, label);
// --- Generic compare IC (fragment) -------------------------------------------
// Inputs in edx (left) and eax (right). Order of checks visible here:
// type-checked entry (CheckInputType on both sides), Smi fast path,
// identical-operand shortcut with NaN handling, quick not-equal answers for
// mixed object/non-object cases, x87 number comparison via
// FloatingPointHelper (with an &unordered path for NaN), internalized
// string identity, one-byte string comparison, and finally a conservative
// unequal-objects check before calling the EQUALS/COMPARE builtins.
// NOTE(review): '¬_identical' / '¬_both_objects' are mojibake of
// '&not_identical' / '&not_both_objects' ('&not' -> '¬'). Kept
// byte-identical; interior lines are elided in this excerpt.
1266 Label check_unequal_objects;
1270 CheckInputType(masm,
edx,
left(), &miss);
1271 CheckInputType(masm,
eax,
right(), &miss);
1274 Label non_smi, smi_done;
1277 __ JumpIfNotSmi(
ecx, &non_smi, Label::kNear);
1291 Label generic_heap_number_comparison;
1293 Label not_identical;
// Identical operands: undefined and NaN need special treatment before the
// generic heap-number path.
1300 Label check_for_nan;
1301 __ cmp(
edx, isolate()->factory()->undefined_value());
1305 __ bind(&check_for_nan);
1311 Immediate(isolate()->factory()->heap_number_map()));
1312 __ j(
equal, &generic_heap_number_comparison, Label::kNear);
1322 __ bind(¬_identical);
1347 __ sub(
ecx, Immediate(0x01));
1356 Immediate(isolate()->factory()->heap_number_map()));
1358 __ j(
equal, &slow, Label::kNear);
// Two different heap objects: if both are JS objects (or odd balls that
// compare unequal), answer "not equal" without calling out.
1370 Label first_non_object;
1373 __ j(
below, &first_non_object, Label::kNear);
1376 Label return_not_equal;
1378 __ bind(&return_not_equal);
1381 __ bind(&first_non_object);
1384 __ j(
equal, &return_not_equal);
1391 __ j(
equal, &return_not_equal);
// Number comparison on the x87 stack; non-numbers fall through.
1397 Label non_number_comparison;
1400 __ bind(&generic_heap_number_comparison);
1401 FloatingPointHelper::CheckFloatOperands(
1402 masm, &non_number_comparison,
ebx);
1403 FloatingPointHelper::LoadFloatOperand(masm,
eax);
1404 FloatingPointHelper::LoadFloatOperand(masm,
edx);
1410 Label below_label, above_label;
1412 __ j(
below, &below_label, Label::kNear);
1413 __ j(
above, &above_label, Label::kNear);
1415 __ Move(
eax, Immediate(0));
1418 __ bind(&below_label);
1422 __ bind(&above_label);
1428 __ bind(&unordered);
// Internalized strings compare by identity; otherwise try the string path.
1438 __ bind(&non_number_comparison);
1441 Label check_for_strings;
1443 BranchIfNotInternalizedString(masm, &check_for_strings,
eax,
ecx);
1444 BranchIfNotInternalizedString(masm, &check_for_strings,
edx,
ecx);
1452 __ bind(&check_for_strings);
1455 &check_unequal_objects);
1465 __ Abort(kUnexpectedFallThroughFromStringComparison);
1468 __ bind(&check_unequal_objects);
1473 Label not_both_objects;
1474 Label return_unequal;
1482 __ j(
not_zero, ¬_both_objects, Label::kNear);
1484 __ j(
below, ¬_both_objects, Label::kNear);
1486 __ j(
below, ¬_both_objects, Label::kNear);
1492 __ j(
zero, &return_unequal, Label::kNear);
1495 __ j(
zero, &return_unequal, Label::kNear);
1499 __ bind(&return_unequal);
1503 __ bind(¬_both_objects);
// Fall back to the JS builtins: STRICT_EQUALS/EQUALS for equality,
// COMPARE for relational operators.
1514 builtin =
strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
1516 builtin = Builtins::COMPARE;
// --- Call-target feedback recording (fragment) -------------------------------
// Updates the type feedback slot for a call site: already-matching or
// megamorphic slots are left alone (&done); otherwise the slot is
// initialized, creating an AllocationSite (CreateAllocationSiteStub) for
// the Array function when pretenuring-on-call-new is disabled.
// NOTE(review): '¬_array_function' is mojibake of '&not_array_function'.
1532 static void GenerateRecordCallTarget(MacroAssembler* masm) {
1540 Isolate* isolate = masm->isolate();
1541 Label initialize, done, miss, megamorphic, not_array_function;
1550 __ j(
equal, &done, Label::kFar);
1552 __ j(
equal, &done, Label::kFar);
1554 if (!FLAG_pretenuring_call_new) {
1559 Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
1567 __ jmp(&done, Label::kFar);
1578 __ bind(&megamorphic);
1582 __ jmp(&done, Label::kFar);
1586 __ bind(&initialize);
1587 if (!FLAG_pretenuring_call_new) {
1606 CreateAllocationSiteStub create_stub(isolate);
1607 __ CallStub(&create_stub);
1617 __ bind(¬_array_function);
// --- Call helpers (fragments) ------------------------------------------------
// EmitContinueIfStrictOrNative: body elided in this excerpt.
1637 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
// EmitSlowCase: non-JSFunction callees. Function proxies go through
// CALL_FUNCTION_PROXY with the proxy pushed (hence argc + 1); everything
// else through CALL_NON_FUNCTION. Both paths jump to the arguments adaptor
// trampoline with ebx = 0 (no expected-argument count).
1651 static void EmitSlowCase(Isolate* isolate,
1652 MacroAssembler* masm,
1654 Label* non_function) {
1661 __ Move(
eax, Immediate(argc + 1));
1662 __ Move(
ebx, Immediate(0));
1663 __ GetBuiltinEntry(
edx, Builtins::CALL_FUNCTION_PROXY);
1665 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
1671 __ bind(non_function);
1673 __ Move(
eax, Immediate(argc));
1674 __ Move(
ebx, Immediate(0));
1675 __ GetBuiltinEntry(
edx, Builtins::CALL_NON_FUNCTION);
1676 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
// EmitWrapCase: wraps a primitive receiver (body elided).
1681 static void EmitWrapCase(MacroAssembler* masm,
int argc, Label* cont) {
// CallFunctionNoFeedback: generic call sequence without a feedback slot.
// Smi callee -> &non_function; call-as-method paths may wrap the receiver
// (&wrap) unless the callee is strict or native (&cont).
1694 static void CallFunctionNoFeedback(MacroAssembler* masm,
1695 int argc,
bool needs_checks,
1696 bool call_as_method) {
1698 Label slow, non_function, wrap, cont;
1702 __ JumpIfSmi(
edi, &non_function);
1710 ParameterCount actual(argc);
1712 if (call_as_method) {
1714 EmitContinueIfStrictOrNative(masm, &cont);
1721 __ JumpIfSmi(
eax, &wrap);
1738 EmitSlowCase(masm->isolate(), masm, argc, &non_function);
1741 if (call_as_method) {
1743 EmitWrapCase(masm, argc, &cont);
// --- Construct-call stub and feedback loading (fragments) --------------------
// Construct path: Smi callee -> &non_function_call; records call-target
// feedback, then (under pretenuring) loads the AllocationSite into ebx,
// defaulting to undefined if the feedback slot is not a site. Non-function
// targets dispatch to the *_AS_CONSTRUCTOR builtins through the arguments
// adaptor.
1759 Label slow, non_function_call;
1762 __ JumpIfSmi(
edi, &non_function_call);
1768 GenerateRecordCallTarget(masm);
1770 if (FLAG_pretenuring_call_new) {
1777 Label feedback_register_initialized;
1781 Handle<Map> allocation_site_map =
1782 isolate()->factory()->allocation_site_map();
1784 __ j(
equal, &feedback_register_initialized);
1785 __ mov(
ebx, isolate()->factory()->undefined_value());
1786 __ bind(&feedback_register_initialized);
1789 __ AssertUndefinedOrAllocationSite(
ebx);
1793 Register jmp_reg =
ecx;
1807 __ GetBuiltinEntry(
edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
1810 __ bind(&non_function_call);
1811 __ GetBuiltinEntry(
edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
1814 __ Move(
ebx, Immediate(0));
1815 Handle<Code> arguments_adaptor =
1816 isolate()->builtins()->ArgumentsAdaptorTrampoline();
// EmitLoadTypeFeedbackVector: loads the feedback vector (body elided).
1821 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
// Array-call IC fragment: verifies the feedback slot holds an
// AllocationSite, then tail-calls the ArrayConstructorStub.
1834 ParameterCount actual(argc);
1836 EmitLoadTypeFeedbackVector(masm,
ebx);
1847 Factory* factory = masm->isolate()->factory();
1849 factory->allocation_site_map());
1853 ArrayConstructorStub stub(masm->isolate(),
arg_count());
1854 __ TailCallStub(&stub);
// --- Call IC (fragments) -----------------------------------------------------
// Fast path: feedback vector loaded into ebx; a matching JSFunction target
// goes through &have_js_function (with receiver wrapping for sloppy
// call-as-method, &wrap). Mismatches take &extra_checks_or_miss, which may
// transition the slot and re-enter via &slow_start. The miss handler calls
// either kCallIC_Miss or kCallIC_Customization_Miss with 4 arguments.
1860 CallFunctionNoFeedback(masm,
1873 Isolate* isolate = masm->isolate();
1874 Label extra_checks_or_miss, slow_start;
1875 Label slow, non_function, wrap, cont;
1876 Label have_js_function;
1878 ParameterCount actual(argc);
1880 EmitLoadTypeFeedbackVector(masm,
ebx);
1887 __ bind(&have_js_function);
1889 EmitContinueIfStrictOrNative(masm, &cont);
1894 __ JumpIfSmi(
eax, &wrap);
1905 EmitSlowCase(isolate, masm, argc, &non_function);
1909 EmitWrapCase(masm, argc, &cont);
1912 __ bind(&extra_checks_or_miss);
1922 if (!FLAG_trace_ic) {
1925 __ AssertNotSmi(
ecx);
1931 __ jmp(&slow_start);
1939 __ bind(&slow_start);
1942 __ JumpIfSmi(
edi, &non_function);
1947 __ jmp(&have_js_function);
1969 : IC::kCallIC_Customization_Miss;
1971 ExternalReference miss = ExternalReference(IC_Utility(
id),
1973 __ CallExternalReference(miss, 4);
// --- Ahead-of-time stub generation (fragments) -------------------------------
// GenerateStubsAheadOfTime pre-generates stubs needed before the isolate is
// fully running (BinaryOpICWithAllocationSite among them, per the visible
// call). GenerateFPStubs lazily builds/caches the save-doubles CEntry stub
// and marks the isolate's FP stubs as generated.
1986 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1994 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1998 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2002 Code* save_doubles_code;
2003 if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
2004 save_doubles_code = *(save_doubles.GetCode());
2006 isolate->set_fp_stubs_generated(
true);
// --- C-entry stub (fragment) -------------------------------------------------
// Calls out from generated code into a C++ runtime function. Debug builds
// check stack alignment and that the result is not the-hole. The sentinel
// `exception()` return value routes to &exception_returned, where the real
// pending exception is fetched and cleared (replaced with the-hole);
// termination exceptions are rethrown as uncatchable.
2038 if (FLAG_debug_code) {
2039 __ CheckStackAlignment();
2046 Immediate(ExternalReference::isolate_address(isolate())));
2052 if (FLAG_debug_code) {
2054 __ cmp(
eax, isolate()->factory()->the_hole_value());
2061 Label exception_returned;
2062 __ cmp(
eax, isolate()->factory()->exception());
2063 __ j(
equal, &exception_returned);
2065 ExternalReference pending_exception_address(
2066 Isolate::kPendingExceptionAddress, isolate());
// Debug check: on a normal return the pending-exception slot must still
// hold the-hole.
2070 if (FLAG_debug_code) {
2072 __ mov(
edx, Immediate(isolate()->factory()->the_hole_value()));
2074 __ cmp(
edx, Operand::StaticVariable(pending_exception_address));
2076 __ j(
equal, &okay, Label::kNear);
2087 __ bind(&exception_returned);
2090 __ mov(
eax, Operand::StaticVariable(pending_exception_address));
2093 __ mov(
edx, Immediate(isolate()->factory()->the_hole_value()));
2094 __ mov(Operand::StaticVariable(pending_exception_address),
edx);
2098 Label throw_termination_exception;
2099 __ cmp(
eax, isolate()->factory()->termination_exception());
2100 __ j(
equal, &throw_termination_exception);
2105 __ bind(&throw_termination_exception);
2106 __ ThrowUncatchable(
eax);
// --- JS-entry stub (fragment) ------------------------------------------------
// The trampoline from C++ into JS. Saves the caller's C entry FP, marks
// whether this is the outermost JS entry (js_entry_sp == 0 -> push the
// OUTERMOST_JSENTRY_FRAME marker, else INNER_JSENTRY_FRAME), installs a
// JS_ENTRY try handler whose &handler_entry stores the caught exception
// into the pending-exception slot, clears pending exception before the
// call, then dispatches to either the construct-entry or plain entry
// trampoline depending on type(). On exit the outermost entry resets
// js_entry_sp and the saved C entry FP is restored.
// NOTE(review): '¬_outermost_js*' is mojibake of '&not_outermost_js*'.
2111 Label invoke, handler_entry, exit;
2112 Label not_outermost_js, not_outermost_js_2;
2121 int marker =
type();
2130 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2131 __ push(Operand::StaticVariable(c_entry_fp));
2134 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2135 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
2137 __ mov(Operand::StaticVariable(js_entry_sp),
ebp);
2138 __ push(Immediate(
Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2139 __ jmp(&invoke, Label::kNear);
2140 __ bind(¬_outermost_js);
2141 __ push(Immediate(
Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
2146 __ bind(&handler_entry);
2150 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2152 __ mov(Operand::StaticVariable(pending_exception),
eax);
2153 __ mov(
eax, Immediate(isolate()->factory()->exception()));
2159 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2162 __ mov(
edx, Immediate(isolate()->factory()->the_hole_value()));
2163 __ mov(Operand::StaticVariable(pending_exception),
edx);
2166 __ push(Immediate(0));
2172 if (
type() == StackFrame::ENTRY_CONSTRUCT) {
2173 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2175 __ mov(
edx, Immediate(construct_entry));
2177 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2178 __ mov(
edx, Immediate(entry));
2192 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
2193 __ bind(¬_outermost_js_2);
2196 __ pop(Operand::StaticVariable(ExternalReference(
2197 Isolate::kCEntryFPAddress, isolate())));
// --- instanceof stub (fragment) ----------------------------------------------
// object in eax, function in edx. Fast paths via the instanceof root cache
// (function/map/answer roots); otherwise gets the function's prototype and
// walks the object's prototype chain (&loop) until it hits the prototype
// (&is_instance) or null (&is_not_instance). The kDelta*/kCmp*/kMov byte
// constants patch the inlined call site in debug-checked self-modifying
// fashion: the cmp-edi operand and mov-eax immediate at the call site are
// verified (Assert) and then overwritten with the map / the boolean answer.
// Non-JS-object inputs (Smi, null, strings) fall to the tail which answers
// false directly. Interior lines are elided in this excerpt.
// NOTE(review): '¬_js_object' is mojibake of '&not_js_object'.
2232 Register
object =
eax;
2234 Register
function =
edx;
2235 Register prototype =
edi;
2236 Register scratch =
ecx;
// Call-site patching constants: offsets into the inlined sequence and the
// expected opcode bytes (0x3b/0x3d = cmp, 0xb8 = mov eax, imm32).
2239 static const int kDeltaToCmpImmediate = 2;
2240 static const int kDeltaToMov = 8;
2241 static const int kDeltaToMovImmediate = 9;
2242 static const int8_t kCmpEdiOperandByte1 = bit_cast<int8_t, uint8_t>(0x3b);
2243 static const int8_t kCmpEdiOperandByte2 = bit_cast<int8_t, uint8_t>(0x3d);
2244 static const int8_t kMovEaxImmediateByte = bit_cast<int8_t, uint8_t>(0xb8);
2250 Label slow, not_js_object;
2257 __ JumpIfSmi(
object, ¬_js_object);
2258 __ IsObjectJSObjectType(
object,
map, scratch, ¬_js_object);
// Root-cache probe: if function and map match the cached pair, the cached
// answer can be returned directly.
2265 __ CompareRoot(
function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2267 __ CompareRoot(
map, scratch, Heap::kInstanceofCacheMapRootIndex);
2269 __ LoadRoot(
eax, Heap::kInstanceofCacheAnswerRootIndex);
2275 __ TryGetFunctionPrototype(
function, prototype, scratch, &slow,
true);
2278 __ JumpIfSmi(prototype, &slow);
2279 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2284 __ StoreRoot(
map, scratch, Heap::kInstanceofCacheMapRootIndex);
2285 __ StoreRoot(
function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
// Verify the inlined call site looks as expected, then patch the cmp
// immediate with the map.
2293 if (FLAG_debug_code) {
2294 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
2295 __ Assert(
equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
2296 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
2297 __ Assert(
equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
2299 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
2300 __ mov(Operand(scratch, 0),
map);
// Prototype-chain walk.
2306 Label loop, is_instance, is_not_instance;
2308 __ cmp(scratch, prototype);
2309 __ j(
equal, &is_instance, Label::kNear);
2310 Factory* factory = isolate()->factory();
2311 __ cmp(scratch, Immediate(factory->null_value()));
2312 __ j(
equal, &is_not_instance, Label::kNear);
2317 __ bind(&is_instance);
2319 __ mov(
eax, Immediate(0));
2320 __ StoreRoot(
eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2322 __ mov(
eax, factory->true_value());
2326 __ mov(
eax, factory->true_value());
// Patch the mov-eax immediate at the call site with the true answer.
2329 if (FLAG_debug_code) {
2330 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2331 __ Assert(
equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2333 __ mov(Operand(scratch, kDeltaToMovImmediate),
eax);
2335 __ Move(
eax, Immediate(0));
2340 __ bind(&is_not_instance);
2343 __ StoreRoot(
eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2345 __ mov(
eax, factory->false_value());
2349 __ mov(
eax, factory->false_value());
2352 if (FLAG_debug_code) {
2353 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2354 __ Assert(
equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2356 __ mov(Operand(scratch, kDeltaToMovImmediate),
eax);
// Non-JS-object inputs: null and Smis answer false; strings are checked
// explicitly and also answer false.
2363 Label object_not_null, object_not_null_or_smi;
2364 __ bind(¬_js_object);
2367 __ JumpIfSmi(
function, &slow, Label::kNear);
2372 __ cmp(
object, factory->null_value());
2375 __ mov(
eax, factory->false_value());
2381 __ bind(&object_not_null);
2383 __ JumpIfNotSmi(
object, &object_not_null_or_smi, Label::kNear);
2385 __ mov(
eax, factory->false_value());
2391 __ bind(&object_not_null_or_smi);
2393 Condition is_string = masm->IsObjectStringType(
object, scratch, scratch);
2396 __ mov(
eax, factory->false_value());
// Slow-path result conversion: zero (equal) means "is instance" -> true.
2422 Label true_value, done;
2424 __ j(
zero, &true_value, Label::kNear);
2425 __ mov(
eax, factory->false_value());
2426 __ jmp(&done, Label::kNear);
2427 __ bind(&true_value);
2428 __ mov(
eax, factory->true_value());
// --- String char-code-at / char-from-code generators (fragments) -------------
// CharCodeAt slow path: the index may be a heap number, which is converted
// via kNumberToIntegerMapMinusZero / kNumberToSmi under the call helper's
// Before/AfterCall hooks, and the hard cases call kStringCharCodeAtRT. The
// Aborts mark paths that GenerateFast must never fall through to.
2461 Factory* factory = masm->isolate()->factory();
2471 MacroAssembler* masm,
2472 const RuntimeCallHelper& call_helper) {
2473 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2479 masm->isolate()->factory()->heap_number_map(),
2482 call_helper.BeforeCall(masm);
2486 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2490 __ CallRuntime(Runtime::kNumberToSmi, 1);
2501 call_helper.AfterCall(masm);
2512 call_helper.BeforeCall(masm);
2516 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
2520 call_helper.AfterCall(masm);
2523 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
// CharFromCode fast path: probe the single-character string cache; an
// undefined cache entry means a miss (handled in the slow path below via
// Runtime::kCharFromCode).
2540 Factory* factory = masm->isolate()->factory();
2541 __ Move(
result_, Immediate(factory->single_character_string_cache()));
2549 __ cmp(
result_, factory->undefined_value());
2556 MacroAssembler* masm,
2557 const RuntimeCallHelper& call_helper) {
2558 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2561 call_helper.BeforeCall(masm);
2563 __ CallRuntime(Runtime::kCharFromCode, 1);
2567 call_helper.AfterCall(masm);
2570 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
// --- Character copy + substring stub (fragments) -----------------------------
// Copy loop: byte-wise copy via a scratch register that must alias none of
// dest/src/count; an empty count is handled up front (test count, count).
2580 DCHECK(!scratch.is(dest));
2581 DCHECK(!scratch.is(src));
2582 DCHECK(!scratch.is(count));
2586 __ test(count, count);
2596 __ mov_b(scratch, Operand(src, 0));
2597 __ mov_b(Operand(dest, 0), scratch);
// SubString stub: validates string + Smi from/to indices, returns the
// original string when the range covers it, unpacks cons/sliced/external
// subjects to &underlying_unpacked, makes a SlicedString when slices are
// enabled and the result is long enough (else &copy_routine copies into a
// fresh sequential string, one- or two-byte), and falls back to
// Runtime::kSubString. Interior lines elided.
// NOTE(review): '¬_original_string' and '©_routine' are mojibake of
// '&not_original_string' and '&copy_routine' ('&not'->'¬', '&copy'->'©').
2619 __ JumpIfSmi(
eax, &runtime);
2628 __ JumpIfNotSmi(
ecx, &runtime);
2630 __ JumpIfNotSmi(
edx, &runtime);
2633 Label not_original_string;
2635 __ j(
below, ¬_original_string, Label::kNear);
2639 Counters* counters = isolate()->counters();
2640 __ IncrementCounter(counters->sub_string_native(), 1);
2642 __ bind(¬_original_string);
2654 Label underlying_unpacked, sliced_string, seq_or_external_string;
2659 __ j(
zero, &seq_or_external_string, Label::kNear);
2661 Factory* factory = isolate()->factory();
2663 __ j(
not_zero, &sliced_string, Label::kNear);
2667 factory->empty_string());
2673 __ jmp(&underlying_unpacked, Label::kNear);
2675 __ bind(&sliced_string);
2682 __ jmp(&underlying_unpacked, Label::kNear);
2684 __ bind(&seq_or_external_string);
2688 __ bind(&underlying_unpacked);
2690 if (FLAG_string_slices) {
2698 __ j(
less, ©_routine);
2704 Label two_byte_slice, set_slice_header;
2708 __ j(
zero, &two_byte_slice, Label::kNear);
2710 __ jmp(&set_slice_header, Label::kNear);
2711 __ bind(&two_byte_slice);
2713 __ bind(&set_slice_header);
2719 __ IncrementCounter(counters->sub_string_native(), 1);
2722 __ bind(©_routine);
2731 Label two_byte_sequential, runtime_drop_two, sequential_string;
2735 __ j(
zero, &sequential_string);
2747 __ bind(&sequential_string);
2754 __ j(
zero, &two_byte_sequential);
2776 __ IncrementCounter(counters->sub_string_native(), 1);
2779 __ bind(&two_byte_sequential);
2804 __ IncrementCounter(counters->sub_string_native(), 1);
2808 __ bind(&runtime_drop_two);
2813 __ TailCallRuntime(Runtime::kSubString, 3, 1);
// Single-character substrings reuse the char-at generator.
2815 __ bind(&single_char);
2820 StringCharAtGenerator generator(
2822 generator.GenerateFast(masm);
2824 generator.SkipSlow(masm, &runtime);
// --- Flat one-byte string equality / comparison (fragments) ------------------
// Equality: lengths are compared first (unequal length => unequal strings);
// zero length => equal; otherwise the char-compare loop decides.
2832 Register scratch2) {
2833 Register length = scratch1;
2836 Label strings_not_equal, check_zero_length;
2839 __ j(
equal, &check_zero_length, Label::kNear);
2840 __ bind(&strings_not_equal);
2845 Label compare_chars;
2846 __ bind(&check_zero_length);
2848 __ test(length, length);
2849 __ j(
not_zero, &compare_chars, Label::kNear);
2854 __ bind(&compare_chars);
2856 &strings_not_equal, Label::kNear);
// Three-way comparison: compares min(length) characters; if all equal the
// length delta decides (&length_not_equal / &compare_lengths), otherwise
// the first differing character does (&result_not_equal; `above` maps to
// "greater" since the chars are unsigned bytes).
2865 MacroAssembler* masm, Register left, Register right, Register scratch1,
2866 Register scratch2, Register scratch3) {
2867 Counters* counters = masm->isolate()->counters();
2868 __ IncrementCounter(counters->string_compare_native(), 1);
2873 __ mov(scratch3, scratch1);
2876 Register length_delta = scratch3;
2880 __ sub(scratch1, length_delta);
2881 __ bind(&left_shorter);
2883 Register min_length = scratch1;
2886 Label compare_lengths;
2887 __ test(min_length, min_length);
2888 __ j(
zero, &compare_lengths, Label::kNear);
2891 Label result_not_equal;
2893 &result_not_equal, Label::kNear);
2896 __ bind(&compare_lengths);
2897 __ test(length_delta, length_delta);
2898 Label length_not_equal;
2899 __ j(
not_zero, &length_not_equal, Label::kNear);
2907 Label result_greater;
2909 __ bind(&length_not_equal);
2910 __ j(
greater, &result_greater, Label::kNear);
2911 __ jmp(&result_less, Label::kNear);
2912 __ bind(&result_not_equal);
2913 __ j(
above, &result_greater, Label::kNear);
2914 __ bind(&result_less);
2921 __ bind(&result_greater);
// Char-compare loop: untags the Smi length, then walks both strings with a
// negative index (index reuses the length register), byte-comparing and
// jumping to chars_not_equal on the first mismatch.
2928 MacroAssembler* masm, Register left, Register right, Register length,
2929 Register scratch, Label* chars_not_equal,
2930 Label::Distance chars_not_equal_near) {
2934 __ SmiUntag(length);
2940 Register index = length;
2945 __ mov_b(scratch, Operand(left, index,
times_1, 0));
2946 __ cmpb(scratch, Operand(right, index,
times_1, 0));
2947 __ j(
not_equal, chars_not_equal, chars_not_equal_near);
// --- String-compare stub (fragment) ------------------------------------------
// Fast path requires both operands (edx, eax) to be sequential one-byte
// strings; anything else tail-calls Runtime::kStringCompare.
2970 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
2976 __ JumpIfNotBothSequentialOneByteStrings(
edx,
eax,
ecx,
ebx, &runtime);
2989 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
// --- BinaryOpIC with allocation site (fragment) ------------------------------
// Loads the allocation-site slot into ecx (undefined when absent), debug-
// asserts it really is an AllocationSite, then tail-calls the plain
// BinaryOpWithAllocationSiteStub for the current state().
2993 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3003 __ mov(
ecx,
handle(isolate()->heap()->undefined_value()));
3006 if (FLAG_debug_code) {
3010 isolate()->factory()->allocation_site_map());
3011 __ Assert(
equal, kExpectedAllocationSite);
3016 BinaryOpWithAllocationSiteStub stub(isolate(), state());
3017 __ TailCallStub(&stub);
// --- Compare IC state handlers (fragments) -----------------------------------
// Per-state compare handlers; each bails to &miss when its type assumption
// fails, and the miss handler at the end calls kCompareIC_Miss with 3 args.
// Smi state: both operands must be Smis (their OR is tested via ecx).
3026 __ JumpIfNotSmi(
ecx, &miss, Label::kNear);
// Number state: Smi/heap-number operands only; undefined is tolerated when
// the IC recorded it (&maybe_undefined1/2); NaN compares go &unordered and
// fall back to the generic stub.
3051 Label unordered, maybe_undefined1, maybe_undefined2;
3055 __ JumpIfNotSmi(
edx, &miss);
3058 __ JumpIfNotSmi(
eax, &miss);
3065 __ JumpIfSmi(
ecx, &generic_stub, Label::kNear);
3068 isolate()->factory()->heap_number_map());
3071 isolate()->factory()->heap_number_map());
3074 __ bind(&unordered);
3075 __ bind(&generic_stub);
3080 __ bind(&maybe_undefined1);
3082 __ cmp(
eax, Immediate(isolate()->factory()->undefined_value()));
3084 __ JumpIfSmi(
edx, &unordered);
3090 __ bind(&maybe_undefined2);
3092 __ cmp(
edx, Immediate(isolate()->factory()->undefined_value()));
// Internalized-string state: identity comparison after type checks.
3108 Register tmp1 =
ecx;
3109 Register tmp2 =
ebx;
3116 __ JumpIfSmi(tmp1, &miss, Label::kNear);
// Unique-name state: both operands must have unique-name instance types.
3153 Register tmp1 =
ecx;
3154 Register tmp2 =
ebx;
3161 __ JumpIfSmi(tmp1, &miss, Label::kNear);
3170 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
3171 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
// String state: flat one-byte fast path, else runtime kStringEquals /
// kStringCompare depending on the operation.
3200 Register tmp1 =
ecx;
3201 Register tmp2 =
ebx;
3202 Register tmp3 =
edi;
3208 __ JumpIfSmi(tmp1, &miss);
3248 __ bind(&do_compare);
3253 __ JumpIfNotBothSequentialOneByteStrings(
left,
right, tmp1, tmp2, &runtime);
3271 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
3273 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
// Object / known-object states: both operands must be non-Smi.
3286 __ JumpIfSmi(
ecx, &miss, Label::kNear);
3306 __ JumpIfSmi(
ecx, &miss, Label::kNear);
// Miss: re-enter through the CompareIC miss IC utility.
3326 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
3334 __ CallExternalReference(miss, 3);
// --- Name-dictionary lookup (fragments) --------------------------------------
// Negative lookup: probes the dictionary proving `name` is absent —
// undefined entry means done, a matching entry fails, the-hole continues
// (&good); non-unique-name entries force the runtime path (name and its
// hash are pushed for it).
// NOTE(review): '¬_in_dictionary' below is mojibake of
// '&not_in_dictionary'.
3354 Register properties,
3372 NameDictionary::GetProbeOffset(
i))));
3377 Register entity_name =
r0;
3382 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
3386 __ cmp(entity_name, Handle<Name>(
name));
3391 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
3392 __ j(
equal, &good, Label::kNear);
3396 __ JumpIfNotUniqueNameInstanceType(
3403 __ push(Immediate(Handle<Object>(
name)));
3404 __ push(Immediate(
name->Hash()));
// Positive lookup: per-probe offset added to the hash before masking.
3442 __ add(
r0, Immediate(NameDictionary::GetProbeOffset(
i)));
3451 __ cmp(
name, Operand(elements,
// Stub body: full probe sequence with capacity mask on the stack; ends in
// one of the three outcome labels.
3487 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3489 Register scratch =
result();
3493 __ SmiUntag(scratch);
3505 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(
i)));
3507 __ and_(scratch, Operand(
esp, 0));
3517 __ cmp(scratch, isolate()->factory()->undefined_value());
3518 __ j(
equal, ¬_in_dictionary);
3531 __ JumpIfNotUniqueNameInstanceType(
3533 &maybe_in_dictionary);
3537 __ bind(&maybe_in_dictionary);
3547 __ bind(&in_dictionary);
3552 __ bind(¬_in_dictionary);
// --- Record-write stub (fragments) -------------------------------------------
// Entry emits a patchable two-jump sequence selecting the incremental-
// marking mode (noncompacting near / compacting far). The non-incremental
// body updates the remembered set only when needed
// (&dont_need_remembered_set). InformIncrementalMarker makes a GC-safe
// 3-argument C call to the incremental-marking record-write function.
3573 Label skip_to_incremental_noncompacting;
3574 Label skip_to_incremental_compacting;
3580 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
3581 __ jmp(&skip_to_incremental_compacting, Label::kFar);
3590 __ bind(&skip_to_incremental_noncompacting);
3593 __ bind(&skip_to_incremental_compacting);
3607 Label dont_need_remembered_set;
3612 &dont_need_remembered_set);
3618 &dont_need_remembered_set);
3631 __ bind(&dont_need_remembered_set);
3646 int argument_count = 3;
3651 Immediate(ExternalReference::isolate_address(isolate())));
3653 AllowExternalCallThatCantCauseGC scope(masm);
3655 ExternalReference::incremental_marking_record_write_function(isolate()),
// CheckNeedsToInformIncrementalMarker: black objects skip marking work
// (&object_is_black); white values must be marked (&ensure_not_white),
// possibly re-entering the incremental marker (&need_incremental[_pop_object]).
3663 MacroAssembler* masm,
3664 OnNoNeedToInformIncrementalMarker on_no_need,
3666 Label object_is_black, need_incremental, need_incremental_pop_object;
3695 __ bind(&object_is_black);
3701 Label ensure_not_white;
3717 __ jmp(&need_incremental);
3719 __ bind(&ensure_not_white);
3728 &need_incremental_pop_object,
3740 __ bind(&need_incremental_pop_object);
3743 __ bind(&need_incremental);
// --- Array-literal element store + trampoline stubs (fragments) --------------
// Dispatches on the elements kind of the array (edi holds the map): fast
// Smi/object elements, double elements (StoreNumberToDoubleElements), or
// the slow path via Runtime::kStoreArrayLiteralElement.
3760 Label double_elements;
3762 Label slow_elements;
3763 Label slow_elements_from_double;
3764 Label fast_elements;
3771 __ CheckFastElements(
edi, &double_elements);
3774 __ JumpIfSmi(
eax, &smi_element);
3775 __ CheckFastSmiElements(
edi, &fast_elements, Label::kNear);
3780 __ bind(&slow_elements);
3791 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
3793 __ bind(&slow_elements_from_double);
3795 __ jmp(&slow_elements);
3798 __ bind(&fast_elements);
3810 __ bind(&smi_element);
3817 __ bind(&double_elements);
3821 __ StoreNumberToDoubleElements(
eax,
3825 &slow_elements_from_double,
// Stub-failure trampoline: recovers the parameter count and leaves the
// STUB_FAILURE_TRAMPOLINE frame.
3835 int parameter_count_offset =
3838 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
3840 int additional_offset =
// Load-IC trampolines delegating to the vector-based load stubs.
3849 VectorLoadStub stub(isolate(),
state());
3856 VectorKeyedLoadStub stub(isolate());
// --- Profile entry hook (fragments) ------------------------------------------
// The stub is only emitted when the isolate has a function-entry hook
// installed; the stub body itself DCHECKs that invariant. Three registers
// are saved around the hook call (kNumSavedRegisters).
3862 if (masm->isolate()->function_entry_hook() !=
NULL) {
3864 masm->CallStub(&stub);
3871 const int kNumSavedRegisters = 3;
3887 DCHECK(isolate()->function_entry_hook() !=
NULL);
// --- Array constructor dispatch / AOT generation (fragments) -----------------
// CreateArrayDispatch<T>: either tail-calls one stub directly or scans all
// elements kinds up to last_index and tail-calls the matching T stub;
// an unmatched kind aborts.
// NOTE(review): '¬_zero_case' / '¬_one_case' below are mojibake of
// '&not_zero_case' / '&not_one_case'.
3902 static void CreateArrayDispatch(MacroAssembler* masm,
3905 T stub(masm->isolate(),
3908 __ TailCallStub(&stub);
3912 for (
int i = 0;
i <= last_index; ++
i) {
3917 T stub(masm->isolate(), kind);
3918 __ TailCallStub(&stub);
3923 __ Abort(kUnexpectedElementsKindInArrayConstructor);
// One-argument dispatch: may transition to the holey variant of the
// elements kind (&normal_sequence is the non-holey path) before
// tail-calling ArraySingleArgumentConstructorStub; the allocation site is
// debug-asserted.
3930 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
3938 Label normal_sequence;
3955 __ j(
zero, &normal_sequence);
3961 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
3964 __ TailCallStub(&stub_holey);
3966 __ bind(&normal_sequence);
3967 ArraySingleArgumentConstructorStub stub(masm->isolate(),
3970 __ TailCallStub(&stub);
3976 if (FLAG_debug_code) {
3977 Handle<Map> allocation_site_map =
3978 masm->isolate()->factory()->allocation_site_map();
3980 __ Assert(
equal, kExpectedAllocationSite);
3990 __ bind(&normal_sequence);
3993 for (
int i = 0;
i <= last_index; ++
i) {
3998 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
3999 __ TailCallStub(&stub);
4004 __ Abort(kUnexpectedElementsKindInArrayConstructor);
// Ahead-of-time generation: instantiate (and thereby cache) each stub
// variant for every elements kind / both internal-array kinds.
4012 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4015 for (
int i = 0;
i <= to_index; ++
i) {
4017 T stub(isolate, kind);
4028 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4030 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4032 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4040 for (
int i = 0;
i < 2;
i++) {
4042 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[
i]);
4044 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[
i]);
4046 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[
i]);
// Argument-count dispatch inside the array constructor: zero / one / N
// arguments select the matching dispatch helper.
4053 MacroAssembler* masm,
4056 Label not_zero_case, not_one_case;
4059 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm,
mode);
4061 __ bind(¬_zero_case);
4064 CreateArrayDispatchOneArgument(masm,
mode);
4066 __ bind(¬_one_case);
4067 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm,
mode);
4069 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm,
mode);
4071 CreateArrayDispatchOneArgument(masm,
mode);
4073 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm,
mode);
4088 if (FLAG_debug_code) {
4096 __ Assert(
not_zero, kUnexpectedInitialMapForArrayFunction);
4098 __ Assert(
equal, kUnexpectedInitialMapForArrayFunction);
4101 __ AssertUndefinedOrAllocationSite(
ebx);
4107 __ cmp(
ebx, isolate()->factory()->undefined_value());
4124 Label not_zero_case, not_one_case;
4125 Label normal_sequence;
4129 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4130 __ TailCallStub(&stub0);
4132 __ bind(¬_zero_case);
4141 __ j(
zero, &normal_sequence);
4143 InternalArraySingleArgumentConstructorStub
4145 __ TailCallStub(&stub1_holey);
4148 __ bind(&normal_sequence);
4149 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4150 __ TailCallStub(&stub1);
4152 __ bind(¬_one_case);
4153 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4154 __ TailCallStub(&stubN);
4166 if (FLAG_debug_code) {
4174 __ Assert(
not_zero, kUnexpectedInitialMapForArrayFunction);
4176 __ Assert(
equal, kUnexpectedInitialMapForArrayFunction);
4186 __ DecodeField<Map::ElementsKindBits>(
ecx);
4188 if (FLAG_debug_code) {
4194 kInvalidElementsKindForInternalArrayOrInternalPackedArray);
4198 Label fast_elements_case;
4200 __ j(
equal, &fast_elements_case);
4203 __ bind(&fast_elements_case);
4223 Register callee =
eax;
4224 Register call_data =
ebx;
4225 Register holder =
ecx;
4226 Register api_function_address =
edx;
4227 Register return_address =
edi;
4228 Register context =
esi;
4234 typedef FunctionCallbackArguments FCA;
4245 __ pop(return_address);
4258 Register scratch = call_data;
4261 __ push(Immediate(isolate()->factory()->undefined_value()));
4263 __ push(Immediate(isolate()->factory()->undefined_value()));
4271 __ push(Immediate(
reinterpret_cast<int>(isolate())));
4275 __ mov(scratch,
esp);
4278 __ push(return_address);
4284 const int kApiArgc = 1 + 1;
4288 const int kApiStackSpace = 4;
4290 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);
4306 ExternalReference thunk_ref =
4307 ExternalReference::invoke_function_callback(isolate());
4309 Operand context_restore_operand(
ebp,
4312 int return_value_offset = 0;
4314 return_value_offset = 2 + FCA::kArgsLength;
4316 return_value_offset = 2 + FCA::kReturnValueOffset;
4318 Operand return_value_operand(
ebp, return_value_offset *
kPointerSize);
4319 __ CallApiFunctionAndReturn(api_function_address,
4322 argc + FCA::kArgsLength + 1,
4323 return_value_operand,
4324 &context_restore_operand);
4343 const int kApiArgc = 2 + 1;
4345 Register api_function_address =
edx;
4346 Register scratch =
ebx;
4351 __ PrepareCallApiFunction(kApiArgc);
4356 ExternalReference thunk_ref =
4357 ExternalReference::invoke_accessor_getter_callback(isolate());
4359 __ CallApiFunctionAndReturn(api_function_address,
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kTransitionInfoOffset
static const Register function_address()
static const Register parameter_count()
static const Register index()
void GenerateReadElement(MacroAssembler *masm)
void GenerateNewSloppySlow(MacroAssembler *masm)
void GenerateNewStrict(MacroAssembler *masm)
void GenerateNewSloppyFast(MacroAssembler *masm)
static const int kLengthOffset
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateDispatchToArrayStub(MacroAssembler *masm, AllocationSiteOverrideMode mode)
ArgumentCountKey argument_count() const
static const int kCallInstructionLength
static void GenerateAheadOfTime(Isolate *isolate)
bool save_doubles() const
static void GenerateAheadOfTime(Isolate *isolate)
CEntryStub(Isolate *isolate, int result_size, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
bool NeedsImmovableCode()
bool call_data_undefined() const
STATIC_ASSERT(Code::kArgumentsBits+2<=kStubMinorKeyBits)
bool RecordCallTarget() const
bool CallAsMethod() const
bool CallAsMethod() const
void GenerateMiss(MacroAssembler *masm)
virtual InlineCacheState GetICState() const OVERRIDE
static const int kHeaderSize
Condition GetCondition() const
void GenerateInternalizedStrings(MacroAssembler *masm)
void GenerateStrings(MacroAssembler *masm)
CompareICState::State state() const
void GenerateMiss(MacroAssembler *masm)
CompareICState::State left() const
void GenerateGeneric(MacroAssembler *masm)
CompareICState::State right() const
void GenerateObjects(MacroAssembler *masm)
CompareICStub(Isolate *isolate, Token::Value op, CompareICState::State left, CompareICState::State right, CompareICState::State state)
void GenerateNumbers(MacroAssembler *masm)
void GenerateUniqueNames(MacroAssembler *masm)
void GenerateKnownObjects(MacroAssembler *masm)
void GenerateSmis(MacroAssembler *masm)
static const int kFirstOffset
static const int kMinLength
static const int kSecondOffset
@ SLOPPY_ARGUMENTS_MAP_INDEX
@ STRICT_ARGUMENTS_MAP_INDEX
@ ALIASED_ARGUMENTS_MAP_INDEX
static int SlotOffset(int index)
static void GenerateAheadOfTime(Isolate *isolate)
bool is_truncating() const
Register destination() const
static const uint64_t kSignificandMask
static const uint64_t kHiddenBit
static const int kPhysicalSignificandSize
static const int kMaxShortLength
static const int kResourceDataOffset
static const int kLengthOffset
static const int kHeaderSize
static const int kNativeContextOffset
static const int kEntrySize
static const int kMantissaBits
static const int kValueOffset
static const uint32_t kExponentMask
static const int kExponentBias
static const int kExponentShift
static const int kMapOffset
static const int kStrictArgumentsObjectSize
static const int kSloppyArgumentsObjectSize
static const int kArgumentsCalleeIndex
static const int kArgumentsLengthIndex
void GenerateLightweightMiss(MacroAssembler *masm, ExternalReference miss)
bool HasCallSiteInlineCheck() const
bool HasArgsInRegisters() const
bool ReturnTrueFalseObject() const
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateCase(MacroAssembler *masm, ElementsKind kind)
static const int kJSRegexpStaticOffsetsVectorSize
StackFrame::Type type() const
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kLiteralsOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kElementsOffset
static const int kDataOneByteCodeOffset
static const int kIrregexpCaptureCountOffset
static const int kDataTagOffset
static const int kDataOffset
static const int kDataUC16CodeOffset
static const int kFunctionOffset
static const Register ReceiverRegister()
static const Register NameRegister()
LoadICState state() const
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kBitField2Offset
static const int kPrototypeOffset
static const size_t kWriteBarrierCounterOffset
static const int kEvacuationCandidateMask
static const int kSkipEvacuationSlotsRecordingMask
static const int kElementsStartOffset
NameDictionaryLookupStub(Isolate *isolate, LookupMode mode)
static const int kCapacityOffset
Register dictionary() const
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static const int kTotalProbes
static const int kInlinedProbes
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kHashShift
static const int kEmptyHashField
static const int kHashFieldOffset
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
static const intptr_t kPageAlignmentMask
ProfileEntryHookStub(Isolate *isolate)
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kArgsLength
void Restore(MacroAssembler *masm)
void SaveCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void Save(MacroAssembler *masm)
void RestoreCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void GenerateIncremental(MacroAssembler *masm, Mode mode)
void InformIncrementalMarker(MacroAssembler *masm)
RememberedSetAction remembered_set_action() const
SaveFPRegsMode save_fp_regs_mode() const
@ kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
@ kReturnOnNoNeedToInformIncrementalMarker
static const byte kTwoByteNopInstruction
void CheckNeedsToInformIncrementalMarker(MacroAssembler *masm, OnNoNeedToInformIncrementalMarker on_no_need, Mode mode)
static const byte kFiveByteNopInstruction
virtual void Generate(MacroAssembler *masm) OVERRIDE
static const int kLastCaptureCountOffset
static const int kLastSubjectOffset
static const int kLastMatchOverhead
static const int kLastInputOffset
static const int kFirstCaptureOffset
static const Function * FunctionForId(FunctionId id)
static const int kHeaderSize
static const int kConstructStubOffset
static const int kFeedbackVectorOffset
static const int kNativeBitWithinByte
static const int kStrictModeBitWithinByte
static const int kMinLength
static const int kParentOffset
static const int kOffsetOffset
static Smi * FromInt(int value)
static const int kContextOffset
static const int kCallerSPOffset
static const int kCallerFPOffset
static void GenerateFixedRegStubsAheadOfTime(Isolate *isolate)
StoreBufferOverflowStub(Isolate *isolate, SaveFPRegsMode save_fp)
bool save_doubles() const
StringIndexFlags index_flags_
Label * receiver_not_string_
Label * index_out_of_range_
void GenerateFast(MacroAssembler *masm)
Label * index_not_number_
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static void GenerateOneByteCharsCompareLoop(MacroAssembler *masm, Register left, Register right, Register length, Register scratch1, Register scratch2, Label *chars_not_equal)
static void GenerateCompareFlatOneByteStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static void GenerateCopyCharacters(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, String::Encoding encoding)
static void GenerateFlatOneByteStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
static const int32_t kMaxOneByteCharCode
static const int kLengthOffset
static const int kCallerStackParameterCountFrameOffset
StubFunctionMode function_mode() const
static void GenerateAheadOfTime(Isolate *isolate)
static bool IsOrderedRelationalCompareOp(Value op)
static bool IsEqualityOp(Value op)
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
static const Register VectorRegister()
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
bool IsPowerOfTwo32(uint32_t value)
const uint32_t kStringEncodingMask
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
@ DONT_TRACK_ALLOCATION_SITE
const intptr_t kSmiSignMask
const uint32_t kTwoByteStringTag
const uint32_t kShortExternalStringTag
const int kFastElementsKindPackedToHoley
const uint32_t kNotStringTag
Operand FieldOperand(Register object, int offset)
const uint32_t kStringTag
@ FAST_HOLEY_DOUBLE_ELEMENTS
@ TERMINAL_FAST_ELEMENTS_KIND
@ FAST_HOLEY_SMI_ELEMENTS
Handle< T > handle(T *t, Isolate *isolate)
const uint32_t kOneByteStringTag
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool IsFastPackedElementsKind(ElementsKind kind)
const uint32_t kShortExternalStringMask
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
AllocationSiteOverrideMode
@ DISABLE_ALLOCATION_SITES
Condition NegateCondition(Condition cond)
@ times_half_pointer_size
const uint32_t kStringRepresentationMask
const uint32_t kSlicedNotConsMask
OStream & dec(OStream &os)
const uint32_t kInternalizedTag
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
const uint32_t kIsNotInternalizedMask
Operand ApiParameterOperand(int index)
const uint32_t kIsNotStringMask
ElementsKind GetInitialFastElementsKind()
@ STRING_INDEX_IS_ARRAY_INDEX
const uint32_t kIsIndirectStringMask
Debugger support for the V8 JavaScript engine.
static Handle< Value > Throw(Isolate *isolate, const char *message)
bool is(Register reg) const
#define T(name, string, precedence)