7 #if V8_TARGET_ARCH_ARM64
23 static void InitializeArrayConstructorDescriptor(
24 Isolate* isolate, CodeStubDescriptor* descriptor,
25 int constant_stack_parameter_count) {
31 Runtime::kArrayConstructor)->
entry;
33 if (constant_stack_parameter_count == 0) {
34 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
37 descriptor->Initialize(x0, deopt_handler, constant_stack_parameter_count,
43 void ArrayNoArgumentConstructorStub::InitializeDescriptor(
44 CodeStubDescriptor* descriptor) {
45 InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
49 void ArraySingleArgumentConstructorStub::InitializeDescriptor(
50 CodeStubDescriptor* descriptor) {
51 InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
55 void ArrayNArgumentsConstructorStub::InitializeDescriptor(
56 CodeStubDescriptor* descriptor) {
57 InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
61 static void InitializeInternalArrayConstructorDescriptor(
62 Isolate* isolate, CodeStubDescriptor* descriptor,
63 int constant_stack_parameter_count) {
65 Runtime::kInternalArrayConstructor)->
entry;
67 if (constant_stack_parameter_count == 0) {
68 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
71 descriptor->Initialize(x0, deopt_handler, constant_stack_parameter_count,
77 void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
78 CodeStubDescriptor* descriptor) {
79 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
83 void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
84 CodeStubDescriptor* descriptor) {
85 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
89 void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
90 CodeStubDescriptor* descriptor) {
91 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
95 #define __ ACCESS_MASM(masm)
99 ExternalReference miss) {
101 isolate()->counters()->code_stubs()->Increment();
103 CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
104 int param_count = descriptor.GetEnvironmentParameterCount();
108 DCHECK((param_count == 0) ||
109 x0.Is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
112 MacroAssembler::PushPopQueue queue(masm);
113 for (
int i = 0;
i < param_count; ++
i) {
114 queue.Queue(descriptor.GetEnvironmentParameterRegister(
i));
118 __ CallExternalReference(miss, param_count);
127 Register input =
source();
131 DCHECK(result.Is64Bits());
132 DCHECK(jssp.Is(masm->StackPointer()));
134 int double_offset =
offset();
141 __ Push(scratch1, scratch2);
147 if (input.is(jssp)) double_offset += 1 *
kDoubleSize;
148 __ Ldr(double_scratch,
MemOperand(input, double_offset));
151 __ TryConvertDoubleToInt64(result, double_scratch, &done);
152 __ Fmov(result, double_scratch);
160 Register exponent = scratch1;
167 __ CzeroX(result,
ge);
174 if (masm->emit_debug_code()) {
177 __ Check(
ge, kUnexpectedValue);
181 Register mantissa = scratch2;
187 __ Cneg(mantissa, mantissa,
ne);
191 __ Sub(exponent, exponent,
193 __ Lsl(result, mantissa, exponent);
197 __ Pop(double_scratch);
199 __ Pop(scratch2, scratch1);
205 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
209 FPRegister double_scratch,
213 Label not_identical, return_equal, heap_number;
214 Register result = x0;
217 __ B(
ne, ¬_identical);
223 if ((cond ==
lt) || (cond ==
gt)) {
226 }
else if (cond ==
eq) {
227 __ JumpIfHeapNumber(right, &heap_number);
229 Register right_type = scratch;
238 if ((cond ==
le) || (cond ==
ge)) {
240 __ B(
ne, &return_equal);
241 __ JumpIfNotRoot(right, Heap::kUndefinedValueRootIndex, &return_equal);
253 __ Bind(&return_equal);
256 }
else if (cond ==
gt) {
266 if ((cond !=
lt) && (cond !=
gt)) {
268 __ Bind(&heap_number);
273 __ Fcmp(double_scratch, double_scratch);
274 __ B(
vc, &return_equal);
285 if (FLAG_debug_code) {
289 __ Bind(¬_identical);
294 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
302 if (masm->emit_debug_code()) {
305 __ Assert(
ne, kExpectedNonIdenticalObjects);
312 Label right_non_object;
315 __ B(
lt, &right_non_object);
318 DCHECK(left.is(x0) || right.is(x0));
319 Label return_not_equal;
320 __ Bind(&return_not_equal);
323 __ Bind(&right_non_object);
335 __ B(
ge, &return_not_equal);
341 __ Orr(scratch, left_type, right_type);
342 __ TestAndBranchIfAllClear(
348 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
356 DCHECK((left.is(x0) && right.is(x1)) ||
357 (right.is(x0) && left.is(x1)));
358 Register result = x0;
360 Label right_is_smi, done;
361 __ JumpIfSmi(right, &right_is_smi);
367 Label is_heap_number;
368 __ JumpIfHeapNumber(right, &is_heap_number);
370 if (!right.is(result)) {
374 __ Bind(&is_heap_number);
378 __ JumpIfNotHeapNumber(right, slow);
384 __ SmiUntagToDouble(left_d, left);
387 __ Bind(&right_is_smi);
392 Label is_heap_number;
393 __ JumpIfHeapNumber(left, &is_heap_number);
395 if (!left.is(result)) {
399 __ Bind(&is_heap_number);
403 __ JumpIfNotHeapNumber(left, slow);
409 __ SmiUntagToDouble(right_d, right);
418 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
425 Label* possible_strings,
426 Label* not_both_strings) {
427 DCHECK(!
AreAliased(left, right, left_map, right_map, left_type, right_type));
428 Register result = x0;
444 __ Bind(&object_test);
452 __ B(
lt, not_both_strings);
459 Register right_bitfield = right_type;
460 Register left_bitfield = left_type;
463 __ And(result, right_bitfield, left_bitfield);
470 static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
475 __ JumpIfNotSmi(input, fail);
477 __ JumpIfSmi(input, &ok);
478 __ JumpIfNotHeapNumber(input, fail);
489 Register result = x0;
493 CompareICStub_CheckInputType(masm, lhs,
left(), &miss);
494 CompareICStub_CheckInputType(masm, rhs,
right(), &miss);
497 Label not_smis, both_loaded_as_doubles;
498 Label not_two_smis, smi_done;
499 __ JumpIfEitherNotSmi(lhs, rhs, ¬_two_smis);
504 __ Bind(¬_two_smis);
511 EmitIdenticalObjectComparison(masm, lhs, rhs, x10,
d0, &slow, cond);
515 __ JumpIfBothNotSmi(lhs, rhs, ¬_smis);
525 FPRegister rhs_d =
d0;
526 FPRegister lhs_d =
d1;
527 EmitSmiNonsmiComparison(masm, lhs, rhs, lhs_d, rhs_d, &slow,
strict());
529 __ Bind(&both_loaded_as_doubles);
533 __ Fcmp(lhs_d, rhs_d);
537 __ Csinv(result, result, xzr,
ge);
545 if ((cond ==
lt) || (cond ==
le)) {
557 Register rhs_map = x10;
558 Register rhs_type = x11;
559 Register lhs_map = x12;
560 Register lhs_type = x13;
569 EmitStrictTwoHeapObjectCompare(masm, lhs, rhs, lhs_type, rhs_type, x14);
572 Label check_for_internalized_strings;
573 Label flat_string_check;
577 __ B(
ne, &check_for_internalized_strings);
578 __ Cmp(lhs_map, rhs_map);
582 __ B(
ne, &flat_string_check);
588 __ B(&both_loaded_as_doubles);
590 __ Bind(&check_for_internalized_strings);
596 EmitCheckForInternalizedStringsOrObjects(masm, lhs, rhs, lhs_map, rhs_map,
598 &flat_string_check, &slow);
603 __ Bind(&flat_string_check);
604 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(lhs_type, rhs_type, x14,
607 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, x10,
618 if (FLAG_debug_code) {
628 native =
strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
630 native = Builtins::COMPARE;
632 if ((cond ==
lt) || (cond ==
le)) {
660 saved_regs.Remove(*(masm->TmpList()));
661 saved_fp_regs.Remove(*(masm->FPTmpList()));
663 __ PushCPURegList(saved_regs);
665 __ PushCPURegList(saved_fp_regs);
668 AllowExternalCallThatCantCauseGC scope(masm);
669 __ Mov(x0, ExternalReference::isolate_address(isolate()));
671 ExternalReference::store_buffer_overflow_function(isolate()), 1, 0);
674 __ PopCPURegList(saved_fp_regs);
676 __ PopCPURegList(saved_regs);
691 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
692 UseScratchRegisterScope temps(masm);
694 Register return_address = temps.AcquireX();
695 __ Mov(return_address,
lr);
698 __ Mov(
lr, saved_lr);
699 __ PushSafepointRegisters();
700 __ Ret(return_address);
705 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
706 UseScratchRegisterScope temps(masm);
707 Register return_address = temps.AcquireX();
709 __ Mov(return_address,
lr);
710 __ PopSafepointRegisters();
711 __ Ret(return_address);
722 Register result_tagged = x0;
723 Register base_tagged = x10;
725 DCHECK(exponent_tagged.is(x11));
727 DCHECK(exponent_integer.is(x12));
728 Register scratch1 = x14;
729 Register scratch0 = x15;
730 Register saved_lr = x19;
731 FPRegister result_double =
d0;
732 FPRegister base_double =
d0;
733 FPRegister exponent_double =
d1;
734 FPRegister base_double_copy =
d2;
735 FPRegister scratch1_double =
d6;
736 FPRegister scratch0_double =
d7;
739 Label exponent_is_smi, exponent_is_integer;
748 Label unpack_exponent;
750 __ Pop(exponent_tagged, base_tagged);
752 __ JumpIfSmi(base_tagged, &base_is_smi);
753 __ JumpIfNotHeapNumber(base_tagged, &call_runtime);
756 __ B(&unpack_exponent);
757 __ Bind(&base_is_smi);
759 __ SmiUntagToDouble(base_double, base_tagged);
761 __ Bind(&unpack_exponent);
765 __ JumpIfSmi(exponent_tagged, &exponent_is_smi);
766 __ JumpIfNotHeapNumber(exponent_tagged, &call_runtime);
768 __ Ldr(exponent_double,
771 __ JumpIfSmi(exponent_tagged, &exponent_is_smi);
772 __ Ldr(exponent_double,
780 __ TryRepresentDoubleAsInt64(exponent_integer, exponent_double,
781 scratch0_double, &exponent_is_integer);
784 FPRegister half_double =
d3;
785 FPRegister minus_half_double =
d4;
790 __ Fmov(minus_half_double, -0.5);
791 __ Fmov(half_double, 0.5);
792 __ Fcmp(minus_half_double, exponent_double);
793 __ Fccmp(half_double, exponent_double,
NZFlag,
ne);
798 __ B(
ne, &call_runtime);
818 __ Fadd(base_double, base_double, fp_zero);
823 if (masm->emit_debug_code()) {
824 UseScratchRegisterScope temps(masm);
825 Register temp = temps.AcquireX();
826 __ Fneg(scratch0_double, fp_zero);
830 __ Fmov(temp, fp_zero);
831 __ CheckRegisterIsClear(temp, kCouldNotGenerateZero);
832 __ Fmov(temp, scratch0_double);
834 __ CheckRegisterIsClear(temp, kCouldNotGenerateNegativeZero);
836 __ Fadd(scratch0_double, scratch0_double, fp_zero);
837 __ Fmov(temp, scratch0_double);
838 __ CheckRegisterIsClear(temp, kExpectedPositiveZero);
845 __ Fsub(scratch0_double, base_double, base_double);
846 __ Fcmp(scratch0_double, 0.0);
847 __ Fabs(scratch1_double, base_double);
848 __ Fcsel(base_double, scratch1_double, base_double,
vs);
851 __ Fsqrt(result_double, base_double);
852 __ Fcmp(exponent_double, 0.0);
855 __ Fmov(scratch0_double, 1.0);
856 __ Fdiv(result_double, scratch0_double, result_double);
861 AllowExternalCallThatCantCauseGC scope(masm);
862 __ Mov(saved_lr,
lr);
864 ExternalReference::power_double_double_function(isolate()),
866 __ Mov(
lr, saved_lr);
871 __ Bind(&exponent_is_smi);
875 __ SmiUntag(exponent_integer, exponent_tagged);
878 __ Bind(&exponent_is_integer);
885 Register exponent_abs = x13;
886 __ Cmp(exponent_integer, 0);
887 __ Cneg(exponent_abs, exponent_integer,
mi);
901 Label power_loop, power_loop_entry, power_loop_exit;
902 __ Fmov(scratch1_double, base_double);
903 __ Fmov(base_double_copy, base_double);
904 __ Fmov(result_double, 1.0);
905 __ B(&power_loop_entry);
907 __ Bind(&power_loop);
908 __ Fmul(scratch1_double, scratch1_double, scratch1_double);
909 __ Lsr(exponent_abs, exponent_abs, 1);
910 __ Cbz(exponent_abs, &power_loop_exit);
912 __ Bind(&power_loop_entry);
913 __ Tbz(exponent_abs, 0, &power_loop);
914 __ Fmul(result_double, result_double, scratch1_double);
917 __ Bind(&power_loop_exit);
923 __ Fmov(scratch0_double, 1.0);
924 __ Fdiv(result_double, scratch0_double, result_double);
931 __ Fcmp(result_double, 0.0);
936 __ Bind(&call_runtime);
938 __ Push(base_tagged, exponent_tagged);
939 __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
943 __ AllocateHeapNumber(result_tagged, &call_runtime, scratch0, scratch1,
945 DCHECK(result_tagged.is(x0));
947 isolate()->counters()->math_pow(), 1, scratch0, scratch1);
950 AllowExternalCallThatCantCauseGC scope(masm);
951 __ Mov(saved_lr,
lr);
952 __ Fmov(base_double, base_double_copy);
953 __ Scvtf(exponent_double, exponent_integer);
955 ExternalReference::power_double_double_function(isolate()),
957 __ Mov(
lr, saved_lr);
960 isolate()->counters()->math_pow(), 1, scratch0, scratch1);
966 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
979 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
995 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1029 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
1049 DCHECK(jssp.Is(
__ StackPointer()));
1050 const Register& argc_input = x0;
1051 const Register& target_input = x1;
1060 const Register& argv = x21;
1061 const Register& argc = x22;
1062 const Register& target = x23;
1067 Register temp_argv = x11;
1074 FrameScope scope(masm, StackFrame::MANUAL);
1088 __ Mov(argc, argc_input);
1089 __ Mov(target, target_input);
1090 __ Mov(argv, temp_argv);
1131 __ Mov(x2, ExternalReference::isolate_address(isolate()));
1133 Label return_location;
1134 __ Adr(x12, &return_location);
1137 if (
__ emit_debug_code()) {
1140 UseScratchRegisterScope temps(masm);
1141 Register temp = temps.AcquireX();
1145 __ Check(
eq, kReturnAddressNotFoundInFrame);
1150 __ Bind(&return_location);
1156 const Register& result = x0;
1159 Label exception_returned;
1160 __ CompareRoot(result, Heap::kExceptionRootIndex);
1161 __ B(
eq, &exception_returned);
1173 DCHECK(jssp.Is(
__ StackPointer()));
1180 __ AssertFPCRState();
1185 __ SetStackPointer(csp);
1188 __ Bind(&exception_returned);
1191 ExternalReference pending_exception_address(
1192 Isolate::kPendingExceptionAddress, isolate());
1193 const Register& exception = result;
1194 const Register& exception_address = x11;
1195 __ Mov(exception_address, Operand(pending_exception_address));
1199 __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
1209 Label throw_termination_exception;
1210 __ Cmp(exception, Operand(isolate()->factory()->termination_exception()));
1211 __ B(
eq, &throw_termination_exception);
1217 __ SetStackPointer(jssp);
1223 __ Throw(x0, x10, x11, x12, x13);
1225 __ Bind(&throw_termination_exception);
1230 __ ThrowUncatchable(x0, x10, x11, x12, x13);
1245 DCHECK(jssp.Is(
__ StackPointer()));
1246 Register code_entry = x0;
1250 __ EnableInstrumentation();
1252 Label invoke, handler_entry, exit;
1259 __ SetStackPointer(csp);
1260 __ PushCalleeSavedRegisters();
1262 __ SetStackPointer(jssp);
1272 __ Fmov(fp_zero, 0.0);
1275 int marker =
type();
1276 int64_t bad_frame_pointer = -1L;
1277 __ Mov(x13, bad_frame_pointer);
1279 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
1282 __ Push(x13, xzr, x12, x10);
1288 Label non_outermost_js, done;
1289 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1290 __ Mov(x10, ExternalReference(js_entry_sp));
1292 __ Cbnz(x11, &non_outermost_js);
1297 __ Bind(&non_outermost_js);
1321 Assembler::BlockPoolsScope block_pools(masm);
1322 __ bind(&handler_entry);
1328 __ Mov(x10, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1332 __ LoadRoot(x0, Heap::kExceptionRootIndex);
1338 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
1345 __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
1346 __ Mov(x11, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1360 ExternalReference entry(
type() == StackFrame::ENTRY_CONSTRUCT
1361 ? Builtins::kJSConstructEntryTrampoline
1362 : Builtins::kJSEntryTrampoline,
1386 Label non_outermost_js_2;
1389 __ B(
ne, &non_outermost_js_2);
1390 __ Mov(x11, ExternalReference(js_entry_sp));
1392 __ Bind(&non_outermost_js_2);
1396 __ Mov(x11, ExternalReference(Isolate::kCEntryFPAddress, isolate()));
1402 DCHECK(jssp.Is(
__ StackPointer()));
1404 __ SetStackPointer(csp);
1405 __ PopCalleeSavedRegisters();
1420 PropertyAccessCompiler::TailCallBuiltin(
1421 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1433 Register result = x0;
1434 Register
function =
right();
1435 Register
object =
left();
1436 Register scratch1 = x6;
1437 Register scratch2 = x7;
1438 Register res_true = x8;
1439 Register res_false = x9;
1442 Register map_check_site = x4;
1447 Label not_js_object, slow;
1450 __ Pop(
function,
object);
1454 __ LoadTrueFalseRoots(res_true, res_false);
1464 __ JumpIfSmi(
object, ¬_js_object);
1465 __ IsObjectJSObjectType(
object,
map, scratch2, ¬_js_object);
1471 __ JumpIfNotRoot(
function, Heap::kInstanceofCacheFunctionRootIndex, &miss);
1472 __ JumpIfNotRoot(
map, Heap::kInstanceofCacheMapRootIndex, &miss);
1473 __ LoadRoot(result, Heap::kInstanceofCacheAnswerRootIndex);
1479 Register prototype = x13;
1480 __ TryGetFunctionPrototype(
function, prototype, scratch2, &slow,
1484 __ JumpIfSmi(prototype, &slow);
1485 __ IsObjectJSObjectType(prototype, scratch1, scratch2, &slow);
1491 __ GetRelocatedValueLocation(map_check_site, scratch1);
1496 __ StoreRoot(
function, Heap::kInstanceofCacheFunctionRootIndex);
1497 __ StoreRoot(
map, Heap::kInstanceofCacheMapRootIndex);
1500 Label return_true, return_result;
1501 Register smi_value = scratch1;
1504 Register chain_map = x1;
1505 Register chain_prototype = x14;
1506 Register null_value = x15;
1509 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1511 __ Mov(result, res_false);
1520 __ Cmp(chain_prototype, prototype);
1521 __ B(
eq, &return_true);
1525 __ Cmp(chain_prototype, null_value);
1526 __ B(
eq, &return_result);
1536 __ Bind(&return_true);
1537 __ Mov(result, res_true);
1542 __ Bind(&return_result);
1545 __ Add(map_check_site, map_check_site, kDeltaToLoadBoolResult);
1546 __ GetRelocatedValueLocation(map_check_site, scratch2);
1550 __ StoreRoot(cached_value, Heap::kInstanceofCacheAnswerRootIndex);
1554 Label object_not_null, object_not_null_or_smi;
1556 __ Bind(¬_js_object);
1557 Register object_type = x14;
1565 __ JumpIfSmi(
function, &slow);
1566 __ JumpIfNotObjectType(
1569 __ Mov(result, res_false);
1572 __ Cmp(object_type, Operand(isolate()->factory()->null_value()));
1573 __ B(
ne, &object_not_null);
1576 __ Bind(&object_not_null);
1578 __ JumpIfNotSmi(
object, &object_not_null_or_smi);
1581 __ Bind(&object_not_null_or_smi);
1583 __ IsObjectJSStringType(
object, scratch2, &slow);
1592 __ Push(
object,
function);
1597 __ LoadTrueFalseRoots(res_true, res_false);
1599 __ Csel(result, res_true, res_false,
eq);
1608 DCHECK(arg_count.is(x0));
1613 static const int kDisplacement =
1618 __ JumpIfNotSmi(key, &slow);
1621 Register local_fp = x11;
1622 Register caller_fp = x11;
1623 Register caller_ctx = x12;
1629 __ Csel(local_fp,
fp, caller_fp,
ne);
1630 __ B(
ne, &skip_adaptor);
1635 __ Bind(&skip_adaptor);
1639 __ Cmp(key, arg_count);
1643 __ Sub(x10, arg_count, key);
1652 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
1664 Register caller_fp = x10;
1681 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
1694 Register arg_count_smi = x3;
1695 Register param_count_smi = x3;
1696 Register param_count = x7;
1697 Register recv_arg = x14;
1698 Register
function = x4;
1699 __ Pop(param_count_smi, recv_arg,
function);
1700 __ SmiUntag(param_count, param_count_smi);
1703 Register caller_fp = x11;
1704 Register caller_ctx = x12;
1706 Label adaptor_frame, try_allocate;
1711 __ B(
eq, &adaptor_frame);
1723 Register arg_count = x2;
1724 __ Mov(arg_count, param_count);
1725 __ B(&try_allocate);
1728 __ Bind(&adaptor_frame);
1729 __ Ldr(arg_count_smi,
1732 __ SmiUntag(arg_count, arg_count_smi);
1737 Register mapped_params = x1;
1738 __ Cmp(param_count, arg_count);
1739 __ Csel(mapped_params, param_count, arg_count,
lt);
1741 __ Bind(&try_allocate);
1756 const int kParameterMapHeaderSize =
1760 Register
size = x10;
1766 __ Cmp(mapped_params, 0);
1776 Register alloc_obj = x0;
1791 Register global_object = x10;
1792 Register global_ctx = x10;
1793 Register sloppy_args_map = x11;
1794 Register aliased_args_map = x10;
1799 __ Ldr(sloppy_args_map,
1801 __ Ldr(aliased_args_map,
1803 __ Cmp(mapped_params, 0);
1804 __ CmovX(sloppy_args_map, aliased_args_map,
ne);
1808 __ LoadRoot(x10, Heap::kEmptyFixedArrayRootIndex);
1816 __ AssertNotSmi(
function);
1840 Register elements = x5;
1845 Label skip_parameter_map;
1846 __ Cmp(mapped_params, 0);
1849 Register backing_store = x6;
1850 __ CmovX(backing_store, elements,
eq);
1851 __ B(
eq, &skip_parameter_map);
1853 __ LoadRoot(x10, Heap::kSloppyArgumentsElementsMapRootIndex);
1855 __ Add(x10, mapped_params, 2);
1861 __ Add(x10, x10, kParameterMapHeaderSize);
1892 Register loop_count = x11;
1893 Register index = x12;
1894 Register the_hole = x13;
1895 Label parameters_loop, parameters_test;
1896 __ Mov(loop_count, mapped_params);
1898 __ Sub(index, index, mapped_params);
1900 __ LoadRoot(the_hole, Heap::kTheHoleValueRootIndex);
1902 __ Add(backing_store, backing_store, kParameterMapHeaderSize);
1904 __ B(¶meters_test);
1906 __ Bind(¶meters_loop);
1907 __ Sub(loop_count, loop_count, 1);
1914 __ Bind(¶meters_test);
1915 __ Cbnz(loop_count, ¶meters_loop);
1917 __ Bind(&skip_parameter_map);
1919 __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);
1933 Label arguments_loop, arguments_test;
1934 __ Mov(x10, mapped_params);
1936 __ B(&arguments_test);
1938 __ Bind(&arguments_loop);
1943 __ Add(x10, x10, 1);
1945 __ Bind(&arguments_test);
1946 __ Cmp(x10, arg_count);
1947 __ B(
lt, &arguments_loop);
1953 __ Push(
function, recv_arg, arg_count_smi);
1954 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
1970 __ TailCallExternalReference(
1971 ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
1976 PropertyAccessCompiler::TailCallBuiltin(
1977 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1991 Register param_count_smi = x1;
1992 Register params = x2;
1993 Register
function = x3;
1994 Register param_count = x13;
1995 __ Pop(param_count_smi, params,
function);
1996 __ SmiUntag(param_count, param_count_smi);
1999 Register caller_fp = x11;
2000 Register caller_ctx = x12;
2001 Label try_allocate, runtime;
2006 __ B(
ne, &try_allocate);
2015 __ Ldr(param_count_smi,
2018 __ SmiUntag(param_count, param_count_smi);
2024 Register
size = x10;
2025 __ Bind(&try_allocate);
2027 __ Cmp(param_count, 0);
2033 Register alloc_obj = x0;
2034 __ Allocate(
size, alloc_obj, x11, x12, &runtime,
2038 Register global_object = x10;
2039 Register global_ctx = x10;
2040 Register strict_args_map = x4;
2044 __ Ldr(strict_args_map,
2055 __ LoadRoot(x5, Heap::kEmptyFixedArrayRootIndex);
2067 __ Cbz(param_count, &done);
2071 Register elements = x5;
2074 __ LoadRoot(x10, Heap::kFixedArrayMapRootIndex);
2089 Register array = x4;
2099 __ Sub(param_count, param_count, 1);
2100 __ Cbnz(param_count, &loop);
2108 __ Push(
function, params, param_count_smi);
2109 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
2114 #ifdef V8_INTERPRETED_REGEXP
2115 __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
2139 Register string_type = w0;
2140 Register jsstring_length = x2;
2141 Register jsregexp_object = x3;
2142 Register string_encoding = w4;
2143 Register sliced_string_offset = w5;
2144 Register string_representation = w6;
2157 Register subject = x19;
2158 Register regexp_data = x20;
2159 Register last_match_info_elements = x21;
2160 Register code_object = x22;
2163 CPURegList used_callee_saved_registers(subject,
2165 last_match_info_elements,
2167 __ PushCPURegList(used_callee_saved_registers);
2185 ExternalReference address_of_regexp_stack_memory_address =
2186 ExternalReference::address_of_regexp_stack_memory_address(isolate());
2187 ExternalReference address_of_regexp_stack_memory_size =
2188 ExternalReference::address_of_regexp_stack_memory_size(isolate());
2189 __ Mov(x10, address_of_regexp_stack_memory_size);
2191 __ Cbz(x10, &runtime);
2194 DCHECK(jssp.Is(
__ StackPointer()));
2195 __ Peek(jsregexp_object, kJSRegExpOffset);
2196 __ JumpIfSmi(jsregexp_object, &runtime);
2197 __ JumpIfNotObjectType(jsregexp_object, x10, x10,
JS_REGEXP_TYPE, &runtime);
2201 if (FLAG_debug_code) {
2204 __ Check(
ne, kUnexpectedTypeForRegExpDataFixedArrayExpected);
2206 __ Check(
eq, kUnexpectedTypeForRegExpDataFixedArrayExpected);
2224 __ Add(x10, x10, x10);
2229 __ Mov(sliced_string_offset, 0);
2231 DCHECK(jssp.Is(
__ StackPointer()));
2232 __ Peek(subject, kSubjectOffset);
2233 __ JumpIfSmi(subject, &runtime);
2257 Label check_underlying;
2259 Label not_seq_nor_cons;
2260 Label external_string;
2261 Label not_long_external;
2264 __ And(string_representation,
2280 __ Cbz(string_representation, &seq_string);
2288 __ B(
ge, ¬_seq_nor_cons);
2292 __ JumpIfNotRoot(x10, Heap::kempty_stringRootIndex, &runtime);
2297 __ Bind(&check_underlying);
2305 __ TestAndBranchIfAnySet(string_type.X(),
2310 __ Bind(&seq_string);
2314 DCHECK(jssp.Is(
__ StackPointer()));
2315 __ Peek(x10, kPreviousIndexOffset);
2316 __ JumpIfNotSmi(x10, &runtime);
2317 __ Cmp(jsstring_length, x10);
2322 __ SmiUntag(x1, x10);
2339 __ Add(x10, regexp_data, x10);
2347 __ JumpIfSmi(code_object, &runtime);
2350 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1,
2355 __ EnterExitFrame(
false, x10, 1);
2366 __ Mov(x10, ExternalReference::isolate_address(isolate()));
2369 Register length = w11;
2370 Register previous_index_in_bytes = w12;
2371 Register start = x13;
2386 __ Ubfx(string_encoding, string_encoding, 2, 1);
2387 __ Eor(string_encoding, string_encoding, 1);
2393 __ Lsl(previous_index_in_bytes, w1, string_encoding);
2394 __ Lsl(length, length, string_encoding);
2395 __ Lsl(sliced_string_offset, sliced_string_offset, string_encoding);
2398 __ Mov(x0, subject);
2406 __ Add(w10, previous_index_in_bytes, sliced_string_offset);
2407 __ Add(x2, start, Operand(w10,
UXTW));
2411 __ Sub(w10, length, previous_index_in_bytes);
2412 __ Add(x3, x2, Operand(w10,
UXTW));
2415 __ Mov(x4, ExternalReference::address_of_static_offsets_vector(isolate()));
2423 __ Mov(x10, address_of_regexp_stack_memory_address);
2425 __ Mov(x11, address_of_regexp_stack_memory_size);
2427 __ Add(x6, x10, x11);
2434 DirectCEntryStub stub(isolate());
2435 stub.GenerateCall(masm, code_object);
2437 __ LeaveExitFrame(
false, x10,
true);
2440 Label failure, exception;
2442 __ CompareAndBranch(w0,
2449 Register number_of_capture_registers = x12;
2456 __ Add(x10, x10, x10);
2457 __ Add(number_of_capture_registers, x10, 2);
2460 DCHECK(jssp.Is(
__ StackPointer()));
2461 __ Peek(x10, kLastMatchInfoOffset);
2462 __ JumpIfSmi(x10, &runtime);
2466 __ Ldr(last_match_info_elements,
2470 __ JumpIfNotRoot(x10, Heap::kFixedArrayMapRootIndex, &runtime);
2485 __ SmiTag(x10, number_of_capture_registers);
2495 __ Mov(x10, subject);
2496 __ RecordWriteField(last_match_info_elements,
2505 __ Mov(x10, subject);
2506 __ RecordWriteField(last_match_info_elements,
2513 Register last_match_offsets = x13;
2514 Register offsets_vector_index = x14;
2515 Register current_offset = x15;
2519 ExternalReference address_of_static_offsets_vector =
2520 ExternalReference::address_of_static_offsets_vector(isolate());
2521 __ Mov(offsets_vector_index, address_of_static_offsets_vector);
2523 Label next_capture, done;
2526 __ Add(last_match_offsets,
2527 last_match_info_elements,
2529 __ Bind(&next_capture);
2530 __ Subs(number_of_capture_registers, number_of_capture_registers, 2);
2534 __ Ldr(current_offset,
2537 __ SmiTag(x10, current_offset);
2544 __ B(&next_capture);
2548 __ Peek(x0, kLastMatchInfoOffset);
2549 __ PopCPURegList(used_callee_saved_registers);
2554 __ Bind(&exception);
2555 Register exception_value = x0;
2559 __ Mov(x10, Operand(isolate()->factory()->the_hole_value()));
2561 Operand(ExternalReference(Isolate::kPendingExceptionAddress,
2564 __ Cmp(x10, exception_value);
2570 Label termination_exception;
2571 __ JumpIfRoot(exception_value,
2572 Heap::kTerminationExceptionRootIndex,
2573 &termination_exception);
2575 __ Throw(exception_value, x10, x11, x12, x13);
2577 __ Bind(&termination_exception);
2578 __ ThrowUncatchable(exception_value, x10, x11, x12, x13);
2581 __ Mov(x0, Operand(isolate()->factory()->null_value()));
2582 __ PopCPURegList(used_callee_saved_registers);
2588 __ PopCPURegList(used_callee_saved_registers);
2589 __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
2593 __ Bind(¬_seq_nor_cons);
2595 __ B(
ne, ¬_long_external);
2598 __ Bind(&external_string);
2599 if (masm->emit_debug_code()) {
2605 __ Check(
eq, kExternalStringExpectedButNotFound);
2608 __ Check(
ne, kExternalStringExpectedButNotFound);
2619 __ Bind(¬_long_external);
2621 __ TestAndBranchIfAnySet(string_representation,
2626 __ Ldr(sliced_string_offset,
2629 __ B(&check_underlying);
2634 static void GenerateRecordCallTarget(MacroAssembler* masm,
2640 Register scratch2) {
2650 Label initialize, done, miss, megamorphic, not_array_function;
2653 masm->isolate()->heap()->megamorphic_symbol());
2655 masm->isolate()->heap()->uninitialized_symbol());
2664 __ Cmp(scratch1,
function);
2667 if (!FLAG_pretenuring_call_new) {
2673 __ JumpIfNotRoot(scratch2, Heap::kAllocationSiteMapRootIndex, &miss);
2677 __ Cmp(
function, scratch1);
2678 __ B(
ne, &megamorphic);
2686 __ JumpIfRoot(scratch1, Heap::kUninitializedSymbolRootIndex, &initialize);
2689 __ Bind(&megamorphic);
2692 __ LoadRoot(scratch2, Heap::kMegamorphicSymbolRootIndex);
2698 __ Bind(&initialize);
2700 if (!FLAG_pretenuring_call_new) {
2703 __ Cmp(
function, scratch1);
2704 __ B(
ne, ¬_array_function);
2711 CreateAllocationSiteStub create_stub(masm->isolate());
2720 __ CallStub(&create_stub);
2727 __ Bind(¬_array_function);
2746 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
2757 static void EmitSlowCase(MacroAssembler* masm,
2761 Label* non_function) {
2766 __ Mov(x0, argc + 1);
2768 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY);
2770 Handle<Code> adaptor =
2771 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2777 __ Bind(non_function);
2781 __ GetBuiltinFunction(
function, Builtins::CALL_NON_FUNCTION);
2782 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2787 static void EmitWrapCase(MacroAssembler* masm,
int argc, Label* cont) {
2799 static void CallFunctionNoFeedback(MacroAssembler* masm,
2800 int argc,
bool needs_checks,
2801 bool call_as_method) {
2803 Register
function = x1;
2805 Label slow, non_function, wrap, cont;
2812 __ JumpIfSmi(
function, &non_function);
2820 ParameterCount actual(argc);
2822 if (call_as_method) {
2824 EmitContinueIfStrictOrNative(masm, &cont);
2831 __ JumpIfSmi(x3, &wrap);
2840 __ InvokeFunction(
function,
2847 EmitSlowCase(masm, argc,
function, type, &non_function);
2850 if (call_as_method) {
2852 EmitWrapCase(masm, argc, &cont);
2869 Register
function = x1;
2870 Label slow, non_function_call;
2873 __ JumpIfSmi(
function, &non_function_call);
2875 Register object_type = x10;
2880 GenerateRecordCallTarget(masm, x0,
function, x2, x3, x4, x5);
2883 if (FLAG_pretenuring_call_new) {
2889 Label feedback_register_initialized;
2893 __ JumpIfRoot(x5, Heap::kAllocationSiteMapRootIndex,
2894 &feedback_register_initialized);
2895 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
2896 __ bind(&feedback_register_initialized);
2899 __ AssertUndefinedOrAllocationSite(x2, x5);
2903 Register jump_reg = x4;
2904 Register shared_func_info = jump_reg;
2905 Register cons_stub = jump_reg;
2906 Register cons_stub_code = jump_reg;
2907 __ Ldr(shared_func_info,
2913 __ Br(cons_stub_code);
2918 __ B(
ne, &non_function_call);
2919 __ GetBuiltinFunction(x1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2922 __ Bind(&non_function_call);
2923 __ GetBuiltinFunction(x1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2928 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2933 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2946 Register
function = x1;
2948 Register index = x3;
2949 Register scratch = x4;
2954 __ Cmp(
function, scratch);
2966 __ JumpIfNotRoot(
map, Heap::kAllocationSiteMapRootIndex, &miss);
2969 __ Mov(allocation_site, scratch);
2970 ArrayConstructorStub stub(masm->isolate(),
arg_count());
2971 __ TailCallStub(&stub);
2977 CallFunctionNoFeedback(masm,
2991 Label extra_checks_or_miss, slow_start;
2992 Label slow, non_function, wrap, cont;
2993 Label have_js_function;
2995 ParameterCount actual(argc);
2997 Register
function = x1;
2999 Register index = x3;
3009 __ Cmp(x4,
function);
3010 __ B(
ne, &extra_checks_or_miss);
3012 __ bind(&have_js_function);
3014 EmitContinueIfStrictOrNative(masm, &cont);
3019 __ JumpIfSmi(x3, &wrap);
3025 __ InvokeFunction(
function,
3031 EmitSlowCase(masm, argc,
function, type, &non_function);
3035 EmitWrapCase(masm, argc, &cont);
3038 __ bind(&extra_checks_or_miss);
3041 __ JumpIfRoot(x4, Heap::kMegamorphicSymbolRootIndex, &slow_start);
3042 __ JumpIfRoot(x4, Heap::kUninitializedSymbolRootIndex, &miss);
3044 if (!FLAG_trace_ic) {
3047 __ AssertNotSmi(x4);
3051 __ LoadRoot(x5, Heap::kMegamorphicSymbolRootIndex);
3061 __ bind(&slow_start);
3064 __ JumpIfSmi(
function, &non_function);
3068 __ B(&have_js_function);
3086 : IC::kCallIC_Customization_Miss;
3088 ExternalReference miss = ExternalReference(IC_Utility(
id),
3090 __ CallExternalReference(miss, 4);
3131 MacroAssembler* masm,
3132 const RuntimeCallHelper& call_helper) {
3133 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3138 call_helper.BeforeCall(masm);
3142 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3146 __ CallRuntime(Runtime::kNumberToSmi, 1);
3155 call_helper.AfterCall(masm);
3166 call_helper.BeforeCall(masm);
3169 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
3171 call_helper.AfterCall(masm);
3174 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3183 __ LoadRoot(
result_, Heap::kSingleCharacterStringCacheRootIndex);
3193 MacroAssembler* masm,
3194 const RuntimeCallHelper& call_helper) {
3195 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3198 call_helper.BeforeCall(masm);
3200 __ CallRuntime(Runtime::kCharFromCode, 1);
3202 call_helper.AfterCall(masm);
3205 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3215 __ JumpIfEitherNotSmi(x0, x1, &miss);
3236 Label unordered, maybe_undefined1, maybe_undefined2;
3237 Label miss, handle_lhs, values_in_d_regs;
3238 Label untag_rhs, untag_lhs;
3240 Register result = x0;
3243 FPRegister rhs_d =
d0;
3244 FPRegister lhs_d =
d1;
3247 __ JumpIfNotSmi(lhs, &miss);
3250 __ JumpIfNotSmi(rhs, &miss);
3257 __ JumpIfSmi(rhs, &handle_lhs);
3258 __ JumpIfNotHeapNumber(rhs, &maybe_undefined1);
3262 __ Bind(&handle_lhs);
3263 __ JumpIfSmi(lhs, &values_in_d_regs);
3264 __ JumpIfNotHeapNumber(lhs, &maybe_undefined2);
3267 __ Bind(&values_in_d_regs);
3268 __ Fcmp(lhs_d, rhs_d);
3269 __ B(
vs, &unordered);
3271 __ Cset(result,
gt);
3272 __ Csinv(result, result, xzr,
ge);
3275 __ Bind(&unordered);
3280 __ Bind(&maybe_undefined1);
3282 __ JumpIfNotRoot(rhs, Heap::kUndefinedValueRootIndex, &miss);
3283 __ JumpIfSmi(lhs, &unordered);
3284 __ JumpIfNotHeapNumber(lhs, &maybe_undefined2);
3288 __ Bind(&maybe_undefined2);
3290 __ JumpIfRoot(lhs, Heap::kUndefinedValueRootIndex, &unordered);
3303 Register result = x0;
3308 __ JumpIfEitherSmi(lhs, rhs, &miss);
3311 Register rhs_map = x10;
3312 Register lhs_map = x11;
3313 Register rhs_type = x10;
3314 Register lhs_type = x11;
3321 __ Orr(x12, lhs_type, rhs_type);
3322 __ TestAndBranchIfAnySet(
3328 __ Cset(result,
ne);
3342 Register result = x0;
3346 Register lhs_instance_type = w2;
3347 Register rhs_instance_type = w3;
3350 __ JumpIfEitherSmi(lhs, rhs, &miss);
3361 __ JumpIfNotUniqueNameInstanceType(lhs_instance_type, &miss);
3362 __ JumpIfNotUniqueNameInstanceType(rhs_instance_type, &miss);
3367 __ Cset(result,
ne);
3383 Register result = x0;
3388 __ JumpIfEitherSmi(rhs, lhs, &miss);
3391 Register rhs_map = x10;
3392 Register lhs_map = x11;
3393 Register rhs_type = x10;
3394 Register lhs_type = x11;
3400 __ Orr(x12, lhs_type, rhs_type);
3419 Label not_internalized_strings;
3420 __ Orr(x12, lhs_type, rhs_type);
3421 __ TestAndBranchIfAnySet(
3425 __ Bind(¬_internalized_strings);
3430 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(lhs_type, rhs_type, x12,
3446 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
3448 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
3462 Register result = x0;
3466 __ JumpIfEitherSmi(rhs, lhs, &miss);
3472 __ Sub(result, rhs, lhs);
3485 Register result = x0;
3489 __ JumpIfEitherSmi(rhs, lhs, &miss);
3491 Register rhs_map = x10;
3492 Register lhs_map = x11;
3500 __ Sub(result, rhs, lhs);
3515 Register stub_entry = x11;
3517 ExternalReference miss =
3518 ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
3523 Register
right = x0;
3531 __ CallExternalReference(miss, 3);
3540 __ Jump(stub_entry);
3566 Register from = x15;
3567 Register input_string = x10;
3568 Register input_length = x11;
3569 Register input_type = x12;
3570 Register result_string = x0;
3571 Register result_length = x1;
3574 __ Peek(
to, kToOffset);
3575 __ Peek(from, kFromOffset);
3578 __ JumpIfEitherNotSmi(from,
to, &runtime);
3583 __ Subs(result_length,
to, from);
3590 __ Peek(input_string, kStringOffset);
3591 __ JumpIfSmi(input_string, &runtime);
3592 __ IsObjectJSStringType(input_string, input_type, &runtime);
3595 __ Cmp(result_length, 1);
3596 __ B(
eq, &single_char);
3600 __ Ldrsw(input_length,
3603 __ Cmp(result_length, input_length);
3604 __ CmovX(x0, input_string,
eq);
3606 __ B(
eq, &return_x0);
3623 Label underlying_unpacked, sliced_string, seq_or_external_string;
3624 Label update_instance_type;
3632 __ B(
eq, &seq_or_external_string);
3634 __ B(
ne, &sliced_string);
3636 Register unpacked_string = input_string;
3640 __ JumpIfNotRoot(temp, Heap::kempty_stringRootIndex, &runtime);
3641 __ Ldr(unpacked_string,
3643 __ B(&update_instance_type);
3645 __ Bind(&sliced_string);
3649 __ Add(from, from, temp);
3650 __ Ldr(unpacked_string,
3653 __ Bind(&update_instance_type);
3659 __ Bind(&seq_or_external_string);
3670 __ Bind(&underlying_unpacked);
3672 if (FLAG_string_slices) {
3676 __ B(
lt, ©_routine);
3682 Label two_byte_slice, set_slice_header;
3686 __ AllocateOneByteSlicedString(result_string, result_length, x3, x4,
3688 __ B(&set_slice_header);
3690 __ Bind(&two_byte_slice);
3691 __ AllocateTwoByteSlicedString(result_string, result_length, x3, x4,
3694 __ Bind(&set_slice_header);
3697 __ Str(unpacked_string,
3701 __ Bind(©_routine);
3713 Register unpacked_char0 = x13;
3714 Register substring_char0 = x13;
3715 Register result_char0 = x14;
3716 Label two_byte_sequential, sequential_string, allocate_result;
3721 __ B(
eq, &sequential_string);
3725 __ Ldr(unpacked_char0,
3728 __ B(&allocate_result);
3730 __ Bind(&sequential_string);
3733 __ Add(unpacked_char0, unpacked_string,
3736 __ Bind(&allocate_result);
3742 __ AllocateOneByteString(result_string, result_length, x3, x4, x5, &runtime);
3745 __ Add(substring_char0, unpacked_char0, from);
3748 __ Add(result_char0, result_string,
3756 __ Bind(&two_byte_sequential);
3757 __ AllocateTwoByteString(result_string, result_length, x3, x4, x5, &runtime);
3760 __ Add(substring_char0, unpacked_char0, Operand(from,
LSL, 1));
3763 __ Add(result_char0, result_string,
3767 __ Add(result_length, result_length, result_length);
3770 __ Bind(&return_x0);
3771 Counters* counters = isolate()->counters();
3772 __ IncrementCounter(counters->sub_string_native(), 1, x3, x4);
3777 __ TailCallRuntime(Runtime::kSubString, 3, 1);
3779 __ bind(&single_char);
3785 StringCharAtGenerator generator(
3786 input_string, from, result_length, x0,
3788 generator.GenerateFast(masm);
3791 generator.SkipSlow(masm, &runtime);
3796 MacroAssembler* masm, Register left, Register right, Register scratch1,
3797 Register scratch2, Register scratch3) {
3799 Register result = x0;
3800 Register left_length = scratch1;
3801 Register right_length = scratch2;
3805 Label strings_not_equal, check_zero_length;
3808 __ Cmp(left_length, right_length);
3809 __ B(
eq, &check_zero_length);
3811 __ Bind(&strings_not_equal);
3816 Label compare_chars;
3817 __ Bind(&check_zero_length);
3819 __ Cbnz(left_length, &compare_chars);
3824 __ Bind(&compare_chars);
3826 scratch3, &strings_not_equal);
3835 MacroAssembler* masm, Register left, Register right, Register scratch1,
3836 Register scratch2, Register scratch3, Register scratch4) {
3838 Label result_not_equal, compare_lengths;
3841 Register length_delta = scratch3;
3844 __ Subs(length_delta, scratch1, scratch2);
3846 Register min_length = scratch1;
3847 __ Csel(min_length, scratch2, scratch1,
gt);
3848 __ Cbz(min_length, &compare_lengths);
3852 scratch4, &result_not_equal);
3855 __ Bind(&compare_lengths);
3860 Register result = x0;
3861 __ Subs(result, length_delta, 0);
3863 __ Bind(&result_not_equal);
3865 Register
less = x11;
3875 MacroAssembler* masm, Register left, Register right, Register length,
3876 Register scratch1, Register scratch2, Label* chars_not_equal) {
3882 __ SmiUntag(length);
3884 __ Add(left, left, scratch1);
3885 __ Add(right, right, scratch1);
3887 Register index = length;
3888 __ Neg(index, length);
3895 __ Cmp(scratch1, scratch2);
3896 __ B(
ne, chars_not_equal);
3897 __ Add(index, index, 1);
3898 __ Cbnz(index, &loop);
3905 Counters* counters = isolate()->counters();
3910 Register right = x10;
3911 Register left = x11;
3912 Register result = x0;
3913 __ Pop(right, left);
3916 __ Subs(result, right, left);
3917 __ B(
ne, ¬_same);
3919 __ IncrementCounter(counters->string_compare_native(), 1, x3, x4);
3925 __ JumpIfEitherIsNotSequentialOneByteStrings(left, right, x12, x13, &runtime);
3929 __ IncrementCounter(counters->string_compare_native(), 1, x3, x4);
3942 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
3946 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3956 __ LoadObject(x2,
handle(isolate()->heap()->undefined_value()));
3959 if (FLAG_debug_code) {
3960 __ AssertNotSmi(x2, kExpectedAllocationSite);
3962 __ AssertRegisterIsRoot(x10, Heap::kAllocationSiteMapRootIndex,
3963 kExpectedAllocationSite);
3968 BinaryOpWithAllocationSiteStub stub(isolate(), state());
3969 __ TailCallStub(&stub);
3979 Label dont_need_remembered_set;
3983 __ JumpIfNotInNewSpace(val, &dont_need_remembered_set);
3986 &dont_need_remembered_set);
3995 __ RememberedSetHelper(
object(),
address(),
3999 __ Bind(&dont_need_remembered_set);
4019 __ Mov(x2, ExternalReference::isolate_address(isolate()));
4021 AllowExternalCallThatCantCauseGC scope(masm);
4022 ExternalReference
function =
4023 ExternalReference::incremental_marking_record_write_function(
4025 __ CallCFunction(
function, 3, 0);
4032 MacroAssembler* masm,
4033 OnNoNeedToInformIncrementalMarker on_no_need,
4036 Label need_incremental;
4037 Label need_incremental_pop_scratch;
4044 __ Subs(counter, counter, 1);
4047 __ B(
mi, &need_incremental);
4054 __ RememberedSetHelper(
object(),
address(),
4067 Label ensure_not_white;
4078 __ Bind(&ensure_not_white);
4084 __ EnsureNotWhite(val,
4089 &need_incremental_pop_scratch);
4094 __ RememberedSetHelper(
object(),
address(),
4101 __ Bind(&need_incremental_pop_scratch);
4104 __ Bind(&need_incremental);
4110 Label skip_to_incremental_noncompacting;
4111 Label skip_to_incremental_compacting;
4119 InstructionAccurateScope scope(masm, 2);
4120 __ adr(xzr, &skip_to_incremental_noncompacting);
4121 __ adr(xzr, &skip_to_incremental_compacting);
4125 __ RememberedSetHelper(
object(),
address(),
4131 __ Bind(&skip_to_incremental_noncompacting);
4134 __ Bind(&skip_to_incremental_compacting);
4145 Register value = x0;
4146 Register index_smi = x3;
4148 Register array = x1;
4149 Register array_map = x2;
4150 Register array_index_smi = x4;
4151 __ PeekPair(array_index_smi, array, 0);
4154 Label double_elements, smi_element, fast_elements, slow_elements;
4155 Register bitfield2 = x10;
4165 __ B(
hi, &double_elements);
4167 __ JumpIfSmi(value, &smi_element);
4175 __ Bind(&slow_elements);
4176 __ Push(array, index_smi, value);
4179 __ Push(x11, array_index_smi);
4180 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4183 __ Bind(&fast_elements);
4195 __ Bind(&smi_element);
4201 __ Bind(&double_elements);
4203 __ StoreNumberToDoubleElements(value, index_smi, x10, x11,
d0,
4212 int parameter_count_offset =
4218 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4227 VectorLoadStub stub(isolate(),
state());
4234 VectorKeyedLoadStub stub(isolate());
4239 static unsigned int GetProfileEntryHookCallSize(MacroAssembler* masm) {
4254 if (masm->isolate()->function_entry_hook() !=
NULL) {
4257 DontEmitDebugCodeScope no_debug_code(masm);
4258 Label entry_hook_call_start;
4259 __ Bind(&entry_hook_call_start);
4262 DCHECK(masm->SizeOfCodeGeneratedSince(&entry_hook_call_start) ==
4263 GetProfileEntryHookCallSize(masm));
4271 MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
4281 __ Sub(x0,
lr, GetProfileEntryHookCallSize(masm));
4283 #if V8_HOST_ARCH_ARM64
4285 reinterpret_cast<uintptr_t>(isolate()->function_entry_hook());
4286 __ Mov(x10, entry_hook);
4291 __ Mov(x10, Operand(ExternalReference(&dispatcher,
4292 ExternalReference::BUILTIN_CALL,
4295 __ Mov(x2, ExternalReference::isolate_address(isolate()));
4304 FrameScope frame(masm, StackFrame::MANUAL);
4305 __ CallCFunction(x10, 2, 0);
4319 const Register old_stack_pointer =
__ StackPointer();
4320 __ SetStackPointer(csp);
4328 __ AssertFPCRState();
4331 __ SetStackPointer(old_stack_pointer);
4341 reinterpret_cast<intptr_t
>(GetCode().location());
4343 __ Mov(x10, target);
4356 MacroAssembler* masm,
4362 Register scratch2) {
4370 __ Sub(scratch1, scratch1, 1);
4380 DCHECK(NameDictionary::GetProbeOffset(
i) <
4382 __ Add(scratch2, scratch2, Operand(
4389 __ Add(scratch2, scratch2, Operand(scratch2,
LSL, 1));
4392 UseScratchRegisterScope temps(masm);
4393 Register scratch3 = temps.AcquireX();
4404 spill_list.Combine(
lr);
4405 spill_list.Remove(scratch1);
4406 spill_list.Remove(scratch2);
4408 __ PushCPURegList(spill_list);
4411 DCHECK(!elements.is(x1));
4413 __ Mov(x0, elements);
4415 __ Mov(x0, elements);
4422 __ Cbz(x0, ¬_found);
4423 __ Mov(scratch2, x2);
4424 __ PopCPURegList(spill_list);
4427 __ Bind(¬_found);
4428 __ PopCPURegList(spill_list);
4437 Register properties,
4439 Register scratch0) {
4450 Register
index = scratch0;
4460 Register entity_name = scratch0;
4462 Register tmp =
index;
4466 __ JumpIfRoot(entity_name, Heap::kUndefinedValueRootIndex, done);
4469 __ Cmp(entity_name, Operand(
name));
4473 __ JumpIfRoot(entity_name, Heap::kTheHoleValueRootIndex, &good);
4477 __ Ldrb(entity_name,
4479 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
4484 spill_list.Combine(
lr);
4485 spill_list.Remove(scratch0);
4487 __ PushCPURegList(spill_list);
4490 __ Mov(x1, Operand(
name));
4495 __ Mov(scratch0, x0);
4496 __ PopCPURegList(spill_list);
4498 __ Cbz(scratch0, done);
4517 Register
index = x2;
4520 Register undefined = x5;
4521 Register entry_key = x6;
4523 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4526 __ Sub(mask, mask, 1);
4529 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
4538 DCHECK(NameDictionary::GetProbeOffset(
i) <
4555 __ Cmp(entry_key, undefined);
4556 __ B(
eq, ¬_in_dictionary);
4559 __ Cmp(entry_key, key);
4560 __ B(
eq, &in_dictionary);
4566 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
4570 __ Bind(&maybe_in_dictionary);
4579 __ Bind(&in_dictionary);
4583 __ Bind(¬_in_dictionary);
4590 static void CreateArrayDispatch(MacroAssembler* masm,
4595 __ TailCallStub(&stub);
4601 for (
int i = 0;
i <= last_index; ++
i) {
4606 __ CompareAndBranch(kind, candidate_kind,
ne, &next);
4607 T stub(masm->isolate(), candidate_kind);
4608 __ TailCallStub(&stub);
4613 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4623 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4632 Register allocation_site = x2;
4635 Label normal_sequence;
4645 __ Tbnz(kind, 0, &normal_sequence);
4651 __ Cbz(x10, &normal_sequence);
4657 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4660 __ TailCallStub(&stub_holey);
4662 __ Bind(&normal_sequence);
4663 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4666 __ TailCallStub(&stub);
4670 __ Orr(kind, kind, 1);
4672 if (FLAG_debug_code) {
4674 __ JumpIfNotRoot(x10, Heap::kAllocationSiteMapRootIndex,
4676 __ Assert(
eq, kExpectedAllocationSite);
4689 __ Bind(&normal_sequence);
4692 for (
int i = 0;
i <= last_index; ++
i) {
4695 __ CompareAndBranch(kind, candidate_kind,
ne, &next);
4696 ArraySingleArgumentConstructorStub stub(masm->isolate(), candidate_kind);
4697 __ TailCallStub(&stub);
4702 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4710 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4713 for (
int i = 0;
i <= to_index; ++
i) {
4715 T stub(isolate, kind);
4726 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4728 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4730 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4738 for (
int i = 0;
i < 2;
i++) {
4740 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[
i]);
4742 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[
i]);
4744 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[
i]);
4751 MacroAssembler* masm,
4755 Label zero_case, n_case;
4756 __ Cbz(argc, &zero_case);
4761 CreateArrayDispatchOneArgument(masm,
mode);
4763 __ Bind(&zero_case);
4765 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm,
mode);
4769 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm,
mode);
4772 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm,
mode);
4774 CreateArrayDispatchOneArgument(masm,
mode);
4776 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm,
mode);
4792 Register constructor = x1;
4793 Register allocation_site = x2;
4795 if (FLAG_debug_code) {
4799 Label unexpected_map, map_ok;
4804 __ JumpIfSmi(x10, &unexpected_map);
4805 __ JumpIfObjectType(x10, x10, x11,
MAP_TYPE, &map_ok);
4806 __ Bind(&unexpected_map);
4807 __ Abort(kUnexpectedInitialMapForArrayFunction);
4812 __ AssertUndefinedOrAllocationSite(allocation_site, x10);
4818 __ JumpIfRoot(allocation_site, Heap::kUndefinedValueRootIndex, &no_info);
4833 Label zero_case, n_case;
4836 __ Cbz(argc, &zero_case);
4837 __ CompareAndBranch(argc, 1,
ne, &n_case);
4845 __ Cbz(x10, &packed_case);
4847 InternalArraySingleArgumentConstructorStub
4849 __ TailCallStub(&stub1_holey);
4851 __ Bind(&packed_case);
4853 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4854 __ TailCallStub(&stub1);
4856 __ Bind(&zero_case);
4858 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4859 __ TailCallStub(&stub0);
4863 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4864 __ TailCallStub(&stubN);
4876 Register constructor = x1;
4878 if (FLAG_debug_code) {
4882 Label unexpected_map, map_ok;
4887 __ JumpIfSmi(x10, &unexpected_map);
4888 __ JumpIfObjectType(x10, x10, x11,
MAP_TYPE, &map_ok);
4889 __ Bind(&unexpected_map);
4890 __ Abort(kUnexpectedInitialMapForArrayFunction);
4900 __ LoadElementsKindFromMap(kind, x10);
4902 if (FLAG_debug_code) {
4906 __ Assert(
eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray);
4909 Label fast_elements_case;
4913 __ Bind(&fast_elements_case);
4932 Register callee = x0;
4933 Register call_data = x4;
4934 Register holder = x2;
4935 Register api_function_address = x1;
4936 Register context =
cp;
4942 typedef FunctionCallbackArguments FCA;
4954 __ Push(context, callee, call_data);
4960 __ LoadRoot(call_data, Heap::kUndefinedValueRootIndex);
4962 Register isolate_reg = x5;
4963 __ Mov(isolate_reg, ExternalReference::isolate_address(isolate()));
4967 __ Push(call_data, call_data, isolate_reg, holder);
4971 __ Mov(args, masm->StackPointer());
4975 const int kApiStackSpace = 4;
4979 const int kCallApiFunctionSpillSpace = 4;
4981 FrameScope frame_scope(masm, StackFrame::MANUAL);
4982 __ EnterExitFrame(
false, x10, kApiStackSpace + kCallApiFunctionSpillSpace);
4996 const int kStackUnwindSpace =
argc + FCA::kArgsLength + 1;
4997 ExternalReference thunk_ref =
4998 ExternalReference::invoke_function_callback(isolate());
5000 AllowExternalCallThatCantCauseGC scope(masm);
5004 int return_value_offset = 0;
5006 return_value_offset = 2 + FCA::kArgsLength;
5008 return_value_offset = 2 + FCA::kReturnValueOffset;
5012 const int spill_offset = 1 + kApiStackSpace;
5013 __ CallApiFunctionAndReturn(api_function_address,
5017 return_value_operand,
5018 &context_restore_operand);
5031 DCHECK(api_function_address.is(x2));
5033 __ Mov(x0, masm->StackPointer());
5036 const int kApiStackSpace = 1;
5040 const int kCallApiFunctionSpillSpace = 4;
5042 FrameScope frame_scope(masm, StackFrame::MANUAL);
5043 __ EnterExitFrame(
false, x10, kApiStackSpace + kCallApiFunctionSpillSpace);
5052 ExternalReference thunk_ref =
5053 ExternalReference::invoke_accessor_getter_callback(isolate());
5055 const int spill_offset = 1 + kApiStackSpace;
5056 __ CallApiFunctionAndReturn(api_function_address,
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kTransitionInfoOffset
static const Register function_address()
static const Register parameter_count()
static const Register index()
void GenerateReadElement(MacroAssembler *masm)
void GenerateNewSloppySlow(MacroAssembler *masm)
void GenerateNewStrict(MacroAssembler *masm)
void GenerateNewSloppyFast(MacroAssembler *masm)
static const int kLengthOffset
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateDispatchToArrayStub(MacroAssembler *masm, AllocationSiteOverrideMode mode)
ArgumentCountKey argument_count() const
friend class BlockConstPoolScope
static const int kCallSizeWithRelocation
static void GenerateAheadOfTime(Isolate *isolate)
bool save_doubles() const
static void GenerateAheadOfTime(Isolate *isolate)
CEntryStub(Isolate *isolate, int result_size, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
bool NeedsImmovableCode()
bool call_data_undefined() const
STATIC_ASSERT(Code::kArgumentsBits+2<=kStubMinorKeyBits)
bool RecordCallTarget() const
bool CallAsMethod() const
bool CallAsMethod() const
void GenerateMiss(MacroAssembler *masm)
virtual InlineCacheState GetICState() const OVERRIDE
static const int kValueOffset
static const int kHeaderSize
Condition GetCondition() const
void GenerateInternalizedStrings(MacroAssembler *masm)
void GenerateStrings(MacroAssembler *masm)
CompareICState::State state() const
void GenerateMiss(MacroAssembler *masm)
CompareICState::State left() const
void GenerateGeneric(MacroAssembler *masm)
CompareICState::State right() const
void GenerateObjects(MacroAssembler *masm)
CompareICStub(Isolate *isolate, Token::Value op, CompareICState::State left, CompareICState::State right, CompareICState::State state)
void GenerateNumbers(MacroAssembler *masm)
void GenerateUniqueNames(MacroAssembler *masm)
void GenerateKnownObjects(MacroAssembler *masm)
void GenerateSmis(MacroAssembler *masm)
static const int kFirstOffset
static const int kMinLength
static const int kSecondOffset
@ SLOPPY_ARGUMENTS_MAP_INDEX
@ STRICT_ARGUMENTS_MAP_INDEX
@ ALIASED_ARGUMENTS_MAP_INDEX
static bool IsSupported(CpuFeature f)
static void GenerateAheadOfTime(Isolate *isolate)
void GenerateCall(MacroAssembler *masm, Register target)
bool skip_fastpath() const
bool is_truncating() const
Register destination() const
static const int kCallerFPOffset
static const int kSPOffset
static const int kMaxShortLength
static const int kResourceDataOffset
static const int kLengthOffset
static const int kHeaderSize
static const int kNativeContextOffset
static const int kEntrySize
static const int kMantissaBits
static const int kValueOffset
static const int kExponentBits
static const int kExponentBias
static const int kMapOffset
static const int kStrictArgumentsObjectSize
static const int kSloppyArgumentsObjectSize
static const int kArgumentsCalleeIndex
static const int kArgumentsLengthIndex
void GenerateLightweightMiss(MacroAssembler *masm, ExternalReference miss)
bool HasCallSiteInlineCheck() const
bool HasArgsInRegisters() const
bool ReturnTrueFalseObject() const
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateCase(MacroAssembler *masm, ElementsKind kind)
static const int kJSRegexpStaticOffsetsVectorSize
StackFrame::Type type() const
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kLiteralsOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kElementsOffset
static const int kDataOneByteCodeOffset
static const int kIrregexpCaptureCountOffset
static const int kDataTagOffset
static const int kDataOffset
static const int kDataUC16CodeOffset
static const int kFunctionOffset
static const Register ReceiverRegister()
static const Register NameRegister()
LoadICState state() const
static const int8_t kMaximumBitField2FastHoleyElementValue
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kBitField2Offset
static const int kPrototypeOffset
static const Register exponent()
ExponentType exponent_type() const
static const Register exponent()
static const size_t kWriteBarrierCounterOffset
static const int kEvacuationCandidateMask
static const int kSkipEvacuationSlotsRecordingMask
static const int kElementsStartOffset
NameDictionaryLookupStub(Isolate *isolate, LookupMode mode)
static const int kCapacityOffset
Register dictionary() const
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static const int kTotalProbes
static const int kInlinedProbes
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kHashShift
static const int kHashFieldOffset
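// kInlinedProbes probes are generated inline at the call site; the stub
// itself tries up to kTotalProbes before giving up. Assuming V8's usual
// open-addressed table with triangular (quadratic) probing and a
// power-of-two capacity, probe i for a name hash lands at:
static uint32_t ProbeIndex(uint32_t hash, uint32_t i, uint32_t capacity_mask) {
  // Offsets 0, 1, 3, 6, 10, ... visit every slot exactly once when the
  // capacity is a power of two.
  return (hash + i * (i + 1) / 2) & capacity_mask;
}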
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
static Operand UntagSmiAndScale(Register smi, int scale)
static Operand UntagSmi(Register smi)
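// On arm64, V8 keeps a Smi's 32-bit payload in the upper half of the tagged
// 64-bit word; the low half carries the (all-zero) tag. These helpers fold
// the untag into an Operand; the plain arithmetic is, in effect:
static int64_t SmiTag(int32_t value) {
  return static_cast<int64_t>(value) << 32;  // payload to the upper word
}
static int32_t SmiUntag(int64_t tagged) {
  return static_cast<int32_t>(tagged >> 32);  // arithmetic shift keeps sign
}
// UntagSmiAndScale combines both steps by shifting right by (32 - scale), so
// a Smi index can address scaled memory without a separate untag instruction.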
static const intptr_t kPageAlignmentMask
ProfileEntryHookStub(Isolate *isolate)
static void MaybeCallEntryHook(MacroAssembler *masm)
static void EntryHookTrampoline(intptr_t function, intptr_t stack_pointer, Isolate *isolate)
static const int kArgsLength
void Restore(MacroAssembler *masm)
void SaveCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void Save(MacroAssembler *masm)
void RestoreCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void GenerateIncremental(MacroAssembler *masm, Mode mode)
void InformIncrementalMarker(MacroAssembler *masm)
RememberedSetAction remembered_set_action() const
SaveFPRegsMode save_fp_regs_mode() const
@ kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
@ kReturnOnNoNeedToInformIncrementalMarker
void CheckNeedsToInformIncrementalMarker(MacroAssembler *masm, OnNoNeedToInformIncrementalMarker on_no_need, Mode mode)
virtual void Generate(MacroAssembler *masm) OVERRIDE
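// RecordWriteStub's control flow in (hedged) pseudo-C++: most stores take the
// early exit; only stores made while incremental marking is active, or whose
// target page is flagged by the masks above, reach the runtime. The two
// helpers are hypothetical stand-ins for the remembered-set update and the
// InformIncrementalMarker call:
static void RememberSlot(void** slot) {}            // hypothetical stand-in
static void TellIncrementalMarker(void* object) {}  // hypothetical stand-in
static void RecordWriteSketch(void* object, void** slot, void* value,
                              bool marking_active,
                              bool value_in_evacuation_candidate) {
  if (!marking_active && !value_in_evacuation_candidate) return;  // common case
  RememberSlot(slot);             // so the GC can revisit this slot later
  TellIncrementalMarker(object);
}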
static const int kLastCaptureCountOffset
static const int kLastSubjectOffset
static const int kLastMatchOverhead
static const int kLastInputOffset
static const int kFirstCaptureOffset
static const Function * FunctionForId(FunctionId id)
static const int kHeaderSize
static const int kConstructStubOffset
static const int kFeedbackVectorOffset
static const int kCompilerHintsOffset
static const int kMinLength
static const int kParentOffset
static const int kOffsetOffset
static Smi * FromInt(int value)
static const int kContextOffset
static const int kCallerSPOffset
static const int kCallerFPOffset
static void GenerateFixedRegStubsAheadOfTime(Isolate *isolate)
StoreBufferOverflowStub(Isolate *isolate, SaveFPRegsMode save_fp)
bool save_doubles() const
StringIndexFlags index_flags_
Label * receiver_not_string_
Label * index_out_of_range_
void GenerateFast(MacroAssembler *masm)
Label * index_not_number_
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static void GenerateOneByteCharsCompareLoop(MacroAssembler *masm, Register left, Register right, Register length, Register scratch1, Register scratch2, Label *chars_not_equal)
static void GenerateCompareFlatOneByteStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static void GenerateFlatOneByteStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
static const int32_t kMaxOneByteCharCode
static const int kLengthOffset
static const int kCallerStackParameterCountFrameOffset
StubFunctionMode function_mode() const
static void GenerateAheadOfTime(Isolate *isolate)
static bool IsOrderedRelationalCompareOp(Value op)
static bool IsEqualityOp(Value op)
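// A sketch of the predicate split these two implement (token names follow
// v8::internal::Token::Value; negated forms are canonicalized away before a
// CompareIC sees them):
enum class Tok { EQ, EQ_STRICT, LT, GT, LTE, GTE };
static bool IsOrderedRelational(Tok op) {
  return op == Tok::LT || op == Tok::GT || op == Tok::LTE || op == Tok::GTE;
}
static bool IsEquality(Tok op) {
  return op == Tok::EQ || op == Tok::EQ_STRICT;
}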
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
static const Register VectorRegister()
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
#define ASM_LOCATION(message)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
const LowDwVfpRegister d2
const uint32_t kStringEncodingMask
const LowDwVfpRegister d7
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
@ DONT_TRACK_ALLOCATION_SITE
const LowDwVfpRegister d6
bool AreAliased(const CPURegister &reg1, const CPURegister &reg2, const CPURegister &reg3=NoReg, const CPURegister &reg4=NoReg, const CPURegister &reg5=NoReg, const CPURegister &reg6=NoReg, const CPURegister &reg7=NoReg, const CPURegister &reg8=NoReg)
const unsigned kXRegSizeInBits
const LowDwVfpRegister d1
const intptr_t kSmiSignMask
const uint32_t kTwoByteStringTag
const uint32_t kShortExternalStringTag
int MaskToBit(uint64_t mask)
const LowDwVfpRegister d0
const int kFastElementsKindPackedToHoley
MemOperand GlobalObjectMemOperand()
const uint32_t kNotStringTag
DwVfpRegister DoubleRegister
Register GetAllocatableRegisterThatIsNotOneOf(Register reg1, Register reg2=NoReg, Register reg3=NoReg, Register reg4=NoReg)
const int kPointerSizeLog2
const uint32_t kStringTag
MemOperand ContextMemOperand(Register context, int index)
@ FAST_HOLEY_DOUBLE_ELEMENTS
@ TERMINAL_FAST_ELEMENTS_KIND
@ FAST_HOLEY_SMI_ELEMENTS
Handle< T > handle(T *t, Isolate *isolate)
const uint32_t kOneByteStringTag
MemOperand FieldMemOperand(Register object, int offset)
const intptr_t kObjectAlignmentMask
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool IsFastPackedElementsKind(ElementsKind kind)
const uint32_t kShortExternalStringMask
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
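// The fast ElementsKind sequence interleaves packed and holey variants, so
// the holey kind sits a fixed enum-distance (kFastElementsKindPackedToHoley)
// past its packed counterpart. Illustrative subset, in transition order:
enum FastKind {
  kFastSmi, kFastHoleySmi,        // Smi-only arrays
  kFastObject, kFastHoleyObject,  // arbitrary tagged values
  kFastDouble, kFastHoleyDouble   // unboxed doubles
};
static FastKind ToHoley(FastKind packed) {
  return static_cast<FastKind>(packed + 1);  // holey is right after packed
}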
const uint64_t kSmiShiftMask
AllocationSiteOverrideMode
@ DISABLE_ALLOCATION_SITES
const uint32_t kStringRepresentationMask
const uint32_t kSlicedNotConsMask
MemOperand UntagSmiFieldMemOperand(Register object, int offset)
const uint32_t kInternalizedTag
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
const uint32_t kIsNotInternalizedMask
const unsigned kByteSizeInBytes
const LowDwVfpRegister d3
const uint32_t kIsNotStringMask
const unsigned kInstructionSize
ElementsKind GetInitialFastElementsKind()
@ STRING_INDEX_IS_ARRAY_INDEX
void CopyBytes(uint8_t *target, uint8_t *source)
const uint32_t kIsIndirectStringMask
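// The string-tag constants above combine into instance-type tests; the
// canonical checks look like this (mask and tag values come from the V8
// headers and are not restated here):
static bool IsStringType(uint32_t type) {
  return (type & kIsNotStringMask) == kStringTag;
}
static bool IsOneByteString(uint32_t type) {
  return (type & kStringEncodingMask) == kOneByteStringTag;
}
static bool IsIndirectString(uint32_t type) {
  // Cons and sliced strings both need a second hop to reach the payload.
  return (type & kIsIndirectStringMask) != 0;
}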
MemOperand UntagSmiMemOperand(Register object, int offset)
const RegList kCallerSaved
const LowDwVfpRegister d4
static Handle< Value > Throw(Isolate *isolate, const char *message)
bool Is(const CPURegister &other) const
#define T(name, string, precedence)