7 #if V8_TARGET_ARCH_MIPS64
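// Platform-specific code stubs for MIPS64; everything below is compiled
// only when targeting this architecture.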
23 static void InitializeArrayConstructorDescriptor(
24 Isolate* isolate, CodeStubDescriptor* descriptor,
25 int constant_stack_parameter_count) {
27 Runtime::kArrayConstructor)->entry;
29 if (constant_stack_parameter_count == 0) {
30 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
33 descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
39 static void InitializeInternalArrayConstructorDescriptor(
40 Isolate* isolate, CodeStubDescriptor* descriptor,
41 int constant_stack_parameter_count) {
43 Runtime::kInternalArrayConstructor)->entry;
45 if (constant_stack_parameter_count == 0) {
46 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
49 descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
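// When a stub takes stack parameters, the argument-count register a0 is
// wired into the descriptor; the -1 passed by the NArguments initializers
// below marks a variable count.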
55 void ArrayNoArgumentConstructorStub::InitializeDescriptor(
56 CodeStubDescriptor* descriptor) {
57 InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
61 void ArraySingleArgumentConstructorStub::InitializeDescriptor(
62 CodeStubDescriptor* descriptor) {
63 InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
67 void ArrayNArgumentsConstructorStub::InitializeDescriptor(
68 CodeStubDescriptor* descriptor) {
69 InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
73 void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
74 CodeStubDescriptor* descriptor) {
75 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
79 void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
80 CodeStubDescriptor* descriptor) {
81 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
85 void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
86 CodeStubDescriptor* descriptor) {
87 InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
91 #define __ ACCESS_MASM(masm)
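// "__" is the usual V8 shorthand for ACCESS_MASM(masm), which lets the
// stub generators below read like plain assembler listings.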
94 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
97 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
103 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
109 ExternalReference miss) {
111 isolate()->counters()->code_stubs()->Increment();
113 CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
114 int param_count = descriptor.GetEnvironmentParameterCount();
118 DCHECK((param_count == 0) ||
119 a0.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
122 for (int i = 0; i < param_count; ++i) {
124 __ sd(descriptor.GetEnvironmentParameterRegister(i),
127 __ CallExternalReference(miss, param_count);
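// The lightweight miss path spills every environment parameter register
// to the stack and forwards all of them to the runtime miss handler.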
135 Label out_of_range, only_low, negate, done;
136 Register input_reg = source();
139 int double_offset = offset();
151 __ Push(scratch, scratch2, scratch3);
154 __ ldc1(double_scratch, MemOperand(input_reg, double_offset));
161 __ Trunc_w_d(double_scratch, double_scratch);
163 __ mfc1(scratch3, double_scratch);
176 __ Branch(&error, ne, scratch, Operand(zero_reg));
177 __ Move(result_reg, scratch3);
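// If the FCSR flags were clean, the trunc.w.d result is used as-is. The
// slow path below re-derives the integer from the double's exponent and
// mantissa words (input_high/input_low) and reapplies the sign at the end.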
183 Register input_high = scratch2;
184 Register input_low = scratch3;
189 Label normal_exponent, restore_sign;
198 __ Movz(result_reg, zero_reg, scratch);
199 __ Branch(&done, eq, scratch, Operand(zero_reg));
208 __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
209 __ mov(result_reg, zero_reg);
212 __ bind(&normal_exponent);
218 Register sign = result_reg;
224 Label high_shift_needed, high_shift_done;
225 __ Branch(&high_shift_needed, lt, scratch, Operand(32));
226 __ mov(input_high, zero_reg);
227 __ Branch(&high_shift_done);
228 __ bind(&high_shift_needed);
237 __ sllv(input_high, input_high, scratch);
239 __ bind(&high_shift_done);
242 Label pos_shift, shift_done;
244 __ subu(scratch, at, scratch);
245 __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));
248 __ Subu(scratch, zero_reg, scratch);
249 __ sllv(input_low, input_low, scratch);
250 __ Branch(&shift_done);
253 __ srlv(input_low, input_low, scratch);
255 __ bind(&shift_done);
256 __ Or(input_high, input_high, Operand(input_low));
261 __ Subu(result_reg, zero_reg, input_high);
262 __ Movz(result_reg, input_high, scratch);
266 __ Pop(scratch, scratch2, scratch3);
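// WriteInt32ToHeapNumberStub follows: it builds the IEEE-754 bit pattern
// for the int32 by hand and stores it into a HeapNumber. 0x80000000 gets
// its own path, presumably because its magnitude cannot be negated in
// 32 bits.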
282 Label max_negative_int;
288 __ Branch(&max_negative_int, eq, the_int(), Operand(0x80000000u));
294 __ li(scratch(), Operand(non_smi_exponent));
309 HeapNumber::kExponentOffset));
313 HeapNumber::kMantissaOffset));
315 __ bind(&max_negative_int);
334 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
338 Label heap_number, return_equal;
339 Register exp_mask_reg = t1;
341 __ Branch(&not_identical, ne, a0, Operand(a1));
350 __ GetObjectType(a0, t0, t0);
353 __ GetObjectType(a0, t0, t0);
363 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
364 __ Branch(&return_equal, ne, a0, Operand(a6));
378 __ bind(&return_equal);
386 __ mov(v0, zero_reg);
392 __ bind(&heap_number);
401 __ And(a7, a6, Operand(exp_mask_reg));
403 __ Branch(&return_equal, ne, a7, Operand(exp_mask_reg));
409 __ Or(v0, a7, Operand(a6));
416 __ Ret(eq, v0, Operand(zero_reg));
428 __ bind(&not_identical);
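// Identical operands settle most comparisons immediately; the heap-number
// path above exists only to route NaN (exponent all ones plus a nonzero
// mantissa), which must compare unequal to itself, to the not-equal result.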
432 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
435 Label* both_loaded_as_doubles,
438 DCHECK((lhs.is(a0) && rhs.is(a1)) ||
439 (lhs.is(a1) && rhs.is(a0)));
442 __ JumpIfSmi(lhs, &lhs_is_smi);
445 __ GetObjectType(lhs, t0, t0);
458 __ SmiUntag(at, rhs);
464 __ jmp(both_loaded_as_doubles);
466 __ bind(&lhs_is_smi);
468 __ GetObjectType(rhs, t0, t0);
473 __ li(v0, Operand(1));
482 __ SmiUntag(at, lhs);
490 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
497 Label first_non_object;
500 __ GetObjectType(lhs, a2, a2);
504 Label return_not_equal;
505 __ bind(&return_not_equal);
507 __ li(v0, Operand(1));
509 __ bind(&first_non_object);
513 __ GetObjectType(rhs, a3, a3);
522 __ Or(a2, a2, Operand(a3));
524 __ Branch(&return_not_equal, eq, at, Operand(zero_reg));
528 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
531 Label* both_loaded_as_doubles,
532 Label* not_heap_numbers,
534 __ GetObjectType(lhs, a3, a2);
538 __ Branch(slow, ne, a3, Operand(a2));
545 __ jmp(both_loaded_as_doubles);
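// Both operands are heap numbers: their values are now in FPU registers,
// so control rejoins the caller's shared floating-point comparison code.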
550 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
553 Label* possible_strings,
554 Label* not_both_strings) {
555 DCHECK((lhs.is(a0) && rhs.is(a1)) ||
556 (lhs.is(a1) && rhs.is(a0)));
562 __ Branch(&object_test, ne, at, Operand(zero_reg));
564 __ Branch(possible_strings, ne, at, Operand(zero_reg));
565 __ GetObjectType(rhs, a3, a3);
568 __ Branch(possible_strings, ne, at, Operand(zero_reg));
573 __ li(v0, Operand(1));
575 __ bind(&object_test);
577 __ GetObjectType(rhs, a2, a3);
593 static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
599 __ JumpIfNotSmi(input, fail);
601 __ JumpIfSmi(input, &ok);
602 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
620 CompareICStub_CheckInputType(masm, lhs, a2, left(), &miss);
621 CompareICStub_CheckInputType(masm, rhs, a3, right(), &miss);
624 Label not_smis, both_loaded_as_doubles;
626 Label not_two_smis, smi_done;
628 __ JumpIfNotSmi(a2, &not_two_smis);
633 __ dsubu(v0, a1, a0);
634 __ bind(&not_two_smis);
641 EmitIdenticalObjectComparison(masm, &slow, cc);
647 __ And(a6, lhs, Operand(rhs));
648 __ JumpIfNotSmi(a6, &not_smis, a4);
657 EmitSmiNonsmiComparison(masm, lhs, rhs,
658 &both_loaded_as_doubles, &slow, strict());
660 __ bind(&both_loaded_as_doubles);
717 EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);
720 Label check_for_internalized_strings;
721 Label flat_string_check;
727 EmitCheckForTwoHeapNumbers(masm,
730 &both_loaded_as_doubles,
731 &check_for_internalized_strings,
734 __ bind(&check_for_internalized_strings);
740 EmitCheckForInternalizedStringsOrObjects(
741 masm, lhs, rhs, &flat_string_check, &slow);
746 __ bind(&flat_string_check);
748 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow);
750 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
767 native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
769 native = Builtins::COMPARE;
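// Anything the stub cannot decide inline is handed to the JS builtins:
// (STRICT_)EQUALS for equality, COMPARE for the relational operators.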
793 __ PushSafepointRegisters();
801 __ PopSafepointRegisters();
814 const int argument_count = 1;
815 const int fp_argument_count = 0;
816 const Register scratch = a1;
818 AllowExternalCallThatCantCauseGC scope(masm);
819 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
820 __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
822 ExternalReference::store_buffer_overflow_function(isolate()),
834 const Register base = a1;
837 const Register heapnumbermap = a5;
838 const Register heapnumber = v0;
843 const FPURegister single_scratch = f8;
844 const Register scratch = t1;
845 const Register scratch2 = a7;
847 Label call_runtime, done, int_exponent;
849 Label base_is_smi, unpack_exponent;
856 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);
858 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
860 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
863 __ jmp(&unpack_exponent);
865 __ bind(&base_is_smi);
866 __ mtc1(scratch, single_scratch);
867 __ cvt_d_w(double_base, single_scratch);
868 __ bind(&unpack_exponent);
870 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
873 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
874 __ ldc1(double_exponent,
878 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);
880 __ ldc1(double_exponent,
885 Label int_exponent_convert;
895 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));
904 __ Move(double_scratch, 0.5);
916 __ neg_d(double_result, double_scratch);
920 __ sqrt_d(double_result, double_scratch);
923 __ bind(&not_plus_half);
924 __ Move(double_scratch, -0.5);
940 __ Move(double_result, 1);
941 __ sqrt_d(double_scratch, double_scratch);
942 __ div_d(double_result, double_result, double_scratch);
948 AllowExternalCallThatCantCauseGC scope(masm);
949 __ PrepareCallCFunction(0, 2, scratch2);
950 __ MovToFloatParameters(double_base, double_exponent);
952 ExternalReference::power_double_double_function(isolate()),
956 __ MovFromFloatResult(double_result);
959 __ bind(&int_exponent_convert);
963 __ bind(&int_exponent);
967 __ mov(scratch, exponent);
970 __ mov(exponent, scratch);
973 __ mov_d(double_scratch, double_base);
974 __ Move(double_result, 1.0);
977 Label positive_exponent;
978 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
979 __ Dsubu(scratch, zero_reg, scratch);
980 __ bind(&positive_exponent);
982 Label while_true, no_carry, loop_end;
983 __ bind(&while_true);
985 __ And(scratch2, scratch, 1);
987 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
988 __ mul_d(double_result, double_result, double_scratch);
991 __ dsra(scratch, scratch, 1);
993 __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
994 __ mul_d(double_scratch, double_scratch, double_scratch);
996 __ Branch(&while_true);
1000 __ Branch(&done, ge, exponent, Operand(zero_reg));
1001 __ Move(double_scratch, 1.0);
1002 __ div_d(double_result, double_scratch, double_result);
1009 __ mtc1(exponent, single_scratch);
1010 __ cvt_d_w(double_exponent, single_scratch);
1013 Counters* counters = isolate()->counters();
1016 __ bind(&call_runtime);
1017 __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);
1022 __ AllocateHeapNumber(
1023 heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
1024 __ sdc1(double_result,
1026 DCHECK(heapnumber.is(v0));
1027 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
1032 AllowExternalCallThatCantCauseGC scope(masm);
1033 __ PrepareCallCFunction(0, 2, scratch);
1034 __ MovToFloatParameters(double_base, double_exponent);
1036 ExternalReference::power_double_double_function(isolate()),
1040 __ MovFromFloatResult(double_result);
1043 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
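// MathPowStub in summary: dedicated paths for exponents of +/-0.5, a
// square-and-multiply loop for integer exponents, and the C library pow()
// (power_double_double_function) for everything else.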
1054 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1064 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1080 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1083 CEntryStub(isolate, 1, mode).GetCode();
1084 StoreBufferOverflowStub(isolate, mode).GetCode();
1085 isolate->set_fp_stubs_generated(true);
1115 FrameScope scope(masm, StackFrame::MANUAL);
1130 __ AssertStackIsAligned();
1132 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
1144 masm->bal(&find_ra);
1146 masm->bind(&find_ra);
1151 const int kNumInstructionsToJump = 5;
1152 masm->Daddu(ra, ra, kNumInstructionsToJump * kInt32Size);
1164 masm->InstructionsGeneratedSince(&find_ra));
1169 if (FLAG_debug_code) {
1171 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
1172 __ Branch(&okay, ne, v0, Operand(a4));
1173 __ stop("The hole escaped");
1178 Label exception_returned;
1179 __ LoadRoot(a4, Heap::kExceptionRootIndex);
1180 __ Branch(&exception_returned, eq, a4, Operand(v0));
1182 ExternalReference pending_exception_address(
1183 Isolate::kPendingExceptionAddress, isolate());
1187 if (FLAG_debug_code) {
1189 __ li(a2, Operand(pending_exception_address));
1191 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
1193 __ Branch(&okay, eq, a4, Operand(a2));
1194 __ stop("Unexpected pending exception");
1206 __ bind(&exception_returned);
1209 __ li(a2, Operand(pending_exception_address));
1213 __ li(a3, Operand(isolate()->factory()->the_hole_value()));
1218 Label throw_termination_exception;
1219 __ LoadRoot(a4, Heap::kTerminationExceptionRootIndex);
1220 __ Branch(&throw_termination_exception, eq, v0, Operand(a4));
1225 __ bind(&throw_termination_exception);
1226 __ ThrowUncatchable(v0);
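// CEntryStub epilogue: a normal result returns to the caller; otherwise
// the exception is fetched from the pending-exception slot, with
// termination exceptions diverted to ThrowUncatchable.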
1231 Label invoke, handler_entry, exit;
1232 Isolate* isolate = masm->isolate();
1266 __ InitializeRootRegister();
1269 __ li(a7, Operand(-1));
1270 int marker = type();
1273 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
1274 __ li(a4, Operand(c_entry_fp));
1297 Label non_outermost_js;
1298 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
1299 __ li(a5, Operand(ExternalReference(js_entry_sp)));
1301 __ Branch(&non_outermost_js, ne, a6, Operand(zero_reg));
1303 __ li(a4, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1307 __ bind(&non_outermost_js);
1308 __ li(a4, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1315 __ bind(&handler_entry);
1321 __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1324 __ LoadRoot(v0, Heap::kExceptionRootIndex);
1331 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
1338 __ LoadRoot(a5, Heap::kTheHoleValueRootIndex);
1339 __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
1361 if (type() == StackFrame::ENTRY_CONSTRUCT) {
1362 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1364 __ li(a4, Operand(construct_entry));
1366 ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
1367 __ li(a4, Operand(entry));
1379 Label non_outermost_js_2;
1381 __ Branch(&non_outermost_js_2,
1384 Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1385 __ li(a5, Operand(ExternalReference(js_entry_sp)));
1387 __ bind(&non_outermost_js_2);
1391 __ li(a4, Operand(ExternalReference(Isolate::kCEntryFPAddress,
1423 const Register object = a0;
1425 const Register function = a1;
1426 const Register prototype = a4;
1427 const Register inline_site = t1;
1428 const Register scratch = a2;
1432 Label slow, loop, is_instance, is_not_instance, not_js_object;
1440 __ JumpIfSmi(object, &not_js_object);
1441 __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
1447 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
1448 __ Branch(&miss, ne, function, Operand(at));
1449 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
1450 __ Branch(&miss, ne, map, Operand(at));
1451 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
1458 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
1461 __ JumpIfSmi(prototype, &slow);
1462 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
1467 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
1468 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
1475 __ LoadFromSafepointRegisterSlot(scratch, a4);
1476 __ Dsubu(inline_site, ra, scratch);
1478 __ GetRelocatedValue(inline_site, scratch, v1);
1487 Register scratch2 = map;
1491 __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
1493 __ Branch(&is_instance, eq, scratch, Operand(prototype));
1494 __ Branch(&is_not_instance, eq, scratch, Operand(scratch2));
1499 __ bind(&is_instance);
1502 __ mov(v0, zero_reg);
1503 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
1506 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
1507 __ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
1509 __ PatchRelocatedValue(inline_site, scratch, v0);
1513 __ mov(v0, zero_reg);
1518 __ bind(&is_not_instance);
1521 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
1524 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
1525 __ Daddu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
1527 __ PatchRelocatedValue(inline_site, scratch, v0);
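// The instanceof answer is cached in dedicated root slots; call sites
// compiled with an inline check additionally get the boolean patched
// straight back into their code via PatchRelocatedValue.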
1536 Label object_not_null, object_not_null_or_smi;
1537 __ bind(&not_js_object);
1540 __ JumpIfSmi(function, &slow);
1541 __ GetObjectType(function, scratch2, scratch);
1545 __ Branch(&object_not_null,
1548 Operand(isolate()->factory()->null_value()));
1552 __ bind(&object_not_null);
1554 __ JumpIfNotSmi(object, &object_not_null_or_smi);
1558 __ bind(&object_not_null_or_smi);
1560 __ IsObjectJSStringType(object, scratch, &slow);
1578 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
1580 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
1592 PropertyAccessCompiler::TailCallBuiltin(
1593 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
1600 const int kDisplacement =
1607 __ JumpIfNotSmi(a1, &slow);
1621 __ Branch(&slow, hs, a1, Operand(a0));
1624 __ dsubu(a3, a0, a1);
1626 __ Daddu(a3, fp, Operand(a7));
1638 __ dsubu(a3, a0, a1);
1640 __ Daddu(a3, a2, Operand(a7));
1648 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
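// arguments[i] fast path: the element is read directly out of the
// caller's frame (or the arguments adaptor frame below it); any other
// case falls back to the runtime call above.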
1669 __ Daddu(a3, a3, Operand(a7));
1674 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
1692 Label adaptor_frame, try_allocate;
1695 __ Branch(&adaptor_frame,
1702 __ Branch(&try_allocate);
1705 __ bind(&adaptor_frame);
1708 __ Daddu(a3, a3, Operand(t2));
1716 __ Branch(&skip_min, lt, a1, Operand(a2));
1720 __ bind(&try_allocate);
1724 const int kParameterMapHeaderSize =
1727 Label param_map_size;
1730 __ mov(t1, zero_reg);
1732 __ daddiu(t1, t1, kParameterMapHeaderSize);
1733 __ bind(&param_map_size);
1737 __ Daddu(t1, t1, Operand(t2));
1749 const int kNormalOffset =
1751 const int kAliasedOffset =
1756 Label skip2_ne, skip2_eq;
1757 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg));
1761 __ Branch(&skip2_eq, eq, a1, Operand(zero_reg));
1770 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
1777 __ AssertNotSmi(a3);
1799 Label skip_parameter_map;
1809 __ LoadRoot(a6, Heap::kSloppyArgumentsElementsMapRootIndex);
1815 __ Daddu(a6, a4, Operand(t2));
1816 __ Daddu(a6, a6, Operand(kParameterMapHeaderSize));
1827 Label parameters_loop, parameters_test;
1831 __ Dsubu(t1, t1, Operand(a1));
1832 __ LoadRoot(a7, Heap::kTheHoleValueRootIndex);
1834 __ Daddu(a3, a4, Operand(t2));
1835 __ Daddu(a3, a3, Operand(kParameterMapHeaderSize));
1843 __ jmp(&parameters_test);
1845 __ bind(&parameters_loop);
1850 __ Daddu(t2, a4, a5);
1853 __ Daddu(t2, a3, a5);
1856 __ bind(&parameters_test);
1859 __ bind(&skip_parameter_map);
1864 __ LoadRoot(a5, Heap::kFixedArrayMapRootIndex);
1868 Label arguments_loop, arguments_test;
1872 __ Dsubu(a4, a4, Operand(t2));
1873 __ jmp(&arguments_test);
1875 __ bind(&arguments_loop);
1879 __ Daddu(a5, a3, Operand(t2));
1883 __ bind(&arguments_test);
1884 __ Branch(&arguments_loop, lt, t1, Operand(a2));
1893 __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
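// The sloppy fast path builds an aliased arguments object: mapped slots
// get context indices in the parameter map and hole markers in the
// backing store, so parameter writes stay visible through the context.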
1906 __ Branch(&slow, ne, t0, Operand(zero_reg));
1912 __ TailCallExternalReference(
1913 ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
1918 PropertyAccessCompiler::TailCallBuiltin(
1919 masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
1928 Label adaptor_frame, try_allocate, runtime;
1931 __ Branch(&adaptor_frame,
1938 __ Branch(&try_allocate);
1941 __ bind(&adaptor_frame);
1946 __ Daddu(a3, a2, Operand(at));
1953 Label add_arguments_object;
1954 __ bind(&try_allocate);
1955 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg));
1959 __ bind(&add_arguments_object);
1963 __ Allocate(a1, v0, a2, a3, &runtime,
1973 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
1985 __ Branch(&done, eq, a1, Operand(zero_reg));
1994 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex);
2013 __ Dsubu(a1, a1, Operand(1));
2014 __ Branch(&loop, ne, a1, Operand(zero_reg));
2022 __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
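// Strict arguments objects are simpler: a plain JSObject over a fresh
// FixedArray copy of the actual arguments, with no parameter map and no
// aliasing.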
2030 #ifdef V8_INTERPRETED_REGEXP
2031 __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
2052 Register subject = s0;
2053 Register regexp_data = s1;
2054 Register last_match_info_elements = s2;
2057 ExternalReference address_of_regexp_stack_memory_address =
2058 ExternalReference::address_of_regexp_stack_memory_address(
2060 ExternalReference address_of_regexp_stack_memory_size =
2061 ExternalReference::address_of_regexp_stack_memory_size(isolate());
2062 __ li(a0, Operand(address_of_regexp_stack_memory_size));
2064 __ Branch(&runtime, eq, a0, Operand(zero_reg));
2069 __ JumpIfSmi(a0, &runtime);
2070 __ GetObjectType(a0, a1, a1);
2075 if (FLAG_debug_code) {
2076 __ SmiTst(regexp_data, a4);
2078 kUnexpectedTypeForRegExpDataFixedArrayExpected,
2081 __ GetObjectType(regexp_data, a0, a0);
2083 kUnexpectedTypeForRegExpDataFixedArrayExpected,
2106 __ mov(t0, zero_reg);
2108 __ JumpIfSmi(subject, &runtime);
2109 __ mov(a3, subject);
2133 Label check_underlying;
2135 Label not_seq_nor_cons;
2136 Label external_string;
2137 Label not_long_external;
2146 __ Branch(&seq_string, eq, a1, Operand(zero_reg));
2159 __ LoadRoot(a1, Heap::kempty_stringRootIndex);
2160 __ Branch(&runtime, ne, a0, Operand(a1));
2164 __ bind(&check_underlying);
2172 __ Branch(&external_string, ne, at, Operand(zero_reg));
2175 __ bind(&seq_string);
2182 __ JumpIfNotSmi(a1, &runtime);
2184 __ Branch(&runtime, ls, a3, Operand(a1));
2194 __ Movz(t9, a5, a0);
2201 __ JumpIfSmi(t9, &runtime);
2209 __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
2213 const int kRegExpExecuteArguments = 9;
2214 const int kParameterRegisters = (kMipsAbi == kN64) ? 8 : 4;
2215 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);
2237 __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
2241 __ li(a7, Operand(1));
2244 __ li(a0, Operand(address_of_regexp_stack_memory_address));
2246 __ li(a2, Operand(address_of_regexp_stack_memory_size));
2248 __ daddu(a6, a0, a2);
2252 __ mov(a5, zero_reg);
2256 ExternalReference::address_of_static_offsets_vector(isolate())));
2262 __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
2266 __ li(a0, Operand(1));
2270 __ li(a0, Operand(address_of_regexp_stack_memory_address));
2272 __ li(a2, Operand(address_of_regexp_stack_memory_size));
2274 __ daddu(a0, a0, a2);
2279 __ mov(a0, zero_reg);
2284 ExternalReference::address_of_static_offsets_vector(isolate())));
2291 __ Xor(a3, a3, Operand(1));
2301 __ dsllv(t1, t0, a3);
2302 __ daddu(t0, t2, t1);
2303 __ dsllv(t1, a1, a3);
2304 __ daddu(a2, t0, t1);
2309 __ dsllv(t1, t2, a3);
2310 __ daddu(a3, t0, t1);
2315 __ mov(a0, subject);
2319 DirectCEntryStub stub(isolate());
2320 stub.GenerateCall(masm, t9);
2322 __ LeaveExitFrame(false, no_reg, true);
2330 __ Branch(&success, eq, v0, Operand(1));
2341 __ li(a1, Operand(isolate()->factory()->the_hole_value()));
2342 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
2345 __ Branch(&runtime, eq, v0, Operand(a1));
2350 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
2351 Label termination_exception;
2352 __ Branch(&termination_exception, eq, v0, Operand(a0));
2356 __ bind(&termination_exception);
2357 __ ThrowUncatchable(v0);
2361 __ li(v0, Operand(isolate()->factory()->null_value()));
2370 __ Daddu(a1, a1, Operand(1));
2374 __ JumpIfSmi(a0, &runtime);
2375 __ GetObjectType(a0, a2, a2);
2378 __ ld(last_match_info_elements,
2381 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
2382 __ Branch(&runtime, ne, a0, Operand(at));
2389 __ SmiUntag(at, a0);
2390 __ Branch(&runtime, gt, a2, Operand(at));
2402 __ mov(a2, subject);
2403 __ RecordWriteField(last_match_info_elements,
2409 __ mov(subject, a2);
2413 __ RecordWriteField(last_match_info_elements,
2421 ExternalReference address_of_static_offsets_vector =
2422 ExternalReference::address_of_static_offsets_vector(isolate());
2423 __ li(a2, Operand(address_of_static_offsets_vector));
2427 Label next_capture, done;
2431 last_match_info_elements,
2433 __ bind(&next_capture);
2434 __ Dsubu(a1, a1, Operand(1));
2435 __ Branch(&done, lt, a1, Operand(zero_reg));
2453 __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
2457 __ bind(&not_seq_nor_cons);
2462 __ bind(&external_string);
2465 if (FLAG_debug_code) {
2470 kExternalStringExpectedButNotFound,
2481 __ jmp(&seq_string);
2484 __ bind(&not_long_external);
2487 __ Branch(&runtime, ne, at, Operand(zero_reg));
2494 __ jmp(&check_underlying);
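// RegExpExecStub only runs flat one- and two-byte subjects natively: cons
// and sliced strings are unwrapped via check_underlying, external strings
// are unpacked to their resource data, and everything else reaches the
// runtime.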
2499 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2507 Label initialize, done, miss, megamorphic, not_array_function;
2510 masm->isolate()->heap()->megamorphic_symbol());
2512 masm->isolate()->heap()->uninitialized_symbol());
2516 __ Daddu(a4, a2, Operand(a4));
2521 __ Branch(&done, eq, a4, Operand(a1));
2523 if (!FLAG_pretenuring_call_new) {
2529 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2530 __ Branch(&miss, ne, a5, Operand(at));
2534 __ Branch(&megamorphic, ne, a1, Operand(a4));
2542 __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
2543 __ Branch(&initialize, eq, a4, Operand(at));
2546 __ bind(&megamorphic);
2548 __ Daddu(a4, a2, Operand(a4));
2549 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
2554 __ bind(&initialize);
2555 if (!FLAG_pretenuring_call_new) {
2558 __ Branch(&not_array_function, ne, a1, Operand(a4));
2573 __ MultiPush(kSavedRegs);
2575 CreateAllocationSiteStub create_stub(masm->isolate());
2576 __ CallStub(&create_stub);
2578 __ MultiPop(kSavedRegs);
2583 __ bind(&not_array_function);
2587 __ Daddu(a4, a2, Operand(a4));
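// GenerateRecordCallTarget moves a feedback slot monotonically through
// uninitialized -> monomorphic (an AllocationSite for the Array function)
// -> megamorphic; it never transitions backwards.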
2600 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
2604 int32_t strict_mode_function_mask =
2610 __ And(at, a4, Operand(strict_mode_function_mask));
2611 __ Branch(cont, ne, at, Operand(zero_reg));
2613 __ And(at, a4, Operand(native_mask));
2614 __ Branch(cont, ne, at, Operand(zero_reg));
2618 static void EmitSlowCase(MacroAssembler* masm,
2620 Label* non_function) {
2625 __ mov(a2, zero_reg);
2626 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
2628 Handle<Code> adaptor =
2629 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2635 __ bind(non_function);
2637 __ li(a0, Operand(argc));
2638 __ mov(a2, zero_reg);
2639 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
2640 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2645 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
2657 static void CallFunctionNoFeedback(MacroAssembler* masm,
2658 int argc, bool needs_checks,
2659 bool call_as_method) {
2661 Label slow, non_function, wrap, cont;
2666 __ JumpIfSmi(a1, &non_function);
2669 __ GetObjectType(a1, a4, a4);
2675 ParameterCount actual(argc);
2677 if (call_as_method) {
2679 EmitContinueIfStrictOrNative(masm, &cont);
2686 __ JumpIfSmi(a3, &wrap);
2687 __ GetObjectType(a3, a4, a4);
2700 EmitSlowCase(masm, argc, &non_function);
2703 if (call_as_method) {
2706 EmitWrapCase(masm, argc, &cont);
2721 Label slow, non_function_call;
2723 __ JumpIfSmi(a1, &non_function_call);
2725 __ GetObjectType(a1, a4, a4);
2729 GenerateRecordCallTarget(masm);
2732 __ Daddu(a5, a2, at);
2733 if (FLAG_pretenuring_call_new) {
2739 Label feedback_register_initialized;
2743 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2744 __ Branch(&feedback_register_initialized, eq, a5, Operand(at));
2745 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2746 __ bind(&feedback_register_initialized);
2749 __ AssertUndefinedOrAllocationSite(a2, a5);
2753 Register jmp_reg = a4;
2766 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2769 __ bind(&non_function_call);
2770 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2774 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2817 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2831 EmitLoadTypeFeedbackVector(masm, a2);
2834 __ Branch(&miss, ne, a1, Operand(at));
2838 __ Daddu(at, a2, Operand(at));
2843 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
2844 __ Branch(&miss, ne, a5, Operand(at));
2847 ArrayConstructorStub stub(masm->isolate(), arg_count());
2848 __ TailCallStub(&stub);
2854 CallFunctionNoFeedback(masm,
2860 __ stop("Unexpected code address");
2867 Label extra_checks_or_miss, slow_start;
2868 Label slow, non_function, wrap, cont;
2869 Label have_js_function;
2871 ParameterCount actual(argc);
2873 EmitLoadTypeFeedbackVector(masm, a2);
2877 __ Daddu(a4, a2, Operand(a4));
2879 __ Branch(&extra_checks_or_miss, ne, a1, Operand(a4));
2881 __ bind(&have_js_function);
2883 EmitContinueIfStrictOrNative(masm, &cont);
2887 __ JumpIfSmi(a3, &wrap);
2888 __ GetObjectType(a3, a4, a4);
2897 EmitSlowCase(masm, argc, &non_function);
2901 EmitWrapCase(masm, argc, &cont);
2904 __ bind(&extra_checks_or_miss);
2907 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
2908 __ Branch(&slow_start, eq, a4, Operand(at));
2909 __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
2910 __ Branch(&miss, eq, a4, Operand(at));
2912 if (!FLAG_trace_ic) {
2915 __ AssertNotSmi(a4);
2916 __ GetObjectType(a4, a5, a5);
2919 __ Daddu(a4, a2, Operand(a4));
2920 __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
2922 __ Branch(&slow_start);
2930 __ bind(&slow_start);
2933 __ JumpIfSmi(a1, &non_function);
2936 __ GetObjectType(a1, a4, a4);
2938 __ Branch(&have_js_function);
2954 : IC::kCallIC_Customization_Miss;
2956 ExternalReference miss = ExternalReference(IC_Utility(id),
2958 __ CallExternalReference(miss, 4);
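// The miss handler receives four values (including the callee, the
// feedback vector, and the slot) and returns the function the IC should
// now invoke.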
2967 MacroAssembler* masm,
2968 const RuntimeCallHelper& call_helper) {
2969 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2976 Heap::kHeapNumberMapRootIndex,
2979 call_helper.BeforeCall(masm);
2983 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2987 __ CallRuntime(Runtime::kNumberToSmi, 1);
2998 call_helper.AfterCall(masm);
3008 call_helper.BeforeCall(masm);
3011 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
3015 call_helper.AfterCall(masm);
3018 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3040 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3046 __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
3053 MacroAssembler* masm,
3054 const RuntimeCallHelper& call_helper) {
3055 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3058 call_helper.BeforeCall(masm);
3060 __ CallRuntime(Runtime::kCharFromCode, 1);
3063 call_helper.AfterCall(masm);
3066 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3070 enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
3079 if (FLAG_debug_code) {
3083 kDestinationOfCopyNotAligned,
3093 __ Daddu(count, count, count);
3096 Register limit = count;
3097 __ Daddu(limit, dest, Operand(count));
3099 Label loop_entry, loop;
3101 __ Branch(&loop_entry);
3104 __ daddiu(src, src, 1);
3106 __ daddiu(dest, dest, 1);
3107 __ bind(&loop_entry);
3108 __ Branch(&loop, lt, dest, Operand(limit));
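// Deliberately simple byte-at-a-time copy; 'count' was doubled above for
// two-byte strings so the same loop serves both encodings.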
3143 __ JumpIfNotSmi(a2, &runtime);
3144 __ JumpIfNotSmi(a3, &runtime);
3147 __ SmiUntag(a2, a2);
3148 __ SmiUntag(a3, a3);
3149 __ Branch(&runtime, lt, a3, Operand(zero_reg));
3151 __ Branch(&runtime, gt, a3, Operand(a2));
3152 __ Dsubu(a2, a2, a3);
3156 __ JumpIfSmi(v0, &runtime);
3161 __ Branch(&runtime, ne, a4, Operand(zero_reg));
3164 __ Branch(&single_char, eq, a2, Operand(1));
3173 __ Branch(&return_v0, eq, a2, Operand(a4));
3175 __ Branch(&runtime, hi, a2, Operand(a4));
3184 Label underlying_unpacked, sliced_string, seq_or_external_string;
3192 __ Branch(&sliced_string, ne, a4, Operand(zero_reg));
3195 __ LoadRoot(a4, Heap::kempty_stringRootIndex);
3196 __ Branch(&runtime, ne, a5, Operand(a4));
3201 __ jmp(&underlying_unpacked);
3203 __ bind(&sliced_string);
3208 __ Daddu(a3, a3, a4);
3212 __ jmp(&underlying_unpacked);
3214 __ bind(&seq_or_external_string);
3218 __ bind(&underlying_unpacked);
3220 if (FLAG_string_slices) {
3233 Label two_byte_slice, set_slice_header;
3237 __ Branch(&two_byte_slice, eq, a4, Operand(zero_reg));
3238 __ AllocateOneByteSlicedString(v0, a2, a6, a7, &runtime);
3239 __ jmp(&set_slice_header);
3240 __ bind(&two_byte_slice);
3241 __ AllocateTwoByteSlicedString(v0, a2, a6, a7, &runtime);
3242 __ bind(&set_slice_header);
3248 __ bind(&copy_routine);
3255 Label two_byte_sequential, sequential_string, allocate_result;
3259 __ Branch(&sequential_string, eq, a4, Operand(zero_reg));
3265 __ Branch(&runtime, ne, a4, Operand(zero_reg));
3268 __ jmp(&allocate_result);
3270 __ bind(&sequential_string);
3275 __ bind(&allocate_result);
3279 __ Branch(&two_byte_sequential, eq, a4, Operand(zero_reg));
3282 __ AllocateOneByteString(v0, a2, a4, a6, a7, &runtime);
3285 __ Daddu(a5, a5, a3);
3300 __ bind(&two_byte_sequential);
3301 __ AllocateTwoByteString(v0, a2, a4, a6, a7, &runtime);
3306 __ Daddu(a5, a5, a4);
3318 __ bind(&return_v0);
3319 Counters* counters = isolate()->counters();
3320 __ IncrementCounter(counters->sub_string_native(), 1, a3, a4);
3325 __ TailCallRuntime(Runtime::kSubString, 3, 1);
3327 __ bind(&single_char);
3332 StringCharAtGenerator generator(
3334 generator.GenerateFast(masm);
3336 generator.SkipSlow(masm, &runtime);
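// SubStringStub prefers a sliced string (a small header pointing into the
// parent) when FLAG_string_slices allows it and the substring is long
// enough; otherwise it allocates a sequential string and copies.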
3341 MacroAssembler* masm, Register left, Register right, Register scratch1,
3342 Register scratch2, Register scratch3) {
3343 Register length = scratch1;
3346 Label strings_not_equal, check_zero_length;
3349 __ Branch(&check_zero_length, eq, length, Operand(scratch2));
3350 __ bind(&strings_not_equal);
3356 Label compare_chars;
3357 __ bind(&check_zero_length);
3359 __ Branch(&compare_chars, ne, length, Operand(zero_reg));
3365 __ bind(&compare_chars);
3368 v0, &strings_not_equal);
3377 MacroAssembler* masm, Register left, Register right, Register scratch1,
3378 Register scratch2, Register scratch3, Register scratch4) {
3379 Label result_not_equal, compare_lengths;
3383 __ Dsubu(scratch3, scratch1, Operand(scratch2));
3384 Register length_delta = scratch3;
3385 __ slt(scratch4, scratch2, scratch1);
3386 __ Movn(scratch1, scratch2, scratch4);
3387 Register min_length = scratch1;
3389 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));
3393 scratch4, v0, &result_not_equal);
3396 __ bind(&compare_lengths);
3399 __ mov(scratch2, length_delta);
3400 __ mov(scratch4, zero_reg);
3401 __ mov(v0, zero_reg);
3403 __ bind(&result_not_equal);
3407 __ Branch(&ret, eq, scratch2, Operand(scratch4));
3409 __ Branch(&ret, gt, scratch2, Operand(scratch4));
3417 MacroAssembler* masm, Register left, Register right, Register length,
3418 Register scratch1, Register scratch2, Register scratch3,
3419 Label* chars_not_equal) {
3423 __ SmiUntag(length);
3424 __ Daddu(scratch1, length,
3426 __ Daddu(left, left, Operand(scratch1));
3427 __ Daddu(right, right, Operand(scratch1));
3428 __ Dsubu(length, zero_reg, length);
3429 Register index = length;
3435 __ Daddu(scratch3, left, index);
3437 __ Daddu(scratch3, right, index);
3439 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
3440 __ Daddu(index, index, 1);
3441 __ Branch(&loop, ne, index, Operand(zero_reg));
3448 Counters* counters = isolate()->counters();
3457 __ Branch(&not_same, ne, a0, Operand(a1));
3461 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
3467 __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime);
3470 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);
3475 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
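// StringCompareStub: identical operands and flat one-byte pairs are
// decided inline; every other shape tail-calls Runtime::kStringCompare.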
3479 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3489 __ li(a2, handle(isolate()->heap()->undefined_value()));
3492 if (FLAG_debug_code) {
3494 __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
3496 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
3497 __ Assert(eq, kExpectedAllocationSite, a4, Operand(at));
3502 BinaryOpWithAllocationSiteStub stub(isolate(), state());
3503 __ TailCallStub(&stub);
3511 __ JumpIfNotSmi(a2, &miss);
3516 __ Dsubu(v0, a0, a1);
3522 __ Dsubu(v0, a1, a0);
3534 Label unordered, maybe_undefined1, maybe_undefined2;
3538 __ JumpIfNotSmi(a1, &miss);
3541 __ JumpIfNotSmi(a0, &miss);
3547 Label done, left, left_smi, right_smi;
3548 __ JumpIfSmi(a0, &right_smi);
3549 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
3554 __ bind(&right_smi);
3555 __ SmiUntag(a2, a0);
3556 FPURegister single_scratch = f6;
3557 __ mtc1(a2, single_scratch);
3558 __ cvt_d_w(f2, single_scratch);
3561 __ JumpIfSmi(a1, &left_smi);
3562 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
3568 __ SmiUntag(a2, a1);
3569 single_scratch = f8;
3570 __ mtc1(a2, single_scratch);
3571 __ cvt_d_w(f0, single_scratch);
3576 Label fpu_eq, fpu_lt;
3578 __ BranchF(&fpu_eq, &unordered, eq, f0, f2);
3594 __ li(v0, Operand(LESS));
3596 __ bind(&unordered);
3597 __ bind(&generic_stub);
3602 __ bind(&maybe_undefined1);
3604 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3605 __ Branch(&miss, ne, a0, Operand(at));
3606 __ JumpIfSmi(a1, &unordered);
3607 __ GetObjectType(a1, a2, a2);
3612 __ bind(&maybe_undefined2);
3614 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
3615 __ Branch(&unordered, eq, a1, Operand(at));
3629 Register right = a0;
3642 __ Or(tmp1, tmp1, Operand(tmp2));
3644 __ Branch(&miss, ne, at, Operand(zero_reg));
3670 Register right = a0;
3684 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
3685 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
3715 Register right = a0;
3732 __ Or(tmp3, tmp1, tmp2);
3734 __ Branch(&miss, ne, tmp5, Operand(zero_reg));
3737 Label left_ne_right;
3742 __ mov(v0, zero_reg);
3743 __ bind(&left_ne_right);
3753 __ Or(tmp3, tmp1, Operand(tmp2));
3756 __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
3762 __ bind(&is_symbol);
3767 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
3783 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
3785 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
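// For string equality, two internalized strings compare by pointer;
// non-internalized inputs are either compared flat (one-byte case above)
// or deferred to the runtime.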
3796 __ And(a2, a1, Operand(a0));
3797 __ JumpIfSmi(a2, &miss);
3799 __ GetObjectType(a0, a2, a2);
3801 __ GetObjectType(a1, a2, a2);
3806 __ dsubu(v0, a0, a1);
3816 __ JumpIfSmi(a2, &miss);
3823 __ dsubu(v0, a0, a1);
3833 ExternalReference miss =
3834 ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
3869 __ Assert(ne, kReceivedInvalidReturnAddress, a4,
3870 Operand(reinterpret_cast<uint64_t>(kZapValue)));
3879 reinterpret_cast<intptr_t>(GetCode().location());
3880 __ Move(t9, target);
3890 Register properties,
3892 Register scratch0) {
3902 Register index = scratch0;
3907 Operand(name->Hash() + NameDictionary::GetProbeOffset(i)));
3914 Register entity_name = scratch0;
3917 Register tmp = properties;
3920 __ Daddu(tmp, properties, scratch0);
3923 DCHECK(!tmp.is(entity_name));
3924 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
3925 __ Branch(done, eq, entity_name, Operand(tmp));
3928 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);
3931 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));
3934 __ Branch(&good, eq, entity_name, Operand(tmp));
3940 __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
3948 const int spill_mask =
3949 (ra.bit() | a6.bit() | a5.bit() | a4.bit() | a3.bit() |
3950 a2.bit() | a1.bit() | a0.bit() | v0.bit());
3952 __ MultiPush(spill_mask);
3954 __ li(a1, Operand(Handle<Name>(name)));
3958 __ MultiPop(spill_mask);
3960 __ Branch(done, eq, at, Operand(zero_reg));
3961 __ Branch(miss, ne, at, Operand(zero_reg));
3975 Register scratch2) {
3976 DCHECK(!elements.is(scratch1));
3977 DCHECK(!elements.is(scratch2));
3985 __ SmiUntag(scratch1);
3986 __ Dsubu(scratch1, scratch1, Operand(1));
3998 DCHECK(NameDictionary::GetProbeOffset(i) <
4000 __ Daddu(scratch2, scratch2, Operand(
4004 __ And(scratch2, scratch1, scratch2);
4010 __ dsll(at, scratch2, 1);
4011 __ Daddu(scratch2, scratch2, at);
4015 __ Daddu(scratch2, elements, at);
4017 __ Branch(done, eq, name, Operand(at));
4020 const int spill_mask =
4021 (ra.bit() | a6.bit() | a5.bit() | a4.bit() |
4022 a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
4023 ~(scratch1.bit() | scratch2.bit());
4025 __ MultiPush(spill_mask);
4027 DCHECK(!elements.is(a1));
4029 __ Move(a0, elements);
4031 __ Move(a0, elements);
4036 __ mov(scratch2, a2);
4038 __ MultiPop(spill_mask);
4040 __ Branch(done, ne, at, Operand(zero_reg));
4041 __ Branch(miss, eq, at, Operand(zero_reg));
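// Positive dictionary lookup: quadratic probing with a few inlined
// probes, then an out-of-line NameDictionaryLookupStub call for the rest.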
4060 Register index = a2;
4063 Register undefined = a5;
4064 Register entry_key = a6;
4066 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4070 __ Dsubu(mask, mask, Operand(1));
4074 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);
4083 DCHECK(NameDictionary::GetProbeOffset(i) <
4085 __ Daddu(index, hash, Operand(
4107 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
4110 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
4117 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);
4121 __ bind(&maybe_in_dictionary);
4130 __ bind(&in_dictionary);
4134 __ bind(&not_in_dictionary);
4155 Label skip_to_incremental_noncompacting;
4156 Label skip_to_incremental_compacting;
4164 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
4166 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
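// RecordWriteStub starts with two patchable branches; the GC rewrites
// them to nops (and back) to switch between store-buffer-only mode and
// the two incremental-marking modes without regenerating the stub.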
4170 __ RememberedSetHelper(object(),
4178 __ bind(&skip_to_incremental_noncompacting);
4181 __ bind(&skip_to_incremental_compacting);
4196 Label dont_need_remembered_set;
4201 &dont_need_remembered_set);
4207 &dont_need_remembered_set);
4215 __ RememberedSetHelper(object(),
4221 __ bind(&dont_need_remembered_set);
4234 int argument_count = 3;
4243 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
4245 AllowExternalCallThatCantCauseGC scope(masm);
4247 ExternalReference::incremental_marking_record_write_function(isolate()),
4254 MacroAssembler* masm,
4255 OnNoNeedToInformIncrementalMarker on_no_need,
4258 Label need_incremental;
4259 Label need_incremental_pop_scratch;
4277 __ RememberedSetHelper(object(),
4292 Label ensure_not_white;
4306 __ bind(&ensure_not_white);
4316 &need_incremental_pop_scratch);
4321 __ RememberedSetHelper(object(),
4330 __ bind(&need_incremental_pop_scratch);
4333 __ bind(&need_incremental);
4349 Label double_elements;
4351 Label slow_elements;
4352 Label fast_elements;
4359 __ CheckFastElements(a2, a5, &double_elements);
4361 __ JumpIfSmi(a0, &smi_element);
4362 __ CheckFastSmiElements(a2, a5, &fast_elements);
4366 __ bind(&slow_elements);
4372 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
4375 __ bind(&fast_elements);
4378 __ Daddu(a6, a5, a6);
4389 __ bind(&smi_element);
4392 __ Daddu(a6, a5, a6);
4398 __ bind(&double_elements);
4400 __ StoreNumberToDoubleElements(a0, a3, a5, a7, t1, a2, &slow_elements);
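// Array literal element stores dispatch on elements kind: fast object,
// fast smi-only, or double elements, with the runtime handling any store
// that would force a transition.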
4409 int parameter_count_offset =
4413 __ Daddu(a1, a1, Operand(1));
4415 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4424 VectorLoadStub stub(isolate(), state());
4431 VectorKeyedLoadStub stub(isolate());
4437 if (masm->isolate()->function_entry_hook() != NULL) {
4449 const int32_t kReturnAddressDistanceFromFunctionStart =
4461 __ MultiPush(kSavedRegs | ra.bit());
4464 __ Dsubu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
4471 int frame_alignment = masm->ActivationFrameAlignment();
4475 __ And(sp, sp, Operand(-frame_alignment));
4479 #if defined(V8_HOST_ARCH_MIPS) || defined(V8_HOST_ARCH_MIPS64)
4480 int64_t entry_hook =
4481 reinterpret_cast<int64_t>(isolate()->function_entry_hook());
4482 __ li(t9, Operand(entry_hook));
4487 __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
4490 __ li(t9, Operand(ExternalReference(&dispatcher,
4491 ExternalReference::BUILTIN_CALL,
4505 __ MultiPop(kSavedRegs | ra.bit());
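// ProfileEntryHookStub calls the embedder's entry hook with the
// instrumented function's address and the caller's stack pointer, saving
// and restoring the live argument registers around the call.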
4511 static void CreateArrayDispatch(MacroAssembler* masm,
4515 __ TailCallStub(&stub);
4519 for (int i = 0; i <= last_index; ++i) {
4521 T stub(masm->isolate(), kind);
4522 __ TailCallStub(&stub, eq, a3, Operand(kind));
4526 __ Abort(kUnexpectedElementsKindInArrayConstructor);
4533 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4540 Label normal_sequence;
4550 __ And(at, a3, Operand(1));
4551 __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
4555 __ Branch(&normal_sequence, eq, a5, Operand(zero_reg));
4561 ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4564 __ TailCallStub(&stub_holey);
4566 __ bind(&normal_sequence);
4567 ArraySingleArgumentConstructorStub stub(masm->isolate(),
4570 __ TailCallStub(&stub);
4574 __ Daddu(a3, a3, Operand(1));
4576 if (FLAG_debug_code) {
4578 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
4579 __ Assert(eq, kExpectedAllocationSite, a5, Operand(at));
4591 __ bind(&normal_sequence);
4594 for (int i = 0; i <= last_index; ++i) {
4596 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4597 __ TailCallStub(&stub, eq, a3, Operand(kind));
4601 __ Abort(kUnexpectedElementsKindInArrayConstructor);
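// CreateArrayDispatchOneArgument may also rewrite the AllocationSite's
// transition info in place when a nonzero first argument forces the
// holey variant of the elements kind.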
4609 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4612 for (int i = 0; i <= to_index; ++i) {
4614 T stub(isolate, kind);
4625 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4627 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4629 ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4637 for (int i = 0; i < 2; i++) {
4639 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4641 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4643 InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4650 MacroAssembler* masm,
4653 Label not_zero_case, not_one_case;
4655 __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
4656 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4658 __ bind(&not_zero_case);
4659 __ Branch(&not_one_case, gt, a0, Operand(1));
4660 CreateArrayDispatchOneArgument(masm, mode);
4662 __ bind(&not_one_case);
4663 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4665 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4667 CreateArrayDispatchOneArgument(masm, mode);
4669 CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4685 if (FLAG_debug_code) {
4693 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4694 at, Operand(zero_reg));
4695 __ GetObjectType(a4, a4, a5);
4696 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4700 __ AssertUndefinedOrAllocationSite(a2, a4);
4705 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4706 __ Branch(&no_info, eq, a2, Operand(at));
4722 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
4723 __ TailCallStub(&stub0, lo, a0, Operand(1));
4725 InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
4726 __ TailCallStub(&stubN, hi, a0, Operand(1));
4733 InternalArraySingleArgumentConstructorStub
4735 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
4738 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
4739 __ TailCallStub(&stub1);
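// Internal arrays dispatch purely on argument count: zero, one (with a
// holey variant chosen when the single argument is nonzero), or N.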
4751 if (FLAG_debug_code) {
4759 __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
4760 at, Operand(zero_reg));
4761 __ GetObjectType(a3, a3, a4);
4762 __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
4773 __ DecodeField<Map::ElementsKindBits>(a3);
4775 if (FLAG_debug_code) {
4779 eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
4784 Label fast_elements_case;
4788 __ bind(&fast_elements_case);
4807 Register callee = a0;
4808 Register call_data = a4;
4809 Register holder = a2;
4810 Register api_function_address = a1;
4811 Register context = cp;
4817 typedef FunctionCallbackArguments FCA;
4829 __ Push(context, callee, call_data);
4833 Register scratch = call_data;
4835 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
4838 __ Push(scratch, scratch);
4840 Operand(ExternalReference::isolate_address(isolate())));
4842 __ Push(scratch, holder);
4845 __ mov(scratch, sp);
4849 const int kApiStackSpace = 4;
4851 FrameScope frame_scope(masm, StackFrame::MANUAL);
4852 __ EnterExitFrame(false, kApiStackSpace);
4854 DCHECK(!api_function_address.is(a0) && !scratch.is(a0));
4864 __ li(at, Operand(argc));
4869 const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
4870 ExternalReference thunk_ref =
4871 ExternalReference::invoke_function_callback(isolate());
4873 AllowExternalCallThatCantCauseGC scope(masm);
4877 int return_value_offset = 0;
4879 return_value_offset = 2 + FCA::kArgsLength;
4881 return_value_offset = 2 + FCA::kReturnValueOffset;
4885 __ CallApiFunctionAndReturn(api_function_address,
4888 return_value_operand,
4889 &context_restore_operand);
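// CallApiFunctionStub lays out the FunctionCallbackInfo on the stack,
// enters an exit frame, and leaves handle-scope bookkeeping and exception
// propagation to CallApiFunctionAndReturn.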
4902 DCHECK(api_function_address.is(a2));
4907 const int kApiStackSpace = 1;
4908 FrameScope frame_scope(masm, StackFrame::MANUAL);
4909 __ EnterExitFrame(false, kApiStackSpace);
4918 ExternalReference thunk_ref =
4919 ExternalReference::invoke_accessor_getter_callback(isolate());
4920 __ CallApiFunctionAndReturn(api_function_address,
#define kLithiumScratchDouble
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kTransitionInfoOffset
static const Register function_address()
static const Register parameter_count()
static const Register index()
void GenerateReadElement(MacroAssembler *masm)
void GenerateNewSloppySlow(MacroAssembler *masm)
void GenerateNewStrict(MacroAssembler *masm)
void GenerateNewSloppyFast(MacroAssembler *masm)
static const int kLengthOffset
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateDispatchToArrayStub(MacroAssembler *masm, AllocationSiteOverrideMode mode)
ArgumentCountKey argument_count() const
static const int kInstrSize
friend class BlockTrampolinePoolScope
static const int kCallTargetAddressOffset
static void GenerateAheadOfTime(Isolate *isolate)
bool save_doubles() const
static void GenerateAheadOfTime(Isolate *isolate)
CEntryStub(Isolate *isolate, int result_size, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
bool NeedsImmovableCode()
bool call_data_undefined() const
STATIC_ASSERT(Code::kArgumentsBits+2<=kStubMinorKeyBits)
bool RecordCallTarget() const
bool CallAsMethod() const
bool CallAsMethod() const
void GenerateMiss(MacroAssembler *masm)
virtual InlineCacheState GetICState() const OVERRIDE
static const int kValueOffset
static const int kHeaderSize
Condition GetCondition() const
void GenerateInternalizedStrings(MacroAssembler *masm)
void GenerateStrings(MacroAssembler *masm)
CompareICState::State state() const
void GenerateMiss(MacroAssembler *masm)
CompareICState::State left() const
void GenerateGeneric(MacroAssembler *masm)
CompareICState::State right() const
void GenerateObjects(MacroAssembler *masm)
CompareICStub(Isolate *isolate, Token::Value op, CompareICState::State left, CompareICState::State right, CompareICState::State state)
void GenerateNumbers(MacroAssembler *masm)
void GenerateUniqueNames(MacroAssembler *masm)
void GenerateKnownObjects(MacroAssembler *masm)
void GenerateSmis(MacroAssembler *masm)
static const int kFirstOffset
static const int kMinLength
static const int kSecondOffset
@ SLOPPY_ARGUMENTS_MAP_INDEX
@ STRICT_ARGUMENTS_MAP_INDEX
@ ALIASED_ARGUMENTS_MAP_INDEX
static int SlotOffset(int index)
static void GenerateAheadOfTime(Isolate *isolate)
void GenerateCall(MacroAssembler *masm, Register target)
bool skip_fastpath() const
Register destination() const
static const int kCallerFPOffset
static const int kMaxShortLength
static const int kResourceDataOffset
static const int kLengthOffset
static const int kHeaderSize
static const int kNativeContextOffset
static const int kEntrySize
static const int kMantissaBits
static const uint32_t kSignMask
static const int kValueOffset
static const uint32_t kExponentMask
static const int kMantissaBitsInTopWord
static const int kExponentBits
static const int kExponentBias
static const int kExponentShift
static const int kNonMantissaBitsInTopWord
static const int kMapOffset
static const int kStrictArgumentsObjectSize
static const int kSloppyArgumentsObjectSize
static const int kArgumentsCalleeIndex
static const int kArgumentsLengthIndex
void GenerateLightweightMiss(MacroAssembler *masm, ExternalReference miss)
bool HasCallSiteInlineCheck() const
bool HasArgsInRegisters() const
bool ReturnTrueFalseObject() const
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateCase(MacroAssembler *masm, ElementsKind kind)
static const int kJSRegexpStaticOffsetsVectorSize
StackFrame::Type type() const
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kLiteralsOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kElementsOffset
static const int kDataOneByteCodeOffset
static const int kIrregexpCaptureCountOffset
static const int kDataTagOffset
static const int kDataOffset
static const int kDataUC16CodeOffset
static const int kFunctionOffset
static const Register ReceiverRegister()
static const Register NameRegister()
LoadICState state() const
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kBitField2Offset
static const int kPrototypeOffset
ExponentType exponent_type() const
static const Register exponent()
static const size_t kWriteBarrierCounterOffset
static const int kEvacuationCandidateMask
static const int kSkipEvacuationSlotsRecordingMask
static const int kElementsStartOffset
NameDictionaryLookupStub(Isolate *isolate, LookupMode mode)
static const int kCapacityOffset
Register dictionary() const
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static const int kTotalProbes
static const int kInlinedProbes
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kHashShift
static const int kHashFieldOffset
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
static const intptr_t kPageAlignmentMask
ProfileEntryHookStub(Isolate *isolate)
static void MaybeCallEntryHook(MacroAssembler *masm)
static void EntryHookTrampoline(intptr_t function, intptr_t stack_pointer, Isolate *isolate)
static const int kArgsLength
void Restore(MacroAssembler *masm)
void SaveCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void Save(MacroAssembler *masm)
void RestoreCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void GenerateIncremental(MacroAssembler *masm, Mode mode)
void InformIncrementalMarker(MacroAssembler *masm)
RememberedSetAction remembered_set_action() const
static void PatchBranchIntoNop(MacroAssembler *masm, int pos)
SaveFPRegsMode save_fp_regs_mode() const
@ kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
@ kReturnOnNoNeedToInformIncrementalMarker
void CheckNeedsToInformIncrementalMarker(MacroAssembler *masm, OnNoNeedToInformIncrementalMarker on_no_need, Mode mode)
virtual void Generate(MacroAssembler *masm) OVERRIDE
static const int kLastCaptureCountOffset
static const int kLastSubjectOffset
static const int kLastMatchOverhead
static const int kLastInputOffset
static const int kFirstCaptureOffset
static const Function * FunctionForId(FunctionId id)
static const int kHeaderSize
static const int kConstructStubOffset
static const int kFeedbackVectorOffset
static const int kNativeBitWithinByte
static const int kStrictModeBitWithinByte
static const int kMinLength
static const int kParentOffset
static const int kOffsetOffset
static Smi * FromInt(int value)
static const int kContextOffset
static const int kCallerSPOffset
static const int kCallerFPOffset
static void GenerateFixedRegStubsAheadOfTime(Isolate *isolate)
StoreBufferOverflowStub(Isolate *isolate, SaveFPRegsMode save_fp)
bool save_doubles() const
StringIndexFlags index_flags_
Label* receiver_not_string_
Label* index_out_of_range_
void GenerateFast(MacroAssembler* masm)
Label* index_not_number_
void GenerateSlow(MacroAssembler* masm, const RuntimeCallHelper& call_helper)
void GenerateFast(MacroAssembler* masm)
void GenerateSlow(MacroAssembler* masm, const RuntimeCallHelper& call_helper)
static void Generate(MacroAssembler* masm, Register string, Register index, Register result, Label* call_runtime)
static void GenerateOneByteCharsCompareLoop(MacroAssembler* masm, Register left, Register right, Register length, Register scratch1, Register scratch2, Label* chars_not_equal)
static void GenerateCompareFlatOneByteStrings(MacroAssembler* masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static void GenerateCopyCharacters(MacroAssembler* masm, Register dest, Register src, Register count, Register scratch, String::Encoding encoding)
static void GenerateFlatOneByteStringEquals(MacroAssembler* masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
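// A minimal sketch of the comparison GenerateFlatOneByteStringEquals emits:
// lengths first, then the raw one-byte payloads. The assembler version
// operates on tagged strings and branches to a label rather than returning
// a bool.
static bool FlatOneByteEqualsSketch(const uint8_t* left, int left_length,
                                    const uint8_t* right, int right_length) {
  if (left_length != right_length) return false;
  for (int i = 0; i < left_length; i++) {
    if (left[i] != right[i]) return false;  // chars_not_equal in the stub.
  }
  return true;
}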
static const int32_t kMaxOneByteCharCode
static const int kLengthOffset
static const int kCallerStackParameterCountFrameOffset
StubFunctionMode function_mode() const
static void GenerateAheadOfTime(Isolate* isolate)
static bool IsOrderedRelationalCompareOp(Value op)
static bool IsEqualityOp(Value op)
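// A sketch, assuming the usual Token::Value names, of how comparisons are
// routed: equality ops can take the identity/string fast paths, ordered
// relational ops need the full numeric compare.
static bool IsEqualityOpSketch(Token::Value op) {
  return op == Token::EQ || op == Token::EQ_STRICT;
}
static bool IsOrderedRelationalCompareOpSketch(Token::Value op) {
  return op == Token::LT || op == Token::GT || op == Token::LTE ||
         op == Token::GTE;
}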
static Handle<Object> UninitializedSentinel(Isolate* isolate)
static Handle<Object> MegamorphicSentinel(Isolate* isolate)
static const Register VectorRegister()
void Generate(MacroAssembler *masm)
Register the_heap_number() const
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate)
WriteInt32ToHeapNumberStub(Isolate* isolate, Register the_int, Register the_heap_number, Register scratch)
static const AbiVariants kMipsAbi
static const ArchVariants kArchVariant
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
bool IsPowerOfTwo32(uint32_t value)
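// A one-line sketch of IsPowerOfTwo32: a power of two has exactly one bit
// set, so value & (value - 1) clears it to zero; zero itself is excluded.
static bool IsPowerOfTwo32Sketch(uint32_t value) {
  return value != 0 && (value & (value - 1)) == 0;
}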
static int Push(SpecialRPOStackFrame* stack, int depth, BasicBlock* child, int unvisited)
const uint32_t kStringEncodingMask
const FPUControlRegister FCSR
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
DONT_TRACK_ALLOCATION_SITE
const RegList kJSCallerSaved
kCheckForInexactConversion
const intptr_t kPointerAlignmentMask
const intptr_t kSmiSignMask
const uint32_t kTwoByteStringTag
const uint32_t kShortExternalStringTag
const RegList kCalleeSaved
const int kFastElementsKindPackedToHoley
const uint32_t kNotStringTag
DwVfpRegister DoubleRegister
const uint32_t kFCSRUnderflowFlagMask
const int kPointerSizeLog2
const uint32_t kStringTag
FAST_HOLEY_DOUBLE_ELEMENTS
TERMINAL_FAST_ELEMENTS_KIND
FAST_HOLEY_SMI_ELEMENTS
Handle<T> handle(T* t, Isolate* isolate)
const uint32_t kOneByteStringTag
MemOperand FieldMemOperand(Register object, int offset)
const intptr_t kObjectAlignmentMask
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool IsFastPackedElementsKind(ElementsKind kind)
const bool FLAG_enable_slow_asserts
const uint32_t kShortExternalStringMask
const uint32_t kFCSRInvalidOpFlagMask
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
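// A sketch of the packed-to-holey mapping GetHoleyElementsKind relies on:
// each holey fast kind sits at a fixed enum offset
// (kFastElementsKindPackedToHoley) from its packed counterpart, so the
// conversion is a single add. The enum values below are assumptions for
// illustration only.
enum ElementsKindSketch {
  SKETCH_FAST_SMI_ELEMENTS = 0,
  SKETCH_FAST_HOLEY_SMI_ELEMENTS = 1,
  SKETCH_FAST_ELEMENTS = 2,
  SKETCH_FAST_HOLEY_ELEMENTS = 3,
};
static ElementsKindSketch GetHoleyElementsKindSketch(
    ElementsKindSketch packed) {
  const int kPackedToHoleySketch = 1;
  return static_cast<ElementsKindSketch>(packed + kPackedToHoleySketch);
}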
AllocationSiteOverrideMode
DISABLE_ALLOCATION_SITES
const uint32_t kStringRepresentationMask
const uint32_t kFCSROverflowFlagMask
const uint32_t kSlicedNotConsMask
const int kCArgsSlotsSize
MemOperand UntagSmiFieldMemOperand(Register object, int offset)
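// FieldMemOperand folds away the heap-object tag: with kHeapObjectTag == 1,
// a field at |offset| inside a tagged object is addressed at
// object + offset - 1. UntagSmiFieldMemOperand additionally biases the
// displacement so a 32-bit load reads the payload half of a 64-bit smi
// field, skipping an explicit untag. A sketch of the address arithmetic:
static intptr_t FieldAddressSketch(intptr_t tagged_object, int offset) {
  const int kHeapObjectTagSketch = 1;  // Assumed tag value.
  return tagged_object + offset - kHeapObjectTagSketch;
}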
const uint32_t kInternalizedTag
const int kNumJSCallerSaved
STATIC_ASSERT(sizeof(CPURegister) == sizeof(Register))
const intptr_t kSmiTagMask
const uint32_t kIsNotInternalizedMask
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2 = no_reg, Register reg3 = no_reg, Register reg4 = no_reg, Register reg5 = no_reg, Register reg6 = no_reg)
const uint32_t kIsNotStringMask
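// A sketch of the tag tests these masks support, assuming the standard
// encodings (kSmiTag == 0, and string instance types keeping the
// kIsNotStringMask bit clear): a value is a smi iff its low tag bit is
// zero, and an instance type names a string iff the masked bits are zero.
static bool IsSmiSketch(intptr_t value) {
  const intptr_t kSmiTagMaskSketch = 1;  // Assumed mask.
  return (value & kSmiTagMaskSketch) == 0;
}
static bool IsStringTypeSketch(uint32_t instance_type,
                               uint32_t is_not_string_mask) {
  return (instance_type & is_not_string_mask) == 0;
}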
const int kNumCalleeSavedFPU
const int kNumCalleeSaved
ElementsKind GetInitialFastElementsKind()
const RegList kCalleeSavedFPU
STRING_INDEX_IS_ARRAY_INDEX
const uint32_t kIsIndirectStringMask
const RegList kCallerSavedFPU
static Handle<Value> Throw(Isolate* isolate, const char* message)
bool is(Register reg) const
#define T(name, string, precedence)