#if V8_TARGET_ARCH_MIPS
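// MIPS-specific code stubs. The helpers below wire the (Internal)Array
// constructor stubs to their CodeStubDescriptors: the deopt handler is the
// corresponding runtime function, and a constant stack parameter count of
// -1 means the argument count is dynamic and is passed in a0.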
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}
40 static void InitializeInternalArrayConstructorDescriptor(
41 Isolate* isolate, CodeStubDescriptor* descriptor,
42 int constant_stack_parameter_count) {
44 Runtime::kInternalArrayConstructor)->
entry;
46 if (constant_stack_parameter_count == 0) {
47 descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
50 descriptor->Initialize(a0, deopt_handler, constant_stack_parameter_count,
void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}

void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}

void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}

void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}

void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}

void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}
#define __ ACCESS_MASM(masm)
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow, Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs, Register rhs,
                                    Label* rhs_not_nan, Label* slow,
                                    bool strict);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs, Register rhs);
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetEnvironmentParameterCount();
  DCHECK(param_count == 0 ||
         a0.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
  // Push the arguments described by the descriptor, then call the miss
  // handler in the runtime.
  for (int i = 0; i < param_count; ++i) {
    __ sw(descriptor.GetEnvironmentParameterRegister(i),
          MemOperand(sp, (param_count - 1 - i) * kPointerSize));
  }
  __ CallExternalReference(miss, param_count);

  __ Ret();
}
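// DoubleToIStub: converts a heap double to a truncated int32. It first
// tries the FPU trunc.w.d instruction and inspects the FCSR exception
// flags; if the conversion was inexact or overflowed, it falls back to
// assembling the result from the exponent and mantissa words by hand.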
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Label out_of_range, only_low, negate, done;
  Register input_reg = source();
  Register result_reg = destination();

  int double_offset = offset();

  Register scratch =
      GetRegisterThatIsNotOneOf(input_reg, result_reg);
  Register scratch2 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch);
  Register scratch3 =
      GetRegisterThatIsNotOneOf(input_reg, result_reg, scratch, scratch2);
  DoubleRegister double_scratch = kLithiumScratchDouble;

  __ Push(scratch, scratch2, scratch3);

  // Load the double value and try a conversion with trunc.w.d.
  __ ldc1(double_scratch, MemOperand(input_reg, double_offset));

  __ Trunc_w_d(double_scratch, double_scratch);
  // Move the converted value into the result register.
  __ mfc1(scratch3, double_scratch);

  // If the FCSR reported no exceptions, the conversion was exact.
  Label error;
  __ Branch(&error, ne, scratch, Operand(zero_reg));
  __ Move(result_reg, scratch3);
  __ Branch(&done);
  __ bind(&error);

  // Manual truncation: assemble the result from the raw words.
  Register input_high = scratch2;
  Register input_low = scratch3;
  __ lw(input_low,
        MemOperand(input_reg, double_offset + Register::kMantissaOffset));
  __ lw(input_high,
        MemOperand(input_reg, double_offset + Register::kExponentOffset));

  Label normal_exponent, restore_sign;
  __ Movz(result_reg, zero_reg, scratch);
  __ Branch(&done, eq, scratch, Operand(zero_reg));

  __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg));
  __ mov(result_reg, zero_reg);
  __ Branch(&done);

  __ bind(&normal_exponent);
  Register sign = result_reg;

  Label high_shift_needed, high_shift_done;
  __ Branch(&high_shift_needed, lt, scratch, Operand(32));
  __ mov(input_high, zero_reg);
  __ Branch(&high_shift_done);
  __ bind(&high_shift_needed);
  __ sllv(input_high, input_high, scratch);
  __ bind(&high_shift_done);

  Label pos_shift, shift_done;
  __ subu(scratch, at, scratch);
  __ Branch(&pos_shift, ge, scratch, Operand(zero_reg));

  // Negate scratch.
  __ Subu(scratch, zero_reg, scratch);
  __ sllv(input_low, input_low, scratch);
  __ Branch(&shift_done);

  __ bind(&pos_shift);
  __ srlv(input_low, input_low, scratch);

  __ bind(&shift_done);
  __ Or(input_high, input_high, Operand(input_low));

  __ Subu(result_reg, zero_reg, input_high);
  __ Movz(result_reg, input_high, scratch);

  __ bind(&done);
  __ Pop(scratch, scratch2, scratch3);
  __ Ret();
}
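// WriteInt32ToHeapNumberStub: stores a signed int32 that did not fit in a
// Smi into a HeapNumber as an IEEE double, assembling the exponent and
// mantissa words directly. 0x80000000 has no positive counterpart, so it
// is handled separately under max_negative_int.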
void WriteInt32ToHeapNumberStub::Generate(MacroAssembler* masm) {
  Label max_negative_int;
  // the_int() holds the answer, which is a signed int32 but not a Smi.
  // Test for the special value that has a different exponent.
  __ Branch(&max_negative_int, eq, the_int(), Operand(0x80000000u));

  // Set up the correct exponent in scratch(). All non-Smi int32s have the
  // same exponent: a non-Smi integer is 1.xxx * 2^30, so the biased
  // exponent is 30.
  uint32_t non_smi_exponent =
      (HeapNumber::kExponentBias + 30) << HeapNumber::kExponentShift;
  __ li(scratch(), Operand(non_smi_exponent));

  __ sw(scratch(), FieldMemOperand(the_heap_number(),
                                   HeapNumber::kExponentOffset));
  __ sw(scratch(), FieldMemOperand(the_heap_number(),
                                   HeapNumber::kMantissaOffset));

  __ bind(&max_negative_int);
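// Comparison helpers shared by CompareICStub::GenerateGeneric. Each one
// either returns the final answer in v0 or branches onward to the next
// check in the chain.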
// Handle the case where the lhs and rhs are the same object. Equality is
// almost reflexive (everything but NaN), so start by testing for
// "identical and not NaN".
static void EmitIdenticalObjectComparison(MacroAssembler* masm,
                                          Label* slow, Condition cc) {
  Label not_identical;
  Label heap_number, return_equal;
  Register exp_mask_reg = t5;

  __ Branch(&not_identical, ne, a0, Operand(a1));

  __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask));

  // The operands are identical and not both Smis; if a0 is not a heap
  // number, the objects are equal.
  if (cc == less || cc == greater) {
    __ GetObjectType(a0, t4, t4);
    __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE));
  } else {
    __ GetObjectType(a0, t4, t4);
    __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE));
    // Comparing JS objects with <=, >= is complicated.
    if (cc != eq) {
      __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE));
      // Normally we fall through to return_equal, but undefined is special:
      // (undefined == undefined) == true, but (undefined <= undefined) ==
      // false! See ECMAScript 11.8.5.
      if (cc == less_equal || cc == greater_equal) {
        __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
        __ Branch(&return_equal, ne, a0, Operand(t2));
      }
    }
  }

  __ bind(&return_equal);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, zero_reg);  // Things are <=, >=, ==, === themselves.

  __ bind(&heap_number);
  // NaN values have all exponent bits (52..62) set and not all mantissa
  // bits (0..51) clear.
  __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
  __ And(t3, t2, Operand(exp_mask_reg));
  // If all exponent bits are not set (ne), it is not a NaN: objects equal.
  __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg));

  // Shift out flag and all exponent bits, retaining only the mantissa.
  __ sll(t2, t2, HeapNumber::kNonMantissaBitsInTopWord);
  __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
  __ Or(v0, t3, Operand(t2));
  // Return zero (equal) if all mantissa bits are zero (an Infinity),
  // non-zero if not (a NaN).
  __ Ret(eq, v0, Operand(zero_reg));

  __ bind(&not_identical);
}
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
                                    Register lhs, Register rhs,
                                    Label* both_loaded_as_doubles,
                                    Label* slow, bool strict) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  Label lhs_is_smi;
  __ JumpIfSmi(lhs, &lhs_is_smi);
  // Rhs is a Smi; check whether the non-Smi lhs is a heap number.
  __ GetObjectType(lhs, t4, t4);

  __ jmp(both_loaded_as_doubles);

  __ bind(&lhs_is_smi);
  // Lhs is a Smi; check whether the non-Smi rhs is a heap number.
  __ GetObjectType(rhs, t4, t4);

  __ li(v0, Operand(1));
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs, Register rhs) {
  Label first_non_object;
  // Get the type of the first operand into a2.
  __ GetObjectType(lhs, a2, a2);

  // Return non-zero.
  Label return_not_equal;
  __ bind(&return_not_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(1));

  __ bind(&first_non_object);
  __ GetObjectType(rhs, a3, a3);

  // Check for internalized-internalized: two distinct internalized strings
  // are never equal.
  __ Or(a2, a2, Operand(a3));
  __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ Branch(&return_not_equal, eq, at, Operand(zero_reg));
}
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
                                       Register lhs, Register rhs,
                                       Label* both_loaded_as_doubles,
                                       Label* not_heap_numbers,
                                       Label* slow) {
  __ GetObjectType(lhs, a3, a2);
  __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE));
  __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset));
  // If the first was a heap number and the second wasn't, go slow.
  __ Branch(slow, ne, a3, Operand(a2));

  // Both are heap numbers; load them and jump to the doubles comparison.
  __ jmp(both_loaded_as_doubles);
}
static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
                                                     Register lhs,
                                                     Register rhs,
                                                     Label* possible_strings,
                                                     Label* not_both_strings) {
  DCHECK((lhs.is(a0) && rhs.is(a1)) ||
         (lhs.is(a1) && rhs.is(a0)));

  Label object_test;
  __ Branch(&object_test, ne, at, Operand(zero_reg));
  __ Branch(possible_strings, ne, at, Operand(zero_reg));
  __ GetObjectType(rhs, a3, a3);
  __ Branch(possible_strings, ne, at, Operand(zero_reg));

  // Both are internalized strings: they cannot be equal, because they are
  // not the same object (that was checked earlier).
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(1));

  __ bind(&object_test);
  __ GetObjectType(rhs, a2, a3);
static void CompareICStub_CheckInputType(MacroAssembler* masm, Register input,
                                         Register scratch,
                                         CompareICState::State expected,
                                         Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail,
                DONT_DO_SMI_CHECK);
  }
  __ bind(&ok);
}
void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Register lhs = a1;
  Register rhs = a0;
  Condition cc = GetCondition();

  Label miss;
  CompareICStub_CheckInputType(masm, lhs, a2, left(), &miss);
  CompareICStub_CheckInputType(masm, rhs, a3, right(), &miss);

  Label slow;  // Call builtin.
  Label not_smis, both_loaded_as_doubles;
  Label not_two_smis, smi_done;
  __ Or(a2, a1, a0);
  __ JumpIfNotSmi(a2, &not_two_smis);
  __ sra(a1, a1, 1);
  __ sra(a0, a0, 1);
  __ Ret(USE_DELAY_SLOT);
  __ subu(v0, a1, a0);
  __ bind(&not_two_smis);

  // Handle the case where the objects are identical. Either returns the
  // answer or goes to slow. Only falls through if the objects were not
  // identical.
  EmitIdenticalObjectComparison(masm, &slow, cc);

  __ And(t2, lhs, Operand(rhs));
  __ JumpIfNotSmi(t2, &not_smis, t0);
  EmitSmiNonsmiComparison(masm, lhs, rhs,
                          &both_loaded_as_doubles, &slow, strict());

  __ bind(&both_loaded_as_doubles);

  EmitStrictTwoHeapObjectCompare(masm, lhs, rhs);

  Label check_for_internalized_strings;
  Label flat_string_check;
  EmitCheckForTwoHeapNumbers(masm,
                             lhs,
                             rhs,
                             &both_loaded_as_doubles,
                             &check_for_internalized_strings,
                             &flat_string_check);

  __ bind(&check_for_internalized_strings);
  EmitCheckForInternalizedStringsOrObjects(
      masm, lhs, rhs, &flat_string_check, &slow);

  // Check for both being sequential one-byte strings and inline if so.
  __ bind(&flat_string_check);
  __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow);
  __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
                      a3);

  Builtins::JavaScript native;
  if (cc == eq) {
    native = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
  } else {
    native = Builtins::COMPARE;
  }
  __ PushSafepointRegisters();

  __ PopSafepointRegisters();
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  const int argument_count = 1;
  const int fp_argument_count = 0;
  const Register scratch = a1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
  __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
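// MathPowStub: fast paths for Math.pow. Exponents of +/-0.5 are computed
// via sqrt (with the -Infinity special cases), integer exponents use
// square-and-multiply, and everything else goes to the C function
// power_double_double or to the runtime.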
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register base = a1;
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(a2));
  const Register heapnumbermap = t1;
  const Register heapnumber = v0;
  const DoubleRegister double_base = f2;
  const DoubleRegister double_exponent = f4;
  const DoubleRegister double_result = f0;
  const DoubleRegister double_scratch = f6;
  const FPURegister single_scratch = f8;
  const Register scratch = t5;
  const Register scratch2 = t3;

  Label call_runtime, done, int_exponent;
  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The base and exponent are supplied as arguments on the stack.
    __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex);

    __ UntagAndJumpIfSmi(scratch, base, &base_is_smi);
    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));

    __ jmp(&unpack_exponent);

    __ bind(&base_is_smi);
    __ mtc1(scratch, single_scratch);
    __ cvt_d_w(double_base, single_scratch);
    __ bind(&unpack_exponent);

    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap));
    __ ldc1(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    // Base is already in double_base.
    __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent);

    __ ldc1(double_exponent,
            FieldMemOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label int_exponent_convert;
    // Detect integer exponents stored as doubles.
    __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg));

    if (exponent_type() == ON_STACK) {
      // Detect the square root case.
      Label not_plus_half;

      // Test for 0.5.
      __ Move(double_scratch, 0.5);

      // Check for the special case of Math.pow(-Infinity, 0.5) == Infinity
      // (ECMA spec, 15.8.2.13).
      __ neg_d(double_result, double_scratch);

      // Add +0 to convert -0 to +0, then take the square root.
      __ sqrt_d(double_result, double_scratch);

      __ bind(&not_plus_half);

      // Test for -0.5.
      __ Move(double_scratch, -0.5);

      // Calculate the reciprocal of the square root.
      __ Move(double_result, 1);
      __ sqrt_d(double_scratch, double_scratch);
      __ div_d(double_result, double_result, double_scratch);
    }

    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch2);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ MovFromFloatResult(double_result);
    __ jmp(&done);

    __ bind(&int_exponent_convert);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);

  // Get two copies of the exponent in scratch and exponent.
  if (exponent_type() == INTEGER) {
    __ mov(scratch, exponent);
  } else {
    // Exponent has previously been stored into scratch as untagged integer.
    __ mov(exponent, scratch);
  }

  __ mov_d(double_scratch, double_base);  // Back up base.
  __ Move(double_result, 1.0);

  // Get absolute value of exponent.
  Label positive_exponent;
  __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg));
  __ Subu(scratch, zero_reg, scratch);
  __ bind(&positive_exponent);

  Label while_true, no_carry, loop_end;
  __ bind(&while_true);

  __ And(scratch2, scratch, 1);

  __ Branch(&no_carry, eq, scratch2, Operand(zero_reg));
  __ mul_d(double_result, double_result, double_scratch);
  __ bind(&no_carry);

  __ sra(scratch, scratch, 1);

  __ Branch(&loop_end, eq, scratch, Operand(zero_reg));
  __ mul_d(double_scratch, double_scratch, double_scratch);

  __ Branch(&while_true);

  __ bind(&loop_end);

  __ Branch(&done, ge, exponent, Operand(zero_reg));
  __ Move(double_scratch, 1.0);
  __ div_d(double_result, double_scratch, double_result);

  // If the result is NaN or infinity, convert the exponent back to double
  // and retry through pow.
  __ mtc1(exponent, single_scratch);
  __ cvt_d_w(double_exponent, single_scratch);

  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as a heap number.
    __ bind(&done);
    __ AllocateHeapNumber(
        heapnumber, scratch, scratch2, heapnumbermap, &call_runtime);
    __ sdc1(double_result,
            FieldMemOperand(heapnumber, HeapNumber::kValueOffset));
    DCHECK(heapnumber.is(v0));
    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
    __ DropAndRet(2);
  } else {
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(0, 2, scratch);
      __ MovToFloatParameters(double_base, double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()),
          0, 2);
    }
    __ MovFromFloatResult(double_result);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2);
    __ Ret();
  }
}
void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
void CodeStub::GenerateFPStubs(Isolate* isolate) {
  SaveFPRegsMode mode = kSaveFPRegs;
  CEntryStub(isolate, 1, mode).GetCode();
  StoreBufferOverflowStub(isolate, mode).GetCode();
  isolate->set_fp_stubs_generated(true);
}
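// CEntryStub: calls a C builtin. The stub computes its own return address
// (the bal/find_ra sequence) so the GC can traverse the exit frame, and on
// return checks whether an exception was thrown (the sentinel in v0) or
// the hole value leaked out.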
void CEntryStub::Generate(MacroAssembler* masm) {
  // Enter the exit frame that transitions from JavaScript to C++.
  FrameScope scope(masm, StackFrame::MANUAL);

  __ AssertStackIsAligned();

  __ li(a2, Operand(ExternalReference::isolate_address(isolate())));

  // To let the GC traverse the return address of the exit frame, we need to
  // know where the return address is. The CEntryStub is unmovable, so we
  // can store the address on the stack and never have to restore it.
  Label find_ra;
  masm->bal(&find_ra);  // bal exposes branch delay slot.
  masm->bind(&find_ra);

  // Adjust the value in ra to point to the correct return location, one
  // instruction past the real call into C code.
  const int kNumInstructionsToJump = 5;
  masm->Addu(ra, ra, kNumInstructionsToJump * kPointerSize);

  DCHECK_EQ(kNumInstructionsToJump,
            masm->InstructionsGeneratedSince(&find_ra));

  if (FLAG_debug_code) {
    Label okay;
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&okay, ne, v0, Operand(t0));
    __ stop("The hole escaped");
    __ bind(&okay);
  }

  // Check the result for the exception sentinel.
  Label exception_returned;
  __ LoadRoot(t0, Heap::kExceptionRootIndex);
  __ Branch(&exception_returned, eq, t0, Operand(v0));

  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());

  // Check that there is no pending exception; otherwise we should have
  // returned the exception sentinel.
  if (FLAG_debug_code) {
    Label okay;
    __ li(a2, Operand(pending_exception_address));
    __ lw(a2, MemOperand(a2));
    __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
    __ Branch(&okay, eq, t0, Operand(a2));
    __ stop("Unexpected pending exception");
    __ bind(&okay);
  }

  // Handling of exception.
  __ bind(&exception_returned);

  // Retrieve the pending exception and clear the variable.
  __ li(a2, Operand(pending_exception_address));
  __ li(a3, Operand(isolate()->factory()->the_hole_value()));

  // Special handling of termination exceptions, which are uncatchable
  // by JavaScript code.
  Label throw_termination_exception;
  __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex);
  __ Branch(&throw_termination_exception, eq, v0, Operand(t0));

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(v0);
}
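// JSEntryStub: the transition from C++ into JavaScript. It builds an entry
// frame, tracks the outermost JS entry stack pointer, installs a JS_ENTRY
// try-handler, and dispatches to JSConstructEntryTrampoline or
// JSEntryTrampoline depending on the frame type.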
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Isolate* isolate = masm->isolate();

  __ InitializeRootRegister();

  // We build an EntryFrame.
  __ li(t3, Operand(-1));  // Push a bad frame pointer to fail if it is used.
  int marker = type();
  __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
                                      isolate)));

  // If this is the outermost JS call, set the js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
  __ li(t1, Operand(ExternalReference(js_entry_sp)));
  __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
  __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ bind(&non_outermost_js);
  __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ bind(&handler_entry);
  // Caught exception: store the result (exception) in the pending
  // exception field and return a failure sentinel.
  __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate)));
  __ LoadRoot(v0, Heap::kExceptionRootIndex);

  // Invoke: link this frame into the handler chain.
  __ bind(&invoke);
  __ PushTryHandler(StackHandler::JS_ENTRY, 0);

  // Clear any pending exceptions.
  __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
  __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate)));

  // Invoke the function by calling through the JS entry trampoline.
  if (type() == StackFrame::ENTRY_CONSTRUCT) {
    ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
                                      isolate);
    __ li(t0, Operand(construct_entry));
  } else {
    ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
    __ li(t0, Operand(entry));
  }

  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ Branch(&non_outermost_js_2,
            ne,
            t1,
            Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
  __ li(t1, Operand(ExternalReference(js_entry_sp)));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
                                      isolate)));
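// InstanceofStub: implements the instanceof operator by walking the
// prototype chain, with a root-array cache (function, map, answer) for the
// non-inlined case and relocated-value patching for inlined call sites.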
void InstanceofStub::Generate(MacroAssembler* masm) {
  // Fixed register usage throughout the stub:
  const Register object = a0;     // Object (lhs).
  Register map = a3;              // Map of the object.
  const Register function = a1;   // Function (rhs).
  const Register prototype = t0;  // Prototype of the function.
  const Register inline_site = t5;
  const Register scratch = a2;

  Label slow, loop, is_instance, is_not_instance, not_js_object;

  // Check that the left hand side is a JS object and load its map.
  __ JumpIfSmi(object, &not_js_object);
  __ IsObjectJSObjectType(object, map, scratch, &not_js_object);

  // Look up the function and the map in the instanceof cache.
  Label miss;
  __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex);
  __ Branch(&miss, ne, function, Operand(at));
  __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex);
  __ Branch(&miss, ne, map, Operand(at));
  __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);

  __ bind(&miss);

  // Get the prototype of the function.
  __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);

  // Check that the function prototype is a JS object.
  __ JumpIfSmi(prototype, &slow);
  __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);

  // Update the global instanceof cache with the current map and function.
  // The cached answer will be set when it is known below.
  __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
  __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);

  // Patch the (relocated) inlined map check: the offset was stored in the
  // t0 safepoint slot.
  __ LoadFromSafepointRegisterSlot(scratch, t0);
  __ Subu(inline_site, ra, scratch);
  // Get the map location in scratch and patch it.
  __ GetRelocatedValue(inline_site, scratch, v1);  // v1 used as scratch.

  // Register mapping: a3 is object map and t0 is function prototype.
  // We don't need map any more; use it as a scratch register.
  Register scratch2 = map;

  // Loop through the prototype chain looking for the function prototype.
  __ LoadRoot(scratch2, Heap::kNullValueRootIndex);
  __ bind(&loop);
  __ Branch(&is_instance, eq, scratch, Operand(prototype));
  __ Branch(&is_not_instance, eq, scratch, Operand(scratch2));

  __ bind(&is_instance);
  if (!HasCallSiteInlineCheck()) {
    __ mov(v0, zero_reg);
    __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
    if (ReturnTrueFalseObject()) {
      __ LoadRoot(v0, Heap::kTrueValueRootIndex);
    }
  } else {
    // Patch the call site to return true.
    __ LoadRoot(v0, Heap::kTrueValueRootIndex);
    __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
    // Get the boolean result location in scratch and patch it.
    __ PatchRelocatedValue(inline_site, scratch, v0);

    if (!ReturnTrueFalseObject()) {
      __ mov(v0, zero_reg);
    }
  }

  __ bind(&is_not_instance);
  if (!HasCallSiteInlineCheck()) {
    __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex);
    if (ReturnTrueFalseObject()) {
      __ LoadRoot(v0, Heap::kFalseValueRootIndex);
    }
  } else {
    // Patch the call site to return false.
    __ LoadRoot(v0, Heap::kFalseValueRootIndex);
    __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult));
    __ PatchRelocatedValue(inline_site, scratch, v0);
  }

  Label object_not_null, object_not_null_or_smi;
  __ bind(&not_js_object);
  // Before null, smi and string value checks, check that the rhs is a
  // function; for a non-function rhs an exception needs to be thrown.
  __ JumpIfSmi(function, &slow);
  __ GetObjectType(function, scratch2, scratch);

  // Null is not an instance of anything.
  __ Branch(&object_not_null,
            ne,
            scratch,
            Operand(isolate()->factory()->null_value()));
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);

  __ bind(&object_not_null);
  // Smi values are not instances of anything.
  __ JumpIfNotSmi(object, &object_not_null_or_smi);
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);

  __ bind(&object_not_null_or_smi);
  // String values are not instances of anything.
  __ IsObjectJSStringType(object, scratch, &slow);
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);

  // Slow case: tail-call the builtin, then convert the result if needed.
  __ bind(&slow);
  __ LoadRoot(v0, Heap::kTrueValueRootIndex);
  __ LoadRoot(v0, Heap::kFalseValueRootIndex);
}
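// ArgumentsAccessStub: reads an element from the arguments object, or
// allocates a new sloppy/strict arguments object, distinguishing the
// adaptor-frame case (where the formal parameter count differs from the
// actual argument count).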
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  Label slow;
  // The displacement is the offset of the last parameter (if any) relative
  // to the frame pointer.
  const int kDisplacement =
      StandardFrameConstants::kCallerSPOffset - kPointerSize;

  // Check that the key is a smi.
  __ JumpIfNotSmi(a1, &slow);

  // Check index (a1) against the formal parameters count limit (a0).
  __ Branch(&slow, hs, a1, Operand(a0));

  // Read the argument from the stack and return it.
  __ subu(a3, a0, a1);
  __ Addu(a3, fp, Operand(t3));

  // Read the argument from the adaptor frame and return it.
  __ subu(a3, a0, a1);
  __ Addu(a3, a2, Operand(t3));

  // Slow case: handle it in the runtime.
  __ bind(&slow);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  __ Addu(a3, a3, Operand(t3));

  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  Label runtime;

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate;
  __ Branch(&adaptor_frame,
            eq,
            a2,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // No adaptor frame: the argument count equals the parameter count.
  __ b(&try_allocate);

  // We have an adaptor frame; load the actual argument count.
  __ bind(&adaptor_frame);
  __ Addu(a3, a3, Operand(t6));

  Label skip_min;
  __ Branch(&skip_min, lt, a1, Operand(a2));
  __ mov(a1, a2);
  __ bind(&skip_min);

  __ bind(&try_allocate);

  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  // Compute the size of the parameter map (zero for no mapped parameters).
  Label param_map_size;
  __ mov(t5, zero_reg);
  __ addiu(t5, t5, kParameterMapHeaderSize);
  __ bind(&param_map_size);

  // Add the size of the backing store and the arguments object.
  __ Addu(t5, t5, Operand(t6));

  const int kNormalOffset =
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
  const int kAliasedOffset =
      Context::SlotOffset(Context::ALIASED_ARGUMENTS_MAP_INDEX);

  Label skip2_ne, skip2_eq;
  __ Branch(&skip2_ne, ne, a1, Operand(zero_reg));
  __ bind(&skip2_ne);

  __ Branch(&skip2_eq, eq, a1, Operand(zero_reg));
  __ bind(&skip2_eq);

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
  __ AssertNotSmi(a3);

  // Set up the elements pointer in the allocated arguments object.
  Label skip_parameter_map;
  __ LoadRoot(t2, Heap::kSloppyArgumentsElementsMapRootIndex);
  __ Addu(t2, t0, Operand(t6));
  __ Addu(t2, t2, Operand(kParameterMapHeaderSize));

  // Copy the parameter slots and the holes in the arguments.
  Label parameters_loop, parameters_test;
  __ Subu(t5, t5, Operand(a1));
  __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
  __ Addu(a3, t0, Operand(t6));
  __ Addu(a3, a3, Operand(kParameterMapHeaderSize));

  __ jmp(&parameters_test);

  __ bind(&parameters_loop);
  __ Addu(t6, t0, t1);
  __ Addu(t6, a3, t1);
  __ bind(&parameters_test);

  __ bind(&skip_parameter_map);

  // Copy the arguments header and the actual arguments.
  __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex);

  Label arguments_loop, arguments_test;
  __ Subu(t0, t0, Operand(t6));
  __ jmp(&arguments_test);

  __ bind(&arguments_loop);
  __ Addu(t1, a3, Operand(t6));

  __ bind(&arguments_test);
  __ Branch(&arguments_loop, lt, t5, Operand(a2));

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}
void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  Label slow;
  __ Branch(&slow, ne, t0, Operand(zero_reg));

  // Everything is fine; call into the runtime.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
                        masm->isolate()),
      2, 1);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ Branch(&adaptor_frame,
            eq,
            a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  __ Branch(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ Addu(a3, a2, Operand(at));

  // Try the new space allocation. Start out with computing the size
  // of the arguments object and the elements array in words.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg));
  __ bind(&add_arguments_object);

  // Do the allocation of both objects in one go.
  __ Allocate(a1, v0, a2, a3, &runtime,
              static_cast<AllocationFlags>(TAG_OBJECT));

  __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);

  Label done, loop;
  // If there are no actual arguments, we're done.
  __ Branch(&done, eq, a1, Operand(zero_reg));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex);

  // Copy the fixed array slots.
  __ Subu(a1, a1, Operand(1));
  __ Branch(&loop, ne, a1, Operand(zero_reg));

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}
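// RegExpExecStub: either tail-calls the runtime (interpreted regexp) or
// calls generated Irregexp code directly, first unpacking the subject
// string (cons, sliced, external) down to a sequential string, then
// filling the last-match-info array from the static offsets vector.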
void RegExpExecStub::Generate(MacroAssembler* masm) {
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  Register subject = s0;
  Register regexp_data = s1;
  Register last_match_info_elements = s2;

  // Ensure that a RegExp stack is allocated.
  Label runtime;
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(
          isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ li(a0, Operand(address_of_regexp_stack_memory_size));
  __ Branch(&runtime, eq, a0, Operand(zero_reg));

  // Check that the first argument is a JSRegExp object.
  __ JumpIfSmi(a0, &runtime);
  __ GetObjectType(a0, a1, a1);

  // Check that the RegExp has been compiled (data contains a fixed array).
  if (FLAG_debug_code) {
    __ SmiTst(regexp_data, t0);
    __ Check(ne,
             kUnexpectedTypeForRegExpDataFixedArrayExpected,
             t0,
             Operand(zero_reg));
    __ GetObjectType(regexp_data, a0, a0);
    __ Check(eq,
             kUnexpectedTypeForRegExpDataFixedArrayExpected,
             a0,
             Operand(FIXED_ARRAY_TYPE));
  }

  // Reset the offset for a possibly sliced string.
  __ mov(t0, zero_reg);
  __ JumpIfSmi(subject, &runtime);
  __ mov(a3, subject);  // Make a copy of the original subject string.

  Label seq_string, external_string, check_underlying, not_seq_nor_cons,
      not_long_external;
  __ Branch(&seq_string, eq, a1, Operand(zero_reg));

  // Cons string: check that the second part is the empty string.
  __ LoadRoot(a1, Heap::kempty_stringRootIndex);
  __ Branch(&runtime, ne, a0, Operand(a1));

  __ bind(&check_underlying);
  __ Branch(&external_string, ne, at, Operand(zero_reg));

  __ bind(&seq_string);
  // Check that the third argument is a positive Smi less than the subject
  // string length.
  __ JumpIfNotSmi(a1, &runtime);
  __ Branch(&runtime, ls, a3, Operand(a1));

  // Pick the irregexp code for the encoding of the subject string.
  __ Movz(t9, t1, a0);

  // A smi means the code was not compiled: go through the runtime.
  __ JumpIfSmi(t9, &runtime);

  __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
                      1, a0, a2);

  // Allocate stack space for the native RegExp call arguments.
  const int kRegExpExecuteArguments = 9;
  const int kParameterRegisters = 4;
  __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters);

  // Argument 9: pass the current isolate.
  __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
  // Argument 8: indicate that this is a direct call from JavaScript.
  __ li(a0, Operand(1));
  // Argument 7: start (high end) of the backtracking stack memory area.
  __ li(a0, Operand(address_of_regexp_stack_memory_address));
  __ li(a2, Operand(address_of_regexp_stack_memory_size));
  __ addu(a0, a0, a2);
  // Argument 6: set the number of capture registers to zero to force
  // global regexps to behave as non-global.
  __ mov(a0, zero_reg);
  // Argument 5: static offsets vector buffer.
  __ li(a0, Operand(
        ExternalReference::address_of_static_offsets_vector(isolate())));

  // Compute the start and end of the input.
  __ Xor(a3, a3, Operand(1));

  __ sllv(t1, t0, a3);
  __ addu(t0, t2, t1);
  __ sllv(t1, a1, a3);
  __ addu(a2, t0, t1);

  __ sllv(t1, t2, a3);
  __ addu(a3, t0, t1);

  // Argument 1 (a0): subject string.
  __ mov(a0, subject);

  // Locate the code entry and call it.
  DirectCEntryStub stub(isolate());
  stub.GenerateCall(masm, t9);

  __ LeaveExitFrame(false, no_reg, true);

  // Check the result.
  Label success;
  __ Branch(&success, eq, v0, Operand(1));

  // The result must now be an exception. If there is no pending exception
  // already, a stack overflow was detected in RegExp code but no exception
  // has been created yet.
  __ li(a1, Operand(isolate()->factory()->the_hole_value()));
  __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
                                      isolate())));
  __ Branch(&runtime, eq, v0, Operand(a1));

  // Check if it is a termination exception.
  __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ Branch(&termination_exception, eq, v0, Operand(a0));

  __ bind(&termination_exception);
  __ ThrowUncatchable(v0);

  // Failure: return null.
  __ li(v0, Operand(isolate()->factory()->null_value()));

  // Process the result from the native regexp code.
  __ Addu(a1, a1, Operand(2));  // a1 was a smi.

  // Check that the fourth object is a JSArray with fast elements.
  __ JumpIfSmi(a0, &runtime);
  __ GetObjectType(a0, a2, a2);

  __ lw(last_match_info_elements,
        FieldMemOperand(a0, JSArray::kElementsOffset));
  __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
  __ Branch(&runtime, ne, a0, Operand(at));
  __ Branch(&runtime, gt, a2, Operand(at));

  // Store the subject in the last match info and update the write barrier.
  __ mov(a2, subject);
  __ RecordWriteField(last_match_info_elements,
                      RegExpImpl::kLastSubjectOffset,
                      subject,
                      t3,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs);
  __ mov(subject, a2);

  __ RecordWriteField(last_match_info_elements,
                      RegExpImpl::kLastInputOffset,
                      subject,
                      t3,
                      kRAHasNotBeenSaved,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ li(a2, Operand(address_of_static_offsets_vector));

  // Copy the captures into the last match info.
  Label next_capture, done;
  __ Addu(a0,
          last_match_info_elements,
          Operand(RegExpImpl::kFirstCaptureOffset - kHeapObjectTag));
  __ bind(&next_capture);
  __ Subu(a1, a1, Operand(1));
  __ Branch(&done, lt, a1, Operand(zero_reg));

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);

  // Deferred code for string handling.
  // Not a long external string? If so, bail out to the runtime.
  __ bind(&not_seq_nor_cons);

  // External string: make it, offset-wise, look like a sequential string.
  __ bind(&external_string);
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    __ Assert(eq,
              kExternalStringExpectedButNotFound,
              at,
              Operand(zero_reg));
  }
  __ jmp(&seq_string);

  // Short external string or not a string: bail out to the runtime.
  __ bind(&not_long_external);
  __ Branch(&runtime, ne, at, Operand(zero_reg));

  // Sliced string: replace the subject with its parent.
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}
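// GenerateRecordCallTarget caches the called function in a feedback vector
// slot, moving through the uninitialized -> monomorphic -> megamorphic
// lattice; calls to the Array function get an AllocationSite instead of a
// direct function entry.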
static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // a0 : number of arguments to the construct function
  // a1 : the function to call
  // a2 : feedback vector
  // a3 : slot in feedback vector (Smi)
  Label initialize, done, miss, megamorphic, not_array_function;

  DCHECK_EQ(*TypeFeedbackVector::MegamorphicSentinel(masm->isolate()),
            masm->isolate()->heap()->megamorphic_symbol());
  DCHECK_EQ(*TypeFeedbackVector::UninitializedSentinel(masm->isolate()),
            masm->isolate()->heap()->uninitialized_symbol());

  // Load the cache state into t0.
  __ Addu(t0, a2, Operand(t0));

  // A monomorphic cache hit or a premonomorphic cache hit is done.
  __ Branch(&done, eq, t0, Operand(a1));

  if (!FLAG_pretenuring_call_new) {
    // If we came here, we need to see if we are the array function. If we
    // didn't have a matching function, and we have an AllocationSite in the
    // slot, then we have a monomorphic Array function call.
    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
    __ Branch(&miss, ne, t1, Operand(at));

    // Make sure the function is the Array() function.
    __ Branch(&megamorphic, ne, a1, Operand(t0));
    __ jmp(&done);
  }

  __ bind(&miss);

  // A monomorphic miss (i.e. here the cache is not uninitialized) goes
  // megamorphic.
  __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
  __ Branch(&initialize, eq, t0, Operand(at));
  // MegamorphicSentinel is an immortal immovable object (undefined), so no
  // write barrier is needed.
  __ bind(&megamorphic);
  __ Addu(t0, a2, Operand(t0));
  __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);

  // An uninitialized cache is patched with the function.
  __ bind(&initialize);
  if (!FLAG_pretenuring_call_new) {
    // Make sure the function is the Array() function.
    __ Branch(&not_array_function, ne, a1, Operand(t0));

    // The target function is the Array constructor: create an
    // AllocationSite if we don't already have it, then store it in the slot.
    __ MultiPush(kSavedRegs);

    CreateAllocationSiteStub create_stub(masm->isolate());
    __ CallStub(&create_stub);

    __ MultiPop(kSavedRegs);
    __ Branch(&done);

    __ bind(&not_array_function);
  }

  __ Addu(t0, a2, Operand(t0));

  __ bind(&done);
}
static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
  // Do not transform the receiver for strict mode functions or natives.
  int32_t strict_mode_function_mask =
      1 << (SharedFunctionInfo::kStrictModeFunction + kSmiTagSize);
  int32_t native_mask = 1 << (SharedFunctionInfo::kNative + kSmiTagSize);
  __ And(at, t0, Operand(strict_mode_function_mask | native_mask));
  __ Branch(cont, ne, at, Operand(zero_reg));
}
static void EmitSlowCase(MacroAssembler* masm,
                         int argc,
                         Label* non_function) {
  // Check for function proxy.
  __ mov(a2, zero_reg);
  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
  {
    Handle<Code> adaptor =
        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
    __ Jump(adaptor, RelocInfo::CODE_TARGET);
  }

  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
  // of the original receiver from the call site).
  __ bind(non_function);
  __ li(a0, Operand(argc));
  __ mov(a2, zero_reg);
  __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
static void CallFunctionNoFeedback(MacroAssembler* masm,
                                   int argc, bool needs_checks,
                                   bool call_as_method) {
  // a1 : the function to call
  Label slow, non_function, wrap, cont;

  if (needs_checks) {
    // Check that the function is really a JavaScript function.
    __ JumpIfSmi(a1, &non_function);

    // Go to the slow case if we do not have a function.
    __ GetObjectType(a1, t0, t0);
  }

  // Fast case: invoke the function now.
  ParameterCount actual(argc);

  if (call_as_method) {
    if (needs_checks) {
      EmitContinueIfStrictOrNative(masm, &cont);
    }

    // Compute the receiver in sloppy mode.
    if (needs_checks) {
      __ JumpIfSmi(a3, &wrap);
      __ GetObjectType(a3, t0, t0);
    }

    __ bind(&cont);
  }

  if (needs_checks) {
    // Slow case: a non-function was called.
    __ bind(&slow);
    EmitSlowCase(masm, argc, &non_function);
  }

  if (call_as_method) {
    __ bind(&wrap);
    EmitWrapCase(masm, argc, &cont);
  }
}
void CallConstructStub::Generate(MacroAssembler* masm) {
  // a0 : number of arguments
  // a1 : the function to call
  // a2 : feedback vector
  // a3 : slot in feedback vector (Smi, for RecordCallTarget)
  Label slow, non_function_call;

  // Check that the function is not a smi.
  __ JumpIfSmi(a1, &non_function_call);
  // Check that the function is a JSFunction.
  __ GetObjectType(a1, t0, t0);

  if (RecordCallTarget()) {
    GenerateRecordCallTarget(masm);

    __ Addu(t1, a2, at);
    if (FLAG_pretenuring_call_new) {
      // Put the AllocationSite from the feedback vector into a2.
    } else {
      Label feedback_register_initialized;
      // Put the AllocationSite from the feedback vector into a2, or
      // undefined if there is none.
      __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
      __ Branch(&feedback_register_initialized, eq, t1, Operand(at));
      __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
      __ bind(&feedback_register_initialized);
    }

    __ AssertUndefinedOrAllocationSite(a2, t1);
  }

  // Jump to the function-specific construct stub.
  Register jmp_reg = t0;

  __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);

  __ bind(&non_function_call);
  __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET);
}
static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
  Label miss;

  EmitLoadTypeFeedbackVector(masm, a2);

  __ Branch(&miss, ne, a1, Operand(at));

  __ Addu(at, a2, Operand(at));

  // Verify the slot contains an AllocationSite.
  __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
  __ Branch(&miss, ne, t1, Operand(at));

  ArrayConstructorStub stub(masm->isolate(), arg_count());
  __ TailCallStub(&stub);

  // The slow case: we need this no matter what to complete a call after a
  // miss.
  CallFunctionNoFeedback(masm,
                         arg_count(),
                         true,
                         CallAsMethod());

  // Unreachable.
  __ stop("Unexpected code address");
}
void CallICStub::Generate(MacroAssembler* masm) {
  Label extra_checks_or_miss, slow_start;
  Label slow, non_function, wrap, cont;
  Label have_js_function;
  int argc = arg_count();
  ParameterCount actual(argc);

  EmitLoadTypeFeedbackVector(masm, a2);

  // The checks. First, does a1 match the recorded monomorphic target?
  __ Addu(t0, a2, Operand(t0));

  __ Branch(&extra_checks_or_miss, ne, a1, Operand(t0));

  __ bind(&have_js_function);
  if (CallAsMethod()) {
    EmitContinueIfStrictOrNative(masm, &cont);

    // Compute the receiver in sloppy mode.
    __ JumpIfSmi(a3, &wrap);
    __ GetObjectType(a3, t0, t0);

    __ bind(&cont);
  }

  __ bind(&slow);
  EmitSlowCase(masm, argc, &non_function);

  if (CallAsMethod()) {
    __ bind(&wrap);
    EmitWrapCase(masm, argc, &cont);
  }

  __ bind(&extra_checks_or_miss);
  Label miss;

  __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
  __ Branch(&slow_start, eq, t0, Operand(at));
  __ LoadRoot(at, Heap::kUninitializedSymbolRootIndex);
  __ Branch(&miss, eq, t0, Operand(at));

  if (!FLAG_trace_ic) {
    // We are going megamorphic. If the feedback is a JSFunction, it is fine
    // to overwrite it with the megamorphic sentinel.
    __ AssertNotSmi(t0);
    __ GetObjectType(t0, t1, t1);
    __ Addu(t0, a2, Operand(t0));
    __ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);

    __ Branch(&slow_start);
  }

  // We are here because tracing is on or we are going monomorphic.
  __ bind(&miss);
  GenerateMiss(masm);

  // The slow case.
  __ bind(&slow_start);
  // Check that the function is really a JavaScript function.
  __ JumpIfSmi(a1, &non_function);

  // Go to the slow case if we do not have a function.
  __ GetObjectType(a1, t0, t0);
  __ Branch(&have_js_function);
}

void CallICStub::GenerateMiss(MacroAssembler* masm) {
  IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
                                             : IC::kCallIC_Customization_Miss;

  ExternalReference miss = ExternalReference(IC_Utility(id),
                                             masm->isolate());
  __ CallExternalReference(miss, 4);
void StringCharCodeAtGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);

  // If the index is a heap number, try converting it to an integer.
  __ CheckMap(index_,
              result_,
              Heap::kHeapNumberMapRootIndex,
              index_not_number_,
              DONT_DO_SMI_CHECK);
  call_helper.BeforeCall(masm);
  if (index_flags_ == STRING_INDEX_IS_NUMBER) {
    __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
  } else {
    // NumberToSmi discards numbers that are not exact integers.
    __ CallRuntime(Runtime::kNumberToSmi, 1);
  }
  call_helper.AfterCall(masm);

  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
  call_helper.AfterCall(masm);

  __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
  __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);

  __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
void StringCharFromCodeGenerator::GenerateSlow(
    MacroAssembler* masm,
    const RuntimeCallHelper& call_helper) {
  __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);

  call_helper.BeforeCall(masm);
  __ CallRuntime(Runtime::kCharFromCode, 1);
  call_helper.AfterCall(masm);

  __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };

void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          String::Encoding encoding) {
  if (FLAG_debug_code) {
    // Check that the destination is word aligned.
    __ Check(eq,
             kDestinationOfCopyNotAligned,
             scratch,
             Operand(zero_reg));
  }

  // Double the character count for two-byte strings.
  if (encoding == String::TWO_BYTE_ENCODING) {
    __ Addu(count, count, count);
  }

  Register limit = count;  // Read until dest equals this.
  __ Addu(limit, dest, Operand(count));

  Label loop_entry, loop;
  // Copy bytes from src to dest until dest hits limit.
  __ Branch(&loop_entry);
  __ bind(&loop);
  __ lbu(scratch, MemOperand(src));
  __ Addu(src, src, Operand(1));
  __ sb(scratch, MemOperand(dest));
  __ Addu(dest, dest, Operand(1));
  __ bind(&loop_entry);
  __ Branch(&loop, lt, dest, Operand(limit));
}
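// SubStringStub: returns v0 unchanged for whole-string slices, allocates a
// SlicedString when the result is long enough (and string slices are
// enabled), and otherwise copies the characters into a fresh sequential
// one-byte or two-byte string.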
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Get the lengths as untagged integers.
  __ UntagAndJumpIfNotSmi(a2, a2, &runtime);
  __ UntagAndJumpIfNotSmi(a3, a3, &runtime);

  // Both a2 (to) and a3 (from) are smis; a2 holds the requested length.
  __ Branch(&runtime, lt, a3, Operand(zero_reg));  // From < 0.

  __ Branch(&runtime, gt, a3, Operand(a2));        // Fail if from > to.
  __ Subu(a2, a2, a3);

  // Make sure the first argument is a string.
  __ JumpIfSmi(v0, &runtime);

  __ Branch(&runtime, ne, t0, Operand(zero_reg));

  Label single_char;
  __ Branch(&single_char, eq, a2, Operand(1));

  // Short-cut for the case of a trivial substring.
  Label return_v0;
  __ Branch(&return_v0, eq, a2, Operand(t0));
  // Longer than the original string: go through the runtime.
  __ Branch(&runtime, hi, a2, Operand(t0));

  // Deal with the different string types: update the index if necessary
  // and put the underlying string into t1.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  __ Branch(&sliced_string, ne, t0, Operand(zero_reg));

  // Cons string: check whether it is flat (the second part must be the
  // empty string).
  __ LoadRoot(t0, Heap::kempty_stringRootIndex);
  __ Branch(&runtime, ne, t1, Operand(t0));
  __ jmp(&underlying_unpacked);

  __ bind(&sliced_string);
  // Sliced string: fetch the parent and correct the start index.
  __ Addu(a3, a3, t0);
  __ jmp(&underlying_unpacked);

  __ bind(&seq_or_external_string);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    // Allocate a sliced string when the result is long enough. t1 holds
    // the underlying string, a2 the length and a3 the adjusted start index.
    Label copy_routine;
    Label two_byte_slice, set_slice_header;
    __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg));
    __ AllocateOneByteSlicedString(v0, a2, t2, t3, &runtime);
    __ jmp(&set_slice_header);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime);
    __ bind(&set_slice_header);
    __ jmp(&return_v0);

    __ bind(&copy_routine);
  }

  // The subject string can only be an external or sequential string of
  // either encoding at this point.
  Label two_byte_sequential, sequential_string, allocate_result;
  __ Branch(&sequential_string, eq, t0, Operand(zero_reg));

  // Short external strings go through the runtime.
  __ Branch(&runtime, ne, t0, Operand(zero_reg));
  __ jmp(&allocate_result);

  __ bind(&sequential_string);

  __ bind(&allocate_result);
  // Sequential one-byte string: allocate the result.
  __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg));

  __ AllocateOneByteString(v0, a2, t0, t2, t3, &runtime);
  __ Addu(t1, t1, a3);

  // Allocate and copy the resulting two-byte string.
  __ bind(&two_byte_sequential);
  __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime);
  __ Addu(t1, t1, t0);

  __ bind(&return_v0);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1, a3, t0);

  // Just jump to the runtime to create the substring.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);

  __ bind(&single_char);
  StringCharAtGenerator generator(
      v0, a3, a2, v0, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  generator.SkipSlow(masm, &runtime);
}
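// Flat one-byte string comparison helpers: equality compares lengths
// first, then characters; ordered comparison compares the common prefix
// and falls back to the length difference.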
void StringHelper::GenerateFlatOneByteStringEquals(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ Branch(&check_zero_length, eq, length, Operand(scratch2));
  __ bind(&strings_not_equal);
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(Smi::FromInt(NOT_EQUAL)));

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  __ Branch(&compare_chars, ne, length, Operand(zero_reg));
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(Smi::FromInt(EQUAL)));

  // Compare characters.
  __ bind(&compare_chars);
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  scratch3, v0, &strings_not_equal);

  // Characters are equal.
  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(Smi::FromInt(EQUAL)));
}
void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  Label result_not_equal, compare_lengths;
  // Find the minimum length and the length difference.
  __ lw(scratch1, FieldMemOperand(left, String::kLengthOffset));
  __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset));
  __ Subu(scratch3, scratch1, Operand(scratch2));
  Register length_delta = scratch3;
  __ slt(scratch4, scratch2, scratch1);
  __ Movn(scratch1, scratch2, scratch4);
  Register min_length = scratch1;
  __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg));

  // Compare loop.
  GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
                                  scratch4, v0, &result_not_equal);

  // Compare lengths: strings up to min-length are equal.
  __ bind(&compare_lengths);
  // Use length_delta as the result if it's zero.
  __ mov(scratch2, length_delta);
  __ mov(scratch4, zero_reg);
  __ mov(v0, zero_reg);

  __ bind(&result_not_equal);
  // Conditionally update the result based either on length_delta or the
  // last comparison performed in the loop above.
  Label ret;
  __ Branch(&ret, eq, scratch2, Operand(scratch4));
  __ li(v0, Operand(Smi::FromInt(GREATER)));
  __ Branch(&ret, gt, scratch2, Operand(scratch4));
  __ li(v0, Operand(Smi::FromInt(LESS)));
  __ bind(&ret);
  __ Ret();
}
void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch1, Register scratch2, Register scratch3,
    Label* chars_not_equal) {
  // Change the index to run from -length to -1 by adding length to the
  // string start. The loop then ends when the index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ Addu(scratch1, length,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ Addu(left, left, Operand(scratch1));
  __ Addu(right, right, Operand(scratch1));
  __ Subu(length, zero_reg, length);
  Register index = length;  // index = -length.

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ Addu(scratch3, left, index);
  __ lbu(scratch1, MemOperand(scratch3));
  __ Addu(scratch3, right, index);
  __ lbu(scratch2, MemOperand(scratch3));
  __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2));
  __ Addu(index, index, 1);
  __ Branch(&loop, ne, index, Operand(zero_reg));
}
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  Counters* counters = isolate()->counters();

  // If the two strings are the same object, they are equal.
  Label not_same;
  __ Branch(&not_same, ne, a0, Operand(a1));
  __ IncrementCounter(counters->string_compare_native(), 1, a1, a2);
  __ Ret();

  __ bind(&not_same);

  // Check that both objects are sequential one-byte strings.
  __ JumpIfNotBothSequentialOneByteStrings(a1, a0, a2, a3, &runtime);

  // Compare flat one-byte strings natively.
  __ IncrementCounter(counters->string_compare_native(), 1, a2, a3);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}
void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // Load a2 with the allocation site. We stick an undefined dummy value
  // here; it is replaced with the real allocation site later, when the
  // stub is instantiated from this template.
  __ li(a2, handle(isolate()->heap()->undefined_value()));

  if (FLAG_debug_code) {
    __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg));
    __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
    __ Assert(eq, kExpectedAllocationSite, t0, Operand(at));
  }

  // Tail call into the stub that handles binary operations with
  // allocation sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}
void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ Or(a2, a1, a0);
  __ JumpIfNotSmi(a2, &miss);

  if (GetCondition() == eq) {
    // For equality we do not care about the sign of the result.
    __ Ret(USE_DELAY_SLOT);
    __ Subu(v0, a0, a1);
  } else {
    // Untag before subtracting to avoid handling overflow.
    __ SmiUntag(a1);
    __ SmiUntag(a0);
    __ Ret(USE_DELAY_SLOT);
    __ Subu(v0, a1, a0);
  }
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(a1, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(a0, &miss);
  }

  // Inline the double comparison, falling back to the general compare stub
  // if NaN is involved. Load left and right operands.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(a0, &right_smi);
  __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1,
              DONT_DO_SMI_CHECK);
  __ Branch(&left);
  __ bind(&right_smi);
  __ SmiUntag(a2, a0);  // Can't clobber a0 yet.
  FPURegister single_scratch = f6;
  __ mtc1(a2, single_scratch);
  __ cvt_d_w(f2, single_scratch);

  __ bind(&left);
  __ JumpIfSmi(a1, &left_smi);
  __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2,
              DONT_DO_SMI_CHECK);
  __ Branch(&done);
  __ bind(&left_smi);
  __ SmiUntag(a2, a1);  // Can't clobber a1 yet.
  single_scratch = f8;
  __ mtc1(a2, single_scratch);
  __ cvt_d_w(f0, single_scratch);

  __ bind(&done);

  // Return a result of -1, 0 or 1, or use the generic stub for NaN.
  Label fpu_eq, fpu_lt;
  __ BranchF(&fpu_eq, &unordered, eq, f0, f2);

  __ Ret(USE_DELAY_SLOT);
  __ li(v0, Operand(LESS));

  __ bind(&unordered);
  __ bind(&generic_stub);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&miss, ne, a0, Operand(at));
    __ JumpIfSmi(a1, &unordered);
    __ GetObjectType(a1, a2, a2);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&unordered, eq, a1, Operand(at));
  }
void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  Label miss;

  // Registers containing the left and right operands, respectively.
  Register left = a1;
  Register right = a0;
  Register tmp1 = a2;
  Register tmp2 = a3;

  // Check that both operands are internalized strings.
  __ Or(tmp1, tmp1, Operand(tmp2));
  __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask));
  __ Branch(&miss, ne, at, Operand(zero_reg));
void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  Label miss;

  // Registers containing the left and right operands, respectively.
  Register left = a1;
  Register right = a0;
  Register tmp1 = a2;
  Register tmp2 = a3;

  // Check that both operands are unique names.
  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss);
void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing the left and right operands, respectively.
  Register left = a1;
  Register right = a0;
  Register tmp1 = a2;
  Register tmp2 = a3;
  Register tmp3 = t0;
  Register tmp4 = t1;
  Register tmp5 = t2;

  // Check that both operands are strings.
  __ Or(tmp3, tmp1, tmp2);

  __ Branch(&miss, ne, tmp5, Operand(zero_reg));

  // Fast check for identical strings.
  Label left_ne_right;

  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, zero_reg);  // In the delay slot.
  __ bind(&left_ne_right);

  if (equality) {
    // Internalized strings are compared by identity.
    __ Or(tmp3, tmp1, Operand(tmp2));
    Label is_symbol;
    __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg));
    __ bind(&is_symbol);
  }

  // Check that both strings are sequential one-byte strings.
  Label runtime;
  __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4,
                                                    &runtime);

  // Handle more complex cases in the runtime.
  __ bind(&runtime);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }
void CompareICStub::GenerateObjects(MacroAssembler* masm) {
  Label miss;
  __ And(a2, a1, Operand(a0));
  __ JumpIfSmi(a2, &miss);

  __ GetObjectType(a0, a2, a2);
  __ GetObjectType(a1, a2, a2);

  DCHECK(GetCondition() == eq);
  __ Ret(USE_DELAY_SLOT);
  __ subu(v0, a0, a1);

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  __ JumpIfSmi(a2, &miss);

  __ Ret(USE_DELAY_SLOT);
  __ subu(v0, a0, a1);
void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  ExternalReference miss =
      ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());

void DirectCEntryStub::Generate(MacroAssembler* masm) {
  if (FLAG_debug_code && FLAG_enable_slow_asserts) {
    // In case of an error the return address may point to a memory area
    // filled with kZapValue by the GC. Dereference the address and check.
    __ lw(t0, MemOperand(t9));
    __ Assert(ne, kReceivedInvalidReturnAddress, t0,
              Operand(reinterpret_cast<uint32_t>(kZapValue)));
  }
  __ Jump(t9);
}

void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    Register target) {
  intptr_t loc =
      reinterpret_cast<intptr_t>(GetCode().location());
  __ Move(t9, target);
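// NameDictionaryLookupStub: probes a name dictionary. The negative lookup
// proves a name is absent (used along prototype chains); the positive
// lookup finds the entry. Both inline a few probes and fall back to the
// full stub for the remaining ones.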
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register receiver,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register scratch0) {
  // Compute the masked index: (hash + i + i * i) & mask.
  Register index = scratch0;

  Register entity_name = scratch0;
  // Having undefined at this place means the name is not contained.
  Register tmp = properties;
  __ Addu(tmp, properties, scratch0);

  DCHECK(!tmp.is(entity_name));
  __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex);
  __ Branch(done, eq, entity_name, Operand(tmp));

  // Load the hole ready for use below.
  __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex);

  // Stop if we found the property.
  __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));

  Label good;
  __ Branch(&good, eq, entity_name, Operand(tmp));

  // Check if the entry name is not a unique name.
  __ JumpIfNotUniqueNameInstanceType(entity_name, miss);
  __ bind(&good);

  const int spill_mask =
      (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
       a2.bit() | a1.bit() | a0.bit() | v0.bit());

  __ MultiPush(spill_mask);
  __ li(a1, Operand(Handle<Name>(name)));
  NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
  __ CallStub(&stub);
  __ mov(at, v0);
  __ MultiPop(spill_mask);

  __ Branch(done, eq, at, Operand(zero_reg));
  __ Branch(miss, ne, at, Operand(zero_reg));
}
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register scratch1,
                                                      Register scratch2) {
  DCHECK(!elements.is(scratch1));
  DCHECK(!elements.is(scratch2));

  __ Subu(scratch1, scratch1, Operand(1));

  // Compute the masked index: (hash + i + i * i) & mask.
  DCHECK(NameDictionary::GetProbeOffset(i) <
         1 << (32 - Name::kHashFieldOffset));
  __ Addu(scratch2, scratch2, Operand(
      NameDictionary::GetProbeOffset(i) << Name::kHashShift));

  __ And(scratch2, scratch1, scratch2);

  // Scale the index by multiplying by the element size.
  __ sll(at, scratch2, 1);
  __ Addu(scratch2, scratch2, at);

  // Check if the key is identical to the name.
  __ sll(at, scratch2, 2);
  __ Addu(scratch2, elements, at);

  __ Branch(done, eq, name, Operand(at));

  const int spill_mask =
      (ra.bit() | t2.bit() | t1.bit() | t0.bit() |
       a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
      ~(scratch1.bit() | scratch2.bit());

  __ MultiPush(spill_mask);
  if (name.is(a0)) {
    DCHECK(!elements.is(a1));
    __ Move(a1, name);
    __ Move(a0, elements);
  } else {
    __ Move(a0, elements);
    __ Move(a1, name);
  }
  NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
  __ CallStub(&stub);
  __ mov(scratch2, a2);
  __ mov(at, v0);
  __ MultiPop(spill_mask);

  __ Branch(done, ne, at, Operand(zero_reg));
  __ Branch(miss, eq, at, Operand(zero_reg));
}
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  Register dictionary = a0;
  Register key = a1;
  Register index = a2;
  Register mask = a3;
  Register hash = t0;
  Register undefined = t1;
  Register entry_key = t2;

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  __ Subu(mask, mask, Operand(1));

  __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex);

  // Compute the masked index: (hash + i + i * i) & mask.
  DCHECK(NameDictionary::GetProbeOffset(i) <
         1 << (32 - Name::kHashFieldOffset));

  __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));

  __ Branch(&in_dictionary, eq, entry_key, Operand(key));

  // Check if the entry name is not a unique name.
  __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary);

  __ bind(&maybe_in_dictionary);

  __ bind(&in_dictionary);

  __ bind(&not_in_dictionary);
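// RecordWriteStub: the write barrier. The first two instructions are
// patched between a no-op skip and branches into the incremental marking
// paths; the remembered set is updated unless the page flags say it is
// unnecessary.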
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two branch+nop instructions are generated with labels so as
  // to get the offsets fixed up correctly by the bind(Label*) call. We
  // patch them back and forth between a "bne zero_reg" (a nop in this
  // position) and the real branch when we start and stop incremental heap
  // marking. See RecordWriteStub::Patch for details.
  __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting);
  __ nop();
  __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting);
  __ nop();

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  }
  __ Ret();

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);
}

void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);
    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     ne,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }
void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  int argument_count = 3;

  __ li(a2, Operand(ExternalReference::isolate_address(isolate())));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}
void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label need_incremental;
  Label need_incremental_pop_scratch;

  // Let's look at the color of the object: if it is not black we don't
  // have to inform the incremental marker.
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  }

  Label ensure_not_white;
  __ bind(&ensure_not_white);

  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    regs_.address(),   // Scratch.
                    &need_incremental_pop_scratch);

  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(),
                           address(),
                           value(),
                           save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  }

  __ bind(&need_incremental_pop_scratch);

  __ bind(&need_incremental);
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  __ CheckFastElements(a2, t1, &double_elements);
  // Smi values go to their own path; everything else is generic.
  __ JumpIfSmi(a0, &smi_element);
  __ CheckFastSmiElements(a2, t1, &fast_elements);

  // Storing into the array literal requires an elements transition: call
  // into the runtime.
  __ bind(&slow_elements);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // The array literal has ElementsKind of FAST_*_ELEMENTS and the value is
  // an object.
  __ bind(&fast_elements);
  __ Addu(t2, t1, t2);

  // The array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS and the value is a Smi.
  __ bind(&smi_element);
  __ Addu(t2, t1, t2);

  __ bind(&double_elements);
  __ StoreNumberToDoubleElements(a0, a3, t1, t3, t5, a2, &slow_elements);
}
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ lw(a1, MemOperand(fp, parameter_count_offset));
  if (function_mode() == JS_FUNCTION_STUB_MODE) {
    __ Addu(a1, a1, Operand(1));
  }
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
  VectorLoadStub stub(isolate(), state());
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}

void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
  VectorKeyedLoadStub stub(isolate());
  __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    __ push(ra);
    __ CallStub(&stub);
    __ pop(ra);
  }
}

void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // The entry hook is a "push ra" instruction followed by a call.
  const int32_t kReturnAddressDistanceFromFunctionStart =
      Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);

  // Save live volatile registers.
  __ MultiPush(kSavedRegs | ra.bit());

  // Compute the function's address for the first argument.
  __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));

  // Align the stack if necessary.
  int frame_alignment = masm->ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    __ And(sp, sp, Operand(-frame_alignment));
  }

#if defined(V8_HOST_ARCH_MIPS)
  int32_t entry_hook =
      reinterpret_cast<int32_t>(isolate()->function_entry_hook());
  __ li(t9, Operand(entry_hook));
#else
  // Under the simulator we need to indirect the entry hook through a
  // trampoline function at a known address.
  __ li(a2, Operand(ExternalReference::isolate_address(isolate())));

  ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
  __ li(t9, Operand(ExternalReference(&dispatcher,
                                      ExternalReference::BUILTIN_CALL,
                                      isolate())));
#endif

  // Restore the stack pointer if needed, then the saved registers.
  __ MultiPop(kSavedRegs | ra.bit());
  __ Ret();
}
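// Array constructor dispatch: the templates below tail-call the stub
// specialization matching the ElementsKind in a3, transitioning to the
// holey variant when the single argument forces a hole.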
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub, eq, a3, Operand(kind));
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    // Is the low bit set? If so, the array is holey.
    __ And(at, a3, Operand(1));
    __ Branch(&normal_sequence, ne, at, Operand(zero_reg));
  }

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    __ Branch(&normal_sequence, eq, t1, Operand(zero_reg));

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array: look whether we can transition.
    __ Addu(a3, a3, Operand(1));

    if (FLAG_debug_code) {
      __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
      __ Assert(eq, kExpectedAllocationSite, t1, Operand(at));
    }

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub, eq, a3, Operand(kind));
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
  }
}

void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}
void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}
void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ Branch(&not_zero_case, ne, at, Operand(zero_reg));
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ Branch(&not_one_case, gt, a0, Operand(1));
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}
void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
              at, Operand(zero_reg));
    __ GetObjectType(t0, t0, t1);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
              t1, Operand(MAP_TYPE));

    // We should either have undefined in a2 or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(a2, t0);
  }

  Label no_info;
  // Get the elements kind and case on that.
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(&no_info, eq, a2, Operand(at));
void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0, lo, a0, Operand(1));

  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN, hi, a0, Operand(1));

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
  }

  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);
}
void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
              at, Operand(zero_reg));
    __ GetObjectType(a3, a3, t0);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
              t0, Operand(MAP_TYPE));
  }

  // Figure out the right elements kind.
  __ DecodeField<Map::ElementsKindBits>(a3);

  if (FLAG_debug_code) {
    Label done;
    __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
    __ Assert(
        eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
        a3, Operand(FAST_HOLEY_ELEMENTS));
    __ bind(&done);
  }

  Label fast_elements_case;
  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}
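// CallApiFunctionStub / CallApiGetterStub: build the FunctionCallbackInfo
// (or PropertyCallbackInfo) frame on the stack and call through
// CallApiFunctionAndReturn, which handles the handle scope, the return
// value and possible context restoration.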
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  Register callee = a0;
  Register call_data = t0;
  Register holder = a2;
  Register api_function_address = a1;
  Register context = cp;

  typedef FunctionCallbackArguments FCA;

  // Save context, callee and call data.
  __ Push(context, callee, call_data);

  Register scratch = call_data;
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  // Push return value and default return value.
  __ Push(scratch, scratch);
  __ li(scratch,
        Operand(ExternalReference::isolate_address(isolate())));
  // Push isolate and holder.
  __ Push(scratch, holder);

  // Prepare arguments.
  __ mov(scratch, sp);

  const int kApiStackSpace = 4;

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  DCHECK(!api_function_address.is(a0) && !scratch.is(a0));

  __ li(at, Operand(argc));

  const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(isolate());

  AllowExternalCallThatCantCauseGC scope(masm);
  MemOperand context_restore_operand(
      fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first js argument.
  int return_value_offset = 0;
  if (is_store()) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);

  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              kStackUnwindSpace,
                              return_value_operand,
                              &context_restore_operand);
}
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  DCHECK(api_function_address.is(a2));

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              kStackUnwindSpace,
                              MemOperand(fp, 6 * kPointerSize),
                              NULL);
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS
#define kLithiumScratchDouble
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kTransitionInfoOffset
static const Register function_address()
static const Register parameter_count()
static const Register index()
void GenerateReadElement(MacroAssembler *masm)
void GenerateNewSloppySlow(MacroAssembler *masm)
void GenerateNewStrict(MacroAssembler *masm)
void GenerateNewSloppyFast(MacroAssembler *masm)
static const int kLengthOffset
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateDispatchToArrayStub(MacroAssembler *masm, AllocationSiteOverrideMode mode)
ArgumentCountKey argument_count() const
static const int kInstrSize
friend class BlockTrampolinePoolScope
static const int kCallTargetAddressOffset
static void GenerateAheadOfTime(Isolate *isolate)
bool save_doubles() const
static void GenerateAheadOfTime(Isolate *isolate)
CEntryStub(Isolate *isolate, int result_size, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
bool NeedsImmovableCode()
bool call_data_undefined() const
STATIC_ASSERT(Code::kArgumentsBits+2<=kStubMinorKeyBits)
bool RecordCallTarget() const
bool CallAsMethod() const
bool CallAsMethod() const
void GenerateMiss(MacroAssembler *masm)
virtual InlineCacheState GetICState() const OVERRIDE
static const int kValueOffset
static const int kHeaderSize
Condition GetCondition() const
void GenerateInternalizedStrings(MacroAssembler *masm)
void GenerateStrings(MacroAssembler *masm)
CompareICState::State state() const
void GenerateMiss(MacroAssembler *masm)
CompareICState::State left() const
void GenerateGeneric(MacroAssembler *masm)
CompareICState::State right() const
void GenerateObjects(MacroAssembler *masm)
CompareICStub(Isolate *isolate, Token::Value op, CompareICState::State left, CompareICState::State right, CompareICState::State state)
void GenerateNumbers(MacroAssembler *masm)
void GenerateUniqueNames(MacroAssembler *masm)
void GenerateKnownObjects(MacroAssembler *masm)
void GenerateSmis(MacroAssembler *masm)
static const int kFirstOffset
static const int kMinLength
static const int kSecondOffset
@ SLOPPY_ARGUMENTS_MAP_INDEX
@ STRICT_ARGUMENTS_MAP_INDEX
@ ALIASED_ARGUMENTS_MAP_INDEX
static int SlotOffset(int index)
static void GenerateAheadOfTime(Isolate *isolate)
void GenerateCall(MacroAssembler *masm, Register target)
bool skip_fastpath() const
Register destination() const
static const int kCallerFPOffset
static const int kMaxShortLength
static const int kResourceDataOffset
static const int kLengthOffset
static const int kHeaderSize
static const int kNativeContextOffset
static const int kEntrySize
static const int kMantissaBits
static const uint32_t kSignMask
static const int kValueOffset
static const uint32_t kExponentMask
static const int kMantissaBitsInTopWord
static const int kExponentBits
static const int kExponentBias
static const int kExponentShift
static const int kNonMantissaBitsInTopWord
static const int kMapOffset
static const int kStrictArgumentsObjectSize
static const int kSloppyArgumentsObjectSize
static const int kArgumentsCalleeIndex
static const int kArgumentsLengthIndex
void GenerateLightweightMiss(MacroAssembler *masm, ExternalReference miss)
bool HasCallSiteInlineCheck() const
bool HasArgsInRegisters() const
bool ReturnTrueFalseObject() const
static void GenerateStubsAheadOfTime(Isolate *isolate)
void GenerateCase(MacroAssembler *masm, ElementsKind kind)
static const int kJSRegexpStaticOffsetsVectorSize
StackFrame::Type type() const
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kLiteralsOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kElementsOffset
static const int kDataOneByteCodeOffset
static const int kIrregexpCaptureCountOffset
static const int kDataTagOffset
static const int kDataOffset
static const int kDataUC16CodeOffset
static const int kFunctionOffset
static const Register ReceiverRegister()
static const Register NameRegister()
LoadICState state() const
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kBitField2Offset
static const int kPrototypeOffset
ExponentType exponent_type() const
static const Register exponent()
static const size_t kWriteBarrierCounterOffset
static const int kEvacuationCandidateMask
static const int kSkipEvacuationSlotsRecordingMask
static const int kElementsStartOffset
NameDictionaryLookupStub(Isolate *isolate, LookupMode mode)
static const int kCapacityOffset
Register dictionary() const
static void GeneratePositiveLookup(MacroAssembler *masm, Label *miss, Label *done, Register elements, Register name, Register r0, Register r1)
static const int kTotalProbes
static const int kInlinedProbes
static void GenerateNegativeLookup(MacroAssembler *masm, Label *miss, Label *done, Register receiver, Register properties, Handle< Name > name, Register scratch0)
static const int kHashShift
static const int kHashFieldOffset
static void GenerateLoadFunctionPrototype(MacroAssembler *masm, Register receiver, Register scratch1, Register scratch2, Label *miss_label)
static const intptr_t kPageAlignmentMask
ProfileEntryHookStub(Isolate *isolate)
static void MaybeCallEntryHook(MacroAssembler *masm)
static void EntryHookTrampoline(intptr_t function, intptr_t stack_pointer, Isolate *isolate)
static const int kArgsLength
void Restore(MacroAssembler *masm)
void SaveCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void Save(MacroAssembler *masm)
void RestoreCallerSaveRegisters(MacroAssembler *masm, SaveFPRegsMode mode)
void GenerateIncremental(MacroAssembler *masm, Mode mode)
void InformIncrementalMarker(MacroAssembler *masm)
RememberedSetAction remembered_set_action() const
static void PatchBranchIntoNop(MacroAssembler *masm, int pos)
SaveFPRegsMode save_fp_regs_mode() const
@ kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
@ kReturnOnNoNeedToInformIncrementalMarker
void CheckNeedsToInformIncrementalMarker(MacroAssembler *masm, OnNoNeedToInformIncrementalMarker on_no_need, Mode mode)
virtual void Generate(MacroAssembler *masm) OVERRIDE
static const int kLastCaptureCountOffset
static const int kLastSubjectOffset
static const int kLastMatchOverhead
static const int kLastInputOffset
static const int kFirstCaptureOffset
static const Function * FunctionForId(FunctionId id)
static const int kHeaderSize
static const int kConstructStubOffset
static const int kFeedbackVectorOffset
static const int kCompilerHintsOffset
static const int kMinLength
static const int kParentOffset
static const int kOffsetOffset
static Smi * FromInt(int value)
static const int kContextOffset
static const int kCallerSPOffset
static const int kCallerFPOffset
static void GenerateFixedRegStubsAheadOfTime(Isolate *isolate)
StoreBufferOverflowStub(Isolate *isolate, SaveFPRegsMode save_fp)
bool save_doubles() const
StringIndexFlags index_flags_
Label * receiver_not_string_
Label * index_out_of_range_
void GenerateFast(MacroAssembler *masm)
Label * index_not_number_
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
void GenerateFast(MacroAssembler *masm)
void GenerateSlow(MacroAssembler *masm, const RuntimeCallHelper &call_helper)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static void GenerateOneByteCharsCompareLoop(MacroAssembler *masm, Register left, Register right, Register length, Register scratch1, Register scratch2, Label *chars_not_equal)
static void GenerateCompareFlatOneByteStrings(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3, Register scratch4)
static void GenerateCopyCharacters(MacroAssembler *masm, Register dest, Register src, Register count, Register scratch, String::Encoding encoding)
static void GenerateFlatOneByteStringEquals(MacroAssembler *masm, Register left, Register right, Register scratch1, Register scratch2, Register scratch3)
static const int32_t kMaxOneByteCharCode
static const int kLengthOffset
static const int kCallerStackParameterCountFrameOffset
StubFunctionMode function_mode() const
static void GenerateAheadOfTime(Isolate *isolate)
static bool IsOrderedRelationalCompareOp(Value op)
static bool IsEqualityOp(Value op)
static Handle< Object > UninitializedSentinel(Isolate *isolate)
static Handle< Object > MegamorphicSentinel(Isolate *isolate)
static const Register VectorRegister()
void Generate(MacroAssembler *masm)
Register the_heap_number() const
static void GenerateFixedRegStubsAheadOfTime(Isolate *isolate)
WriteInt32ToHeapNumberStub(Isolate *isolate, Register the_int, Register the_heap_number, Register scratch)
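WriteInt32ToHeapNumberStub builds a heap number from an int32 using only integer instructions, assembling the IEEE-754 double bit pattern by hand. A host-side sketch of that conversion, producing just the 64-bit pattern (the heap-number allocation and store are elided):

#include <cstdint>

// Assemble sign, biased exponent, and mantissa for an int32, the same
// arithmetic the stub performs when no FPU is available.
uint64_t Int32ToDoubleBits(int32_t value) {
  if (value == 0) return 0;  // +0.0
  uint64_t sign = 0;
  uint32_t mag = static_cast<uint32_t>(value);
  if (value < 0) {
    sign = uint64_t{1} << 63;
    mag = 0u - mag;  // two's-complement negate; also correct for INT32_MIN
  }
  int msb = 31;  // position of the leading one bit == unbiased exponent
  while (((mag >> msb) & 1u) == 0) msb--;
  uint64_t exponent = static_cast<uint64_t>(1023 + msb);
  // Drop the implicit leading one, left-align the rest in the 52-bit field.
  uint64_t mantissa =
      (static_cast<uint64_t>(mag) << (52 - msb)) & ((uint64_t{1} << 52) - 1);
  return sign | (exponent << 52) | mantissa;
}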
#define IsMipsArchVariant(check)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
bool IsPowerOfTwo32(uint32_t value)
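IsPowerOfTwo32 is the standard single-set-bit test; for reference:

#include <cstdint>

// A power of two has exactly one set bit, so clearing the lowest set bit
// (value & (value - 1)) must leave zero; zero itself is excluded.
inline bool IsPowerOfTwo32(uint32_t value) {
  return value != 0 && (value & (value - 1)) == 0;
}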
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
const uint32_t kStringEncodingMask
const FPUControlRegister FCSR
ElementsKind GetFastElementsKindFromSequenceIndex(int sequence_number)
DONT_TRACK_ALLOCATION_SITE
const RegList kJSCallerSaved
kCheckForInexactConversion
const intptr_t kPointerAlignmentMask
const intptr_t kSmiSignMask
const uint32_t kTwoByteStringTag
const uint32_t kShortExternalStringTag
const RegList kCalleeSaved
const int kFastElementsKindPackedToHoley
const uint32_t kNotStringTag
typedef DwVfpRegister DoubleRegister
const uint32_t kFCSRUnderflowFlagMask
const int kPointerSizeLog2
const uint32_t kStringTag
FAST_HOLEY_DOUBLE_ELEMENTS
TERMINAL_FAST_ELEMENTS_KIND
FAST_HOLEY_SMI_ELEMENTS
Handle< T > handle(T *t, Isolate *isolate)
const uint32_t kOneByteStringTag
MemOperand FieldMemOperand(Register object, int offset)
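FieldMemOperand folds the heap-object tag into the displacement, so a tagged pointer can address its fields without a separate untag step. A sketch assuming kHeapObjectTag == 1 (the MemOperand stand-in here is illustrative, not the assembler type):

#include <cstdint>

const int kHeapObjectTag = 1;  // tagged heap pointers have the low bit set

struct MemOperandModel {  // illustrative stand-in for the assembler type
  int base_reg;
  int32_t offset;
};

// A field at byte offset N inside the object is reached at N - kHeapObjectTag
// from the tagged base register.
MemOperandModel FieldMemOperandModel(int object_reg, int offset) {
  return MemOperandModel{object_reg, offset - kHeapObjectTag};
}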
const intptr_t kObjectAlignmentMask
int GetSequenceIndexFromFastElementsKind(ElementsKind elements_kind)
bool IsFastPackedElementsKind(ElementsKind kind)
const bool FLAG_enable_slow_asserts
const uint32_t kShortExternalStringMask
const uint32_t kFCSRInvalidOpFlagMask
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
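In the fast ElementsKind sequence each packed kind is immediately followed by its holey variant, which is what kFastElementsKindPackedToHoley encodes and what GetHoleyElementsKind exploits. A sketch under that layout assumption (model names and values are illustrative, but the packed-to-holey delta of 1 matches the constant above):

// Fast kinds in transition order, holey directly after packed.
enum ElementsKindModel {
  FAST_SMI = 0,
  FAST_HOLEY_SMI = 1,
  FAST = 2,
  FAST_HOLEY = 3,
  FAST_DOUBLE = 4,
  FAST_HOLEY_DOUBLE = 5,
};
const int kPackedToHoley = 1;

ElementsKindModel GetHoley(ElementsKindModel packed_kind) {
  return static_cast<ElementsKindModel>(packed_kind + kPackedToHoley);
}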
AllocationSiteOverrideMode
DISABLE_ALLOCATION_SITES
const uint32_t kStringRepresentationMask
const uint32_t kFCSROverflowFlagMask
const uint32_t kSlicedNotConsMask
const int kCArgsSlotsSize
const uint32_t kInternalizedTag
const int kNumJSCallerSaved
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
const uint32_t kIsNotInternalizedMask
Register GetRegisterThatIsNotOneOf(Register reg1, Register reg2=no_reg, Register reg3=no_reg, Register reg4=no_reg, Register reg5=no_reg, Register reg6=no_reg)
const uint32_t kIsNotStringMask
const int kNumCalleeSavedFPU
const int kNumCalleeSaved
ElementsKind GetInitialFastElementsKind()
const RegList kCalleeSavedFPU
STRING_INDEX_IS_ARRAY_INDEX
const uint32_t kIsIndirectStringMask
const RegList kCallerSavedFPU
static Handle< Value > Throw(Isolate *isolate, const char *message)
bool is(Register reg) const
#define T(name, string, precedence)