MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false),
      root_array_available_(true) {
  if (isolate() != NULL) {
    code_object_ =
        Handle<Object>(isolate()->heap()->undefined_value(), isolate());
  }
}

static const int64_t kInvalidRootRegisterDelta = -1;

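// On this port the root array is addressed through a dedicated register
// (kRootRegister, r13 on x64), offset by kRootRegisterBias. RootRegisterDelta
// below computes how far an external reference lies from that register's
// value; callers use the delta to fold external-reference accesses into a
// single root-relative operand when it fits in an int32.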
int64_t MacroAssembler::RootRegisterDelta(ExternalReference other) {
  if (predictable_code_size() &&
      (other.address() < reinterpret_cast<Address>(isolate()) ||
       other.address() >= reinterpret_cast<Address>(isolate() + 1))) {
    return kInvalidRootRegisterDelta;
  }
  Address roots_register_value = kRootRegisterBias +
      reinterpret_cast<Address>(isolate()->heap()->roots_array_start());

  int64_t delta = kInvalidRootRegisterDelta;  // Bogus initialization.
  if (kPointerSize == kInt64Size) {
    delta = other.address() - roots_register_value;
  } else {
    // For x32, zero-extend both addresses to 64 bits before subtracting.
    uint64_t o = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(other.address()));
    uint64_t r = static_cast<uint32_t>(
        reinterpret_cast<intptr_t>(roots_register_value));
    delta = o - r;
  }
  return delta;
}

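// ExternalOperand, Load, Store and LoadAddress below all follow the same
// pattern: when the root array register is available and we are not
// serializing, try to encode the target as Operand(kRootRegister, delta);
// otherwise fall back to materializing the full 64-bit address in a scratch
// register. A sketch of the shared fast path (assuming the delta fits in
// 32 bits):
//
//   int64_t delta = RootRegisterDelta(target);
//   if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
//     return Operand(kRootRegister, static_cast<int32_t>(delta));
//   }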
Operand MacroAssembler::ExternalOperand(ExternalReference target,
                                        Register scratch) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(target);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      return Operand(kRootRegister, static_cast<int32_t>(delta));
    }
  }
  Move(scratch, target);
  return Operand(scratch, 0);
}

void MacroAssembler::Load(Register destination, ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  if (destination.is(rax)) {
    load_rax(source);
  }
  // ...
}

void MacroAssembler::Store(ExternalReference destination, Register source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(destination);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      movp(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
      return;
    }
  }
  // Safe code.
  if (source.is(rax)) {
    store_rax(destination);
  }
  // ...
}

void MacroAssembler::LoadAddress(Register destination,
                                 ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      leap(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
      return;
    }
  }
  // Safe code.
  Move(destination, source);
}

int MacroAssembler::LoadAddressSize(ExternalReference source) {
  if (root_array_available_ && !serializer_enabled()) {
    // This calculation depends on the internals of LoadAddress.
    int64_t delta = RootRegisterDelta(source);
    if (delta != kInvalidRootRegisterDelta && is_int32(delta)) {
      // The encoding is leap(dest, Operand(kRootRegister, delta)): 4 bytes
      // with a disp8, 7 bytes with a disp32.
      int size = 4;
      if (!is_int8(static_cast<int32_t>(delta))) {
        size += 3;  // Need a full four-byte displacement.
      }
      return size;
    }
  }
  // Size of movp(destination, source).
  return Assembler::kMoveAddressIntoScratchRegisterInstructionLength;
}

void MacroAssembler::PushAddress(ExternalReference source) {
  int64_t address = reinterpret_cast<int64_t>(source.address());
  if (is_int32(address) && !serializer_enabled()) {
    if (emit_debug_code()) {
      Move(kScratchRegister, kZapValue, Assembler::RelocInfoNone());
    }
    Push(Immediate(static_cast<int32_t>(address)));
    return;
  }
  LoadAddress(kScratchRegister, source);
  Push(kScratchRegister);
}

void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  movp(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::LoadRootIndexed(Register destination,
                                     Register variable_offset,
                                     int fixed_offset) {
  DCHECK(root_array_available_);
  // ...
}


void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  // ...
}


void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  // ...
}


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  cmpp(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(root_array_available_);
  // ...
}

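// RememberedSetHelper records the address of an updated slot in the store
// buffer: it writes |addr| to the current store-buffer top, bumps the top,
// and calls the StoreBufferOverflowStub when the overflow bit is set. The
// and_then parameter selects whether the emitted code returns or falls
// through at the end.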
void MacroAssembler::RememberedSetHelper(Register object,  // For debug tests.
                                         Register addr,
                                         Register scratch,
                                         SaveFPRegsMode save_fp,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  LoadRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Store the slot address into the buffer.
  movp(Operand(scratch, 0), addr);
  // Increment buffer top.
  addp(scratch, Immediate(kPointerSize));
  // Write back the new top of buffer.
  StoreRoot(scratch, Heap::kStoreBufferTopRootIndex);
  // Call stub on end of buffer.
  Label done;
  // Check for end of buffer.
  testp(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}

void MacroAssembler::InNewSpace(Register object,
                                Register scratch,
                                Condition cc,
                                Label* branch,
                                Label::Distance distance) {
  if (serializer_enabled()) {
    // Can't do arithmetic on external references if it might get serialized.
    // The mask isn't really an address. We load it as an external reference
    // in case the size of the new space is different between the snapshot
    // maker and the running system.
    if (scratch.is(object)) {
      Move(kScratchRegister, ExternalReference::new_space_mask(isolate()));
      andp(scratch, kScratchRegister);
    } else {
      Move(scratch, ExternalReference::new_space_mask(isolate()));
      andp(scratch, object);
    }
    Move(kScratchRegister, ExternalReference::new_space_start(isolate()));
    cmpp(scratch, kScratchRegister);
    j(cc, branch, distance);
  } else {
    DCHECK(kPointerSize == kInt64Size
        ? is_int32(static_cast<int64_t>(isolate()->heap()->NewSpaceMask()))
        : kPointerSize == kInt32Size);
    intptr_t new_space_start =
        reinterpret_cast<intptr_t>(isolate()->heap()->NewSpaceStart());
    Move(kScratchRegister, reinterpret_cast<Address>(-new_space_start),
         Assembler::RelocInfoNone());
    if (scratch.is(object)) {
      addp(scratch, kScratchRegister);
    } else {
      leap(scratch, Operand(object, kScratchRegister, times_1, 0));
    }
    andp(scratch,
         Immediate(static_cast<int32_t>(isolate()->heap()->NewSpaceMask())));
    j(cc, branch, distance);
  }
}

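// The RecordWrite* family below implements the generational write barrier:
// stores of smis and stores into pages whose "interesting" flags are clear
// need no barrier, so those cases are filtered out inline before calling
// RecordWriteStub. When --debug-code is on, clobbered inputs are zapped with
// kZapValue so stale uses fail loudly.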
void MacroAssembler::RecordWriteField(
    Register object, int offset, Register value, Register dst,
    SaveFPRegsMode save_fp, RememberedSetAction remembered_set_action,
    SmiCheck smi_check, PointersToHereCheck pointers_to_here_check_for_value) {
  Label done;
  // Skip the barrier if the stored value is a smi.
  JumpIfSmi(value, &done);
  // ...
  if (emit_debug_code()) {
    Label ok;
    testb(dst, Immediate((1 << kPointerSizeLog2) - 1));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  if (emit_debug_code()) {
    // Zap clobbered inputs under --debug-code to provoke errors.
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(dst, kZapValue, Assembler::RelocInfoNone());
  }
}

void MacroAssembler::RecordWriteArray(
    Register object, Register value, Register index, SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  Label done;
  // Skip the barrier if the stored value is a smi.
  JumpIfSmi(value, &done);

  // The index register is clobbered to hold the address of the written slot.
  Register dst = index;
  // ...
  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  if (emit_debug_code()) {
    Move(value, kZapValue, Assembler::RelocInfoNone());
    Move(index, kZapValue, Assembler::RelocInfoNone());
  }
}

void MacroAssembler::RecordWriteForMap(Register object,
                                       Register map,
                                       Register dst,
                                       SaveFPRegsMode fp_mode) {
  AssertNotSmi(object);

  if (emit_debug_code()) {
    Label ok;
    CompareMap(map, isolate()->factory()->meta_map());
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  if (!FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // Compute the address of the map slot.
  leap(dst, FieldOperand(object, HeapObject::kMapOffset));

  Label done;
  CheckPageFlag(map,
                map,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  // ...
  bind(&done);

  // Count the write barriers emitted in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  if (emit_debug_code()) {
    // Zap clobbered registers under --debug-code to provoke errors.
    Move(dst, kZapValue, Assembler::RelocInfoNone());
    Move(map, kZapValue, Assembler::RelocInfoNone());
  }
}

void MacroAssembler::RecordWrite(
    Register object, Register address, Register value, SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action, SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmpp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of smis and stores into the young generation.
  Label done;

  // Skip the barrier if the stored value is a smi.
  JumpIfSmi(value, &done);

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }

  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count the write barriers emitted in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  if (emit_debug_code()) {
    Move(address, kZapValue, Assembler::RelocInfoNone());
    Move(value, kZapValue, Assembler::RelocInfoNone());
  }
}

void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}

void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Label ok;
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedDoubleArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
                Heap::kFixedCOWArrayMapRootIndex);
    j(equal, &ok, Label::kNear);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}

void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L, Label::kNear);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}

void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    Label alignment_as_expected;
    testp(rsp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected, Label::kNear);
    int3();  // Abort if the stack is not aligned as expected.
    bind(&alignment_as_expected);
  }
}

void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  testl(result, result);  // A zero result may be a negative zero.
  // ...
}

void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  RecordComment("Abort message: ");
  RecordComment(GetBailoutReason(reason));

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  Move(kScratchRegister, Smi::FromInt(static_cast<int>(reason)),
       Assembler::RelocInfoNone());
  Push(kScratchRegister);

  if (!has_frame_) {
    // Claim a stack frame exists without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // Control will not return here.
  int3();
}

void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();
}

void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The constants for the maximum number of digits of an array index cached
  // in the hash field must not conflict with the number of bits reserved.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!hash.is(index)) {
    movl(index, hash);
  }
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}

void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, check that the actual number of arguments matches.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  Set(rax, num_arguments);
  LoadAddress(rbx, ExternalReference(f, isolate()));
  CEntryStub ces(isolate(), f->result_size, save_doubles);
  CallStub(&ces);
}

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  Set(rax, num_arguments);
  LoadAddress(rbx, ext);

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}

void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // ...
  Set(rax, num_arguments);
  JumpToExternalReference(ext, result_size);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}

static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}

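// The API-call helpers below keep the HandleScope data (next, limit, level)
// in fields reachable from one base register. Offset() above turns the
// limit/level external references into small integer displacements from the
// handle_scope_next_address base, so the bookkeeping compiles to plain
// base+offset moves.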
void MacroAssembler::PrepareCallApiFunction(int arg_stack_space) {
  EnterApiExitFrame(arg_stack_space);
}

void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
    Register thunk_last_arg,
    int stack_space,
    Operand return_value_operand,
    Operand* context_restore_operand) {
  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  Factory* factory = isolate()->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate()),
      next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate()),
      next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());

  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  Move(base_reg, next_address);
  movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  Move(rax, ExternalReference::is_profiling_address(isolate()));
  cmpb(Operand(rax, 0), Immediate(0));
  j(zero, &profiler_disabled);

  // The third parameter is the address of the actual getter function.
  Move(thunk_last_arg, function_address);
  Move(rax, thunk_ref);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function directly.
  Move(rax, function_address);

  bind(&end_profiler_check);

  // Call the api function!
  call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1);
    LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  movp(rax, return_value_operand);

  // No more valid handles (the result handle was the last one). Restore
  // the previous handle scope.
  subl(Operand(base_reg, kLevelOffset), Immediate(1));
  movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  Move(rsi, scheduled_exception_address);
  Cmp(Operand(rsi, 0), factory->the_hole_value());
  j(not_equal, &promote_scheduled_exception);
  bind(&exception_handled);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);
  // ...
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kTrueValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kFalseValueRootIndex);
  j(equal, &ok, Label::kNear);

  CompareRoot(return_value, Heap::kNullValueRootIndex);
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    movp(rsi, *context_restore_operand);
  }
  LeaveApiExitFrame(!restore_context);
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  {
    FrameScope frame(this, StackFrame::INTERNAL);
    CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  jmp(&exception_handled);

  // The HandleScope limit has changed. Delete allocated extensions.
  bind(&delete_allocated_handles);
  movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  movp(prev_limit_reg, rax);
  LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate()));
  LoadAddress(rax,
              ExternalReference::delete_handle_scope_extensions(isolate()));
  call(rax);
  movp(rax, prev_limit_reg);
  jmp(&leave_exit_frame);
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
                                             int result_size) {
  // Set the entry point and jump to the C entry runtime stub.
  LoadAddress(rbx, ext);
  CEntryStub ces(isolate(), result_size);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}

void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected arguments of the builtin.
  ParameterCount expected(0);
  GetBuiltinEntry(rdx, id);
  InvokeCode(rdx, expected, expected, flag, call_wrapper);
}

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the builtins object into the target register.
  movp(target, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  movp(target, FieldOperand(target,
                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}

void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  DCHECK(!target.is(rdi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(rdi, id);
  movp(target, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
}

#define REG(Name) { kRegister_ ## Name ## _Code }

static const Register saved_regs[] = {
  // ...
};

#undef REG

static const int kNumberOfSavedRegs = sizeof(saved_regs) / sizeof(Register);

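// PushCallerSaved/PopCallerSaved save and restore the registers in saved_regs
// (minus up to three exclusions), plus all XMM registers when fp_mode is
// kSaveFPRegs. Pop must mirror Push exactly, which is why the pop loop below
// walks saved_regs in reverse order.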
void MacroAssembler::PushCallerSaved(SaveFPRegsMode fp_mode,
                                     Register exclusion1,
                                     Register exclusion2,
                                     Register exclusion3) {
  // We don't allow a GC during a store buffer overflow, so there is no need
  // to store the registers in any particular way, but we do have to store
  // and restore them.
  for (int i = 0; i < kNumberOfSavedRegs; i++) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      pushq(reg);
    }
  }
  // R12 to r15 are callee save on all platforms.
  if (fp_mode == kSaveFPRegs) {
    subp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(rsp, i * kDoubleSize), reg);
    }
  }
}

void MacroAssembler::PopCallerSaved(SaveFPRegsMode fp_mode,
                                    Register exclusion1,
                                    Register exclusion2,
                                    Register exclusion3) {
  if (fp_mode == kSaveFPRegs) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(rsp, i * kDoubleSize));
    }
    addp(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
    Register reg = saved_regs[i];
    if (!reg.is(exclusion1) && !reg.is(exclusion2) && !reg.is(exclusion3)) {
      popq(reg);
    }
  }
}

void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
  xorps(dst, dst);  // Break the dependence on the destination register.
  cvtlsi2sd(dst, src);
}


void MacroAssembler::Cvtlsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtlsi2sd(dst, src);
}

void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsxbq(dst, src);
  } else if (r.IsUInteger8()) {
    movzxbl(dst, src);
  } else if (r.IsInteger16()) {
    movsxwq(dst, src);
  } else if (r.IsUInteger16()) {
    movzxwl(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    movp(dst, src);
  }
}


void MacroAssembler::Store(const Operand& dst, Register src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    movb(dst, src);
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    movw(dst, src);
  } else if (r.IsInteger32()) {
    movl(dst, src);
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    movp(dst, src);
  }
}

void MacroAssembler::Set(Register dst, int64_t x) {
  if (x == 0) {
    xorl(dst, dst);
  } else if (is_uint32(x)) {
    movl(dst, Immediate(static_cast<uint32_t>(x)));
  } else if (is_int32(x)) {
    movq(dst, Immediate(static_cast<int32_t>(x)));
  } else {
    movq(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, intptr_t x) {
  if (kPointerSize == kInt64Size) {
    if (is_int32(x)) {
      movp(dst, Immediate(static_cast<int32_t>(x)));
    } else {
      Set(kScratchRegister, x);
      movp(dst, kScratchRegister);
    }
  } else {
    movp(dst, Immediate(static_cast<int32_t>(x)));
  }
}

bool MacroAssembler::IsUnsafeInt(const int32_t x) {
  static const int kMaxBits = 17;
  return !is_intn(x, kMaxBits);
}

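// SafeMove/SafePush obfuscate attacker-controllable immediates wider than
// kMaxBits by XOR-ing them with the per-process JIT cookie at emit time and
// XOR-ing again at run time, so the raw constant never appears verbatim in
// the instruction stream.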
void MacroAssembler::SafeMove(Register dst, Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // The JIT cookie can be converted to a smi.
      Move(dst, Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(dst, kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      movp(dst, Immediate(value ^ jit_cookie()));
      xorp(dst, Immediate(jit_cookie()));
    }
  } else {
    Move(dst, src);
  }
}


void MacroAssembler::SafePush(Smi* src) {
  if (IsUnsafeInt(src->value()) && jit_cookie() != 0) {
    if (SmiValuesAre32Bits()) {
      // The JIT cookie can be converted to a smi.
      Push(Smi::FromInt(src->value() ^ jit_cookie()));
      Move(kScratchRegister, Smi::FromInt(jit_cookie()));
      xorp(Operand(rsp, 0), kScratchRegister);
    } else {
      DCHECK(SmiValuesAre31Bits());
      int32_t value = static_cast<int32_t>(reinterpret_cast<intptr_t>(src));
      Push(Immediate(value ^ jit_cookie()));
      xorp(Operand(rsp, 0), Immediate(jit_cookie()));
    }
  } else {
    Push(src);
  }
}

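// Smi constants: with 32-bit smi values the payload lives in the upper half
// of the 64-bit word (kSmiShift == 32), e.g. Smi::FromInt(5) is the bit
// pattern 0x0000000500000000. GetSmiConstant/LoadSmiConstant exploit
// kSmiConstantRegister (r12, pre-loaded with a known small smi) to
// synthesize small constants cheaply.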
Register MacroAssembler::GetSmiConstant(Smi* source) {
  int value = source->value();
  if (value == 0) {
    xorl(kScratchRegister, kScratchRegister);
    return kScratchRegister;
  }
  // ...
  LoadSmiConstant(kScratchRegister, source);
  return kScratchRegister;
}

void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
  if (emit_debug_code()) {
    Move(dst, Smi::FromInt(kSmiConstantRegisterValue),
         Assembler::RelocInfoNone());
    cmpp(dst, kSmiConstantRegister);
    Assert(equal, kUninitializedKSmiConstantRegister);
  }
  int value = source->value();
  if (value == 0) {
    xorl(dst, dst);
    return;
  }
  bool negative = value < 0;
  unsigned int uvalue = negative ? -value : value;

  switch (uvalue) {
    // Small constants are built from kSmiConstantRegister.
    // ...
    default:
      Move(dst, source, Assembler::RelocInfoNone());
      return;
  }
  if (negative) {
    negp(dst);
  }
}

void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movl(dst, src);
  }
  shlp(dst, Immediate(kSmiShift));
}

void MacroAssembler::Integer32ToSmiField(const Operand& dst, Register src) {
  if (emit_debug_code()) {
    testb(dst, Immediate(0x01));
    Label ok;
    j(zero, &ok, Label::kNear);
    Abort(kInteger32ToSmiFieldWritingToNonSmiLocation);
    bind(&ok);
  }
  movl(Operand(dst, kSmiShift / kBitsPerByte), src);
}

void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
                                                Register src,
                                                int constant) {
  if (dst.is(src)) {
    addl(dst, Immediate(constant));
  } else {
    leal(dst, Operand(src, constant));
  }
  shlp(dst, Immediate(kSmiShift));
}

void MacroAssembler::SmiToInteger32(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }
  shrp(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
  // ...
}


void MacroAssembler::SmiToInteger64(Register dst, Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  if (!dst.is(src)) {
    movp(dst, src);
  }
  sarp(dst, Immediate(kSmiShift));
}


void MacroAssembler::SmiToInteger64(Register dst, const Operand& src) {
  movp(dst, src);
  SmiToInteger64(dst, dst);
}

void MacroAssembler::SmiTest(Register src) {
  AssertSmi(src);
  testp(src, src);
}


void MacroAssembler::SmiCompare(Register smi1, Register smi2) {
  AssertSmi(smi1);
  AssertSmi(smi2);
  cmpp(smi1, smi2);
}


void MacroAssembler::SmiCompare(Register dst, Smi* src) {
  AssertSmi(dst);
  Cmp(dst, src);
}


void MacroAssembler::Cmp(Register dst, Smi* src) {
  DCHECK(!dst.is(kScratchRegister));
  if (src->value() == 0) {
    testp(dst, dst);
  } else {
    Register constant_reg = GetSmiConstant(src);
    cmpp(dst, constant_reg);
  }
}

void MacroAssembler::SmiCompare(Register dst, const Operand& src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Register src) {
  AssertSmi(dst);
  AssertSmi(src);
  cmpp(dst, src);
}


void MacroAssembler::SmiCompare(const Operand& dst, Smi* src) {
  AssertSmi(dst);
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(src->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(dst, Immediate(src));
  }
}


void MacroAssembler::Cmp(const Operand& dst, Smi* src) {
  // The Operand cannot use the smi register.
  Register smi_reg = GetSmiConstant(src);
  DCHECK(!dst.AddressUsesRegister(smi_reg));
  cmpp(dst, smi_reg);
}

void MacroAssembler::SmiCompareInteger32(const Operand& dst, Register src) {
  if (SmiValuesAre32Bits()) {
    cmpl(Operand(dst, kSmiShift / kBitsPerByte), src);
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(kScratchRegister, dst);
    cmpl(kScratchRegister, src);
  }
}

void MacroAssembler::PositiveSmiTimesPowerOfTwoToInteger64(Register dst,
                                                           Register src,
                                                           int power) {
  DCHECK(power >= 0);
  DCHECK(power < 64);
  if (power == 0) {
    SmiToInteger64(dst, src);
    return;
  }
  if (!dst.is(src)) {
    movp(dst, src);
  }
  if (power < kSmiShift) {
    sarp(dst, Immediate(kSmiShift - power));
  } else if (power > kSmiShift) {
    shlp(dst, Immediate(power - kSmiShift));
  }
}

void MacroAssembler::PositiveSmiDivPowerOfTwoToInteger32(Register dst,
                                                         Register src,
                                                         int power) {
  DCHECK((0 <= power) && (power < 32));
  if (dst.is(src)) {
    shrp(dst, Immediate(power + kSmiShift));
  } else {
    UNIMPLEMENTED();  // Not used.
  }
}

void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
                                 Label* on_not_smis,
                                 Label::Distance near_jump) {
  if (dst.is(src1) || dst.is(src2)) {
    DCHECK(!src1.is(kScratchRegister));
    DCHECK(!src2.is(kScratchRegister));
    movp(kScratchRegister, src1);
    orp(kScratchRegister, src2);
    JumpIfNotSmi(kScratchRegister, on_not_smis, near_jump);
    movp(dst, kScratchRegister);
  } else {
    movp(dst, src1);
    orp(dst, src2);
    JumpIfNotSmi(dst, on_not_smis, near_jump);
  }
}

Condition MacroAssembler::CheckSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckSmi(const Operand& src) {
  STATIC_ASSERT(kSmiTag == 0);
  testb(src, Immediate(kSmiTagMask));
  return zero;
}


Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
  STATIC_ASSERT(kSmiTag == 0);
  // Test that both the smi tag and the sign bit are zero.
  movp(kScratchRegister, src);
  rolp(kScratchRegister, Immediate(1));
  testb(kScratchRegister, Immediate(3));
  return zero;
}

Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  STATIC_ASSERT(kSmiTag == 0);
  leal(kScratchRegister, Operand(first, second, times_1, 0));
  testb(kScratchRegister, Immediate(0x03));
  return zero;
}

Condition MacroAssembler::CheckBothNonNegativeSmi(Register first,
                                                  Register second) {
  if (first.is(second)) {
    return CheckNonNegativeSmi(first);
  }
  movp(kScratchRegister, first);
  orp(kScratchRegister, second);
  rolp(kScratchRegister, Immediate(1));
  testl(kScratchRegister, Immediate(3));
  return zero;
}

Condition MacroAssembler::CheckEitherSmi(Register first,
                                         Register second,
                                         Register scratch) {
  if (first.is(second)) {
    return CheckSmi(first);
  }
  if (scratch.is(second)) {
    andl(scratch, first);
  } else {
    if (!scratch.is(first)) {
      movl(scratch, first);
    }
    andl(scratch, second);
  }
  testb(scratch, Immediate(kSmiTagMask));
  return zero;
}

Condition MacroAssembler::CheckIsMinSmi(Register src) {
  // ...
}


Condition MacroAssembler::CheckInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // A 32-bit integer value can always be converted to a smi.
    return always;
  } else {
    DCHECK(SmiValuesAre31Bits());
    cmpl(src, Immediate(0xc0000000));
    return positive;
  }
}


Condition MacroAssembler::CheckUInteger32ValidSmiValue(Register src) {
  if (SmiValuesAre32Bits()) {
    // An unsigned 32-bit integer value is valid as long as the high bit
    // is not set.
    testl(src, src);
    return positive;
  } else {
    DCHECK(SmiValuesAre31Bits());
    testl(src, Immediate(0xc0000000));
    return zero;
  }
}

void MacroAssembler::CheckSmiToIndicator(Register dst, Register src) {
  if (dst.is(src)) {
    andl(dst, Immediate(kSmiTagMask));
  } else {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  }
}


void MacroAssembler::CheckSmiToIndicator(Register dst, const Operand& src) {
  if (!(src.AddressUsesRegister(dst))) {
    movl(dst, Immediate(kSmiTagMask));
    andl(dst, src);
  } else {
    movl(dst, src);
    andl(dst, Immediate(kSmiTagMask));
  }
}

void MacroAssembler::JumpIfValidSmiValue(Register src,
                                         Label* on_valid,
                                         Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(is_valid, on_valid, near_jump);
}


void MacroAssembler::JumpIfNotValidSmiValue(Register src,
                                            Label* on_invalid,
                                            Label::Distance near_jump) {
  Condition is_valid = CheckInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}


void MacroAssembler::JumpIfUIntValidSmiValue(Register src,
                                             Label* on_valid,
                                             Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(is_valid, on_valid, near_jump);
}


void MacroAssembler::JumpIfUIntNotValidSmiValue(Register src,
                                                Label* on_invalid,
                                                Label::Distance near_jump) {
  Condition is_valid = CheckUInteger32ValidSmiValue(src);
  j(NegateCondition(is_valid), on_invalid, near_jump);
}

void MacroAssembler::JumpIfSmi(Register src,
                               Label* on_smi,
                               Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(smi, on_smi, near_jump);
}


void MacroAssembler::JumpIfNotSmi(Register src,
                                  Label* on_not_smi,
                                  Label::Distance near_jump) {
  Condition smi = CheckSmi(src);
  j(NegateCondition(smi), on_not_smi, near_jump);
}

void MacroAssembler::JumpUnlessNonNegativeSmi(
    Register src, Label* on_not_smi_or_negative,
    Label::Distance near_jump) {
  Condition non_negative_smi = CheckNonNegativeSmi(src);
  j(NegateCondition(non_negative_smi), on_not_smi_or_negative, near_jump);
}

void MacroAssembler::JumpIfSmiEqualsConstant(Register src,
                                             Smi* constant,
                                             Label* on_equals,
                                             Label::Distance near_jump) {
  SmiCompare(src, constant);
  j(equal, on_equals, near_jump);
}

void MacroAssembler::JumpIfNotBothSmi(Register src1,
                                      Register src2,
                                      Label* on_not_both_smi,
                                      Label::Distance near_jump) {
  Condition both_smi = CheckBothSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}


void MacroAssembler::JumpUnlessBothNonNegativeSmi(Register src1,
                                                  Register src2,
                                                  Label* on_not_both_smi,
                                                  Label::Distance near_jump) {
  Condition both_smi = CheckBothNonNegativeSmi(src1, src2);
  j(NegateCondition(both_smi), on_not_both_smi, near_jump);
}

void MacroAssembler::SmiAddConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    switch (constant->value()) {
      // Small constants use kSmiConstantRegister arithmetic.
      // ...
      default:
        Register constant_reg = GetSmiConstant(constant);
        addp(dst, constant_reg);
        break;
    }
  } else {
    switch (constant->value()) {
      // Small constants are added via a single lea.
      // ...
      default:
        LoadSmiConstant(dst, constant);
        addp(dst, src);
        break;
    }
  }
}

void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) {
  if (constant->value() != 0) {
    if (SmiValuesAre32Bits()) {
      addl(Operand(dst, kSmiShift / kBitsPerByte),
           Immediate(constant->value()));
    } else {
      DCHECK(SmiValuesAre31Bits());
      addp(dst, Immediate(constant));
    }
  }
}

void MacroAssembler::SmiAddConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    addp(dst, kScratchRegister);
    if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
      // ...
      jmp(bailout_label, near_jump);
      // ...
    } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
      // ...
      j(overflow, bailout_label, near_jump);
      // ...
    } else {
      CHECK(mode.IsEmpty());
    }
  } else {
    DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
    DCHECK(mode.Contains(BAILOUT_ON_OVERFLOW));
    LoadSmiConstant(dst, constant);
    addp(dst, src);
    j(overflow, bailout_label, near_jump);
  }
}

void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    subp(dst, constant_reg);
  } else {
    if (constant->value() == Smi::kMinValue) {
      LoadSmiConstant(dst, constant);
      // Adding and subtracting the min-value give the same result; they only
      // differ on the overflow bit, which we don't check here.
      addp(dst, src);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-constant->value()));
      addp(dst, src);
    }
  }
}

void MacroAssembler::SmiSubConstant(Register dst,
                                    Register src,
                                    Smi* constant,
                                    SmiOperationExecutionMode mode,
                                    Label* bailout_label,
                                    Label::Distance near_jump) {
  if (constant->value() == 0) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    LoadSmiConstant(kScratchRegister, constant);
    subp(dst, kScratchRegister);
    if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) {
      // ...
      jmp(bailout_label, near_jump);
      // ...
    } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) {
      // ...
      j(overflow, bailout_label, near_jump);
      // ...
    } else {
      CHECK(mode.IsEmpty());
    }
  } else {
    DCHECK(mode.Contains(PRESERVE_SOURCE_REGISTER));
    DCHECK(mode.Contains(BAILOUT_ON_OVERFLOW));
    if (constant->value() == Smi::kMinValue) {
      DCHECK(!dst.is(kScratchRegister));
      movp(dst, src);
      LoadSmiConstant(kScratchRegister, constant);
      subp(dst, kScratchRegister);
      j(overflow, bailout_label, near_jump);
    } else {
      // Subtract by adding the negation.
      LoadSmiConstant(dst, Smi::FromInt(-(constant->value())));
      addp(dst, src);
      j(overflow, bailout_label, near_jump);
    }
  }
}

void MacroAssembler::SmiNeg(Register dst,
                            Register src,
                            Label* on_smi_result,
                            Label::Distance near_jump) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    movp(kScratchRegister, src);
    negp(dst);
    // Negation fails to produce a smi for 0 and Smi::kMinValue.
    testp(dst, kScratchRegister);
    j(not_zero, on_smi_result, near_jump);
    movp(src, kScratchRegister);
  } else {
    movp(dst, src);
    negp(dst);
    cmpp(dst, src);
    j(not_equal, on_smi_result, near_jump);
  }
}

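// SmiAddHelper/SmiSubHelper are templated over T in {Register, Operand} so
// one definition serves both addressing forms. Note the dst.is(src1) case:
// on overflow the operation is undone (subp/addp) to restore src1 before
// jumping to the bailout label.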
template<class T>
static void SmiAddHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->addp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->subp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->addp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}

void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiAddHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}


void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiAddHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}

void MacroAssembler::SmiAdd(Register dst,
                            Register src1,
                            Register src2) {
  // No overflow checking. Use only when overflow is known to be impossible.
  if (!dst.is(src1)) {
    if (emit_debug_code()) {
      movp(kScratchRegister, src1);
      addp(kScratchRegister, src2);
      Check(no_overflow, kSmiAdditionOverflow);
    }
    leap(dst, Operand(src1, src2, times_1, 0));
  } else {
    addp(dst, src2);
    Assert(no_overflow, kSmiAdditionOverflow);
  }
}

template<class T>
static void SmiSubHelper(MacroAssembler* masm,
                         Register dst,
                         Register src1,
                         T src2,
                         Label* on_not_smi_result,
                         Label::Distance near_jump) {
  if (dst.is(src1)) {
    Label done;
    masm->subp(dst, src2);
    masm->j(no_overflow, &done, Label::kNear);
    // Restore src1.
    masm->addp(dst, src2);
    masm->jmp(on_not_smi_result, near_jump);
    masm->bind(&done);
  } else {
    masm->movp(dst, src1);
    masm->subp(dst, src2);
    masm->j(overflow, on_not_smi_result, near_jump);
  }
}

void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!dst.is(src2));
  SmiSubHelper<Register>(this, dst, src1, src2, on_not_smi_result, near_jump);
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK_NOT_NULL(on_not_smi_result);
  DCHECK(!src2.AddressUsesRegister(dst));
  SmiSubHelper<Operand>(this, dst, src1, src2, on_not_smi_result, near_jump);
}

template<class T>
static void SmiSubNoOverflowHelper(MacroAssembler* masm,
                                   Register dst,
                                   Register src1,
                                   T src2) {
  // No overflow checking. Use only when overflow is known to be impossible,
  // e.g. when subtracting two positive smis.
  if (!dst.is(src1)) {
    masm->movp(dst, src1);
  }
  masm->subp(dst, src2);
  masm->Assert(no_overflow, kSmiSubtractionOverflow);
}


void MacroAssembler::SmiSub(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  SmiSubNoOverflowHelper<Register>(this, dst, src1, src2);
}


void MacroAssembler::SmiSub(Register dst,
                            Register src1,
                            const Operand& src2) {
  SmiSubNoOverflowHelper<Operand>(this, dst, src1, src2);
}

void MacroAssembler::SmiMul(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));

  if (dst.is(src1)) {
    Label failure, zero_correct_result;
    movp(kScratchRegister, src1);  // Create backup for later testing.
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, &failure, Label::kNear);

    // Check for negative zero result. If the product is zero and one
    // argument is negative, go to the slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);

    movp(dst, kScratchRegister);
    xorp(dst, src2);
    // Result was positive zero.
    j(positive, &zero_correct_result, Label::kNear);

    bind(&failure);  // Reused failure exit, restores src1.
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);

    bind(&zero_correct_result);
    Set(dst, 0);

    bind(&correct_result);
  } else {
    SmiToInteger64(dst, src1);
    imulp(dst, src2);
    j(overflow, on_not_smi_result, near_jump);
    // Check for negative zero result. If the product is zero and one
    // argument is negative, go to the slow case.
    Label correct_result;
    testp(dst, dst);
    j(not_zero, &correct_result, Label::kNear);
    // One of src1 and src2 is zero; check whether the other is negative.
    movp(kScratchRegister, src1);
    xorp(kScratchRegister, src2);
    j(negative, on_not_smi_result, near_jump);
    bind(&correct_result);
  }
}

void MacroAssembler::SmiDiv(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));

  // Check for 0 divisor (result is +/-Infinity).
  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  // We need to rule out dividing Smi::kMinValue by -1, since that would
  // overflow in idiv and raise an exception. We combine this with a
  // negative zero test (zero with a negative dividend is not a smi result).
  // We overshoot a little and go to the slow case if we divide min-value
  // by any negative value, not just -1.
  Label safe_div;
  testl(rax, Immediate(~Smi::kMinValue));
  j(not_zero, &safe_div, Label::kNear);
  testp(src2, src2);
  if (src1.is(rax)) {
    j(positive, &safe_div, Label::kNear);
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
  } else {
    j(negative, on_not_smi_result, near_jump);
  }
  bind(&safe_div);

  SmiToInteger32(src2, src2);
  // Sign extend src1 into edx:eax.
  cdq();
  idivl(src2);
  Integer32ToSmi(src2, src2);
  // Check that the remainder is zero.
  testl(rdx, rdx);
  if (src1.is(rax)) {
    Label smi_result;
    j(zero, &smi_result, Label::kNear);
    movp(src1, kScratchRegister);
    jmp(on_not_smi_result, near_jump);
    bind(&smi_result);
  } else {
    j(not_zero, on_not_smi_result, near_jump);
  }
  if (!dst.is(src1) && src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  Integer32ToSmi(dst, rax);
}

void MacroAssembler::SmiMod(Register dst,
                            Register src1,
                            Register src2,
                            Label* on_not_smi_result,
                            Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!src1.is(kScratchRegister));
  DCHECK(!src2.is(kScratchRegister));
  DCHECK(!src2.is(rax));
  DCHECK(!src2.is(rdx));
  DCHECK(!src1.is(rdx));
  DCHECK(!src1.is(src2));

  testp(src2, src2);
  j(zero, on_not_smi_result, near_jump);

  if (src1.is(rax)) {
    movp(kScratchRegister, src1);
  }
  SmiToInteger32(rax, src1);
  SmiToInteger32(src2, src2);

  // Test for the edge case of dividing Smi::kMinValue by -1 (will overflow).
  Label safe_div;
  cmpl(rax, Immediate(Smi::kMinValue));
  j(not_equal, &safe_div, Label::kNear);
  cmpl(src2, Immediate(-1));
  j(not_equal, &safe_div, Label::kNear);
  // Retag inputs and go to the slow case.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  jmp(on_not_smi_result, near_jump);
  bind(&safe_div);

  // Sign extend eax into edx:eax.
  cdq();
  idivl(src2);
  // Restore smi tags on inputs.
  Integer32ToSmi(src2, src2);
  if (src1.is(rax)) {
    movp(src1, kScratchRegister);
  }
  // Check for a negative zero result. If the result is zero and the
  // dividend is negative, go slow to return a floating point negative zero.
  Label smi_result;
  testl(rdx, rdx);
  j(not_zero, &smi_result, Label::kNear);
  testp(src1, src1);
  j(negative, on_not_smi_result, near_jump);
  bind(&smi_result);
  Integer32ToSmi(dst, rdx);
}

void MacroAssembler::SmiNot(Register dst, Register src) {
  // ...
}

void MacroAssembler::SmiAnd(Register dst, Register src1, Register src2) {
  DCHECK(!dst.is(src2));
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  andp(dst, src2);
}

void MacroAssembler::SmiAndConstant(Register dst, Register src, Smi* constant) {
  if (constant->value() == 0) {
    Set(dst, 0);
  } else if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    andp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    andp(dst, src);
  }
}

void MacroAssembler::SmiOr(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    DCHECK(!src1.is(src2));
    movp(dst, src1);
  }
  orp(dst, src2);
}


void MacroAssembler::SmiOrConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    orp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    orp(dst, src);
  }
}

void MacroAssembler::SmiXor(Register dst, Register src1, Register src2) {
  if (!dst.is(src1)) {
    DCHECK(!src1.is(src2));
    movp(dst, src1);
  }
  xorp(dst, src2);
}


void MacroAssembler::SmiXorConstant(Register dst, Register src, Smi* constant) {
  if (dst.is(src)) {
    DCHECK(!dst.is(kScratchRegister));
    Register constant_reg = GetSmiConstant(constant);
    xorp(dst, constant_reg);
  } else {
    LoadSmiConstant(dst, constant);
    xorp(dst, src);
  }
}

void MacroAssembler::SmiShiftArithmeticRightConstant(Register dst,
                                                     Register src,
                                                     int shift_value) {
  DCHECK(is_uint5(shift_value));
  if (shift_value > 0) {
    if (dst.is(src)) {
      sarp(dst, Immediate(shift_value + kSmiShift));
      shlp(dst, Immediate(kSmiShift));
    } else {
      UNIMPLEMENTED();  // Not used.
    }
  }
}

void MacroAssembler::SmiShiftLeftConstant(Register dst,
                                          Register src,
                                          int shift_value,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift_value > 0) {
      // The shift amount is specified by the lower 5 bits.
      shlq(dst, Immediate(shift_value & 0x1f));
    }
  } else {
    DCHECK(SmiValuesAre31Bits());
    if (dst.is(src)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      SmiToInteger32(dst, src);
      shll(dst, Immediate(shift_value));
      JumpIfNotValidSmiValue(dst, on_not_smi_result, near_jump);
      Integer32ToSmi(dst, dst);
    }
  }
}

void MacroAssembler::SmiShiftLogicalRightConstant(
    Register dst, Register src, int shift_value,
    Label* on_not_smi_result, Label::Distance near_jump) {
  // Logical right shift interprets the shifted smi as an unsigned number,
  // and may give a different result than an arithmetic shift does.
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(kScratchRegister));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift_value == 0) {
      testp(dst, dst);
      j(negative, on_not_smi_result, near_jump);
    }
    shrp(dst, Immediate(shift_value + kSmiShift));
    shlp(dst, Immediate(kSmiShift));
  } else {
    DCHECK(SmiValuesAre31Bits());
    SmiToInteger32(dst, src);
    shrp(dst, Immediate(shift_value));
    JumpIfUIntNotValidSmiValue(dst, on_not_smi_result, near_jump);
    Integer32ToSmi(dst, dst);
  }
}

void MacroAssembler::SmiShiftLeft(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smi_result,
                                  Label::Distance near_jump) {
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(rcx));
    if (!dst.is(src1)) {
      movp(dst, src1);
    }
    // Untag the shift amount.
    SmiToInteger32(rcx, src2);
    // The shift amount is specified by the lower 5 bits.
    andp(rcx, Immediate(0x1f));
    // ...
  } else {
    DCHECK(SmiValuesAre31Bits());
    DCHECK(!dst.is(src2));
    DCHECK(!dst.is(rcx));

    if (src1.is(rcx) || src2.is(rcx)) {
      movq(kScratchRegister, rcx);
    }
    if (dst.is(src1)) {
      UNIMPLEMENTED();  // Not used.
    } else {
      Label valid_result;
      SmiToInteger32(dst, src1);
      SmiToInteger32(rcx, src2);
      // ...
      JumpIfValidSmiValue(dst, &valid_result, Label::kNear);
      // As src1 or src2 could not be dst, we do not need to restore them
      // before clobbering dst.
      if (src1.is(rcx) || src2.is(rcx)) {
        if (src1.is(rcx)) {
          movq(src1, kScratchRegister);
        } else {
          movq(src2, kScratchRegister);
        }
      }
      jmp(on_not_smi_result, near_jump);
      bind(&valid_result);
      Integer32ToSmi(dst, dst);
    }
  }
}

void MacroAssembler::SmiShiftLogicalRight(Register dst,
                                          Register src1,
                                          Register src2,
                                          Label* on_not_smi_result,
                                          Label::Distance near_jump) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!dst.is(src2));
  DCHECK(!dst.is(rcx));
  if (src1.is(rcx) || src2.is(rcx)) {
    movq(kScratchRegister, rcx);
  }
  if (dst.is(src1)) {
    UNIMPLEMENTED();  // Not used.
  } else {
    Label valid_result;
    SmiToInteger32(dst, src1);
    SmiToInteger32(rcx, src2);
    // ...
    JumpIfUIntValidSmiValue(dst, &valid_result, Label::kNear);
    // As src1 or src2 could not be dst, we do not need to restore them
    // before clobbering dst.
    if (src1.is(rcx) || src2.is(rcx)) {
      if (src1.is(rcx)) {
        movq(src1, kScratchRegister);
      } else {
        movq(src2, kScratchRegister);
      }
    }
    jmp(on_not_smi_result, near_jump);
    bind(&valid_result);
    Integer32ToSmi(dst, dst);
  }
}

void MacroAssembler::SmiShiftArithmeticRight(Register dst,
                                             Register src1,
                                             Register src2) {
  DCHECK(!dst.is(kScratchRegister));
  DCHECK(!dst.is(rcx));

  SmiToInteger32(rcx, src2);
  if (!dst.is(src1)) {
    movp(dst, src1);
  }
  SmiToInteger32(dst, dst);
  // ...
  Integer32ToSmi(dst, dst);
}

void MacroAssembler::SelectNonSmi(Register dst,
                                  Register src1,
                                  Register src2,
                                  Label* on_not_smis,
                                  Label::Distance near_jump) {
#ifdef DEBUG
  // Both operands must not be smis.
  Condition not_both_smis = NegateCondition(CheckBothSmi(src1, src2));
  Check(not_both_smis, kBothRegistersWereSmisInSelectNonSmi);
#endif
  // Exactly one of src1 and src2 may be a smi; select the non-smi into dst.
  movl(kScratchRegister, Immediate(kSmiTagMask));
  andp(kScratchRegister, src1);
  testl(kScratchRegister, src2);
  // If non-zero then both are smis.
  j(not_zero, on_not_smis, near_jump);
  // ...
}

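// SmiIndex packs a register and a scale factor; SmiToIndex converts a smi
// to an array-index register adjusted for a desired extra shift, so callers
// can use the result directly in a scaled-index memory operand.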
SmiIndex MacroAssembler::SmiToIndex(Register dst,
                                    Register src,
                                    int shift) {
  if (SmiValuesAre32Bits()) {
    DCHECK(is_uint6(shift));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    // ...
    return SmiIndex(dst, times_1);
  }
}

SmiIndex MacroAssembler::SmiToNegativeIndex(Register dst,
                                            Register src,
                                            int shift) {
  if (SmiValuesAre32Bits()) {
    // Register src holds a positive smi.
    DCHECK(is_uint6(shift));
    if (!dst.is(src)) {
      movp(dst, src);
    }
    negp(dst);
    if (shift < kSmiShift) {
      sarp(dst, Immediate(kSmiShift - shift));
    } else {
      shlp(dst, Immediate(shift - kSmiShift));
    }
    return SmiIndex(dst, times_1);
  } else {
    DCHECK(SmiValuesAre31Bits());
    // ...
    return SmiIndex(dst, times_1);
  }
}

void MacroAssembler::AddSmiField(Register dst, const Operand& src) {
  if (SmiValuesAre32Bits()) {
    DCHECK(!dst.is(kScratchRegister));
    addl(dst, Operand(src, kSmiShift / kBitsPerByte));
  } else {
    DCHECK(SmiValuesAre31Bits());
    // ...
  }
}


void MacroAssembler::Push(Smi* source) {
  intptr_t smi = reinterpret_cast<intptr_t>(source);
  if (is_int32(smi)) {
    Push(Immediate(static_cast<int32_t>(smi)));
  } else {
    Register constant = GetSmiConstant(source);
    Push(constant);
  }
}

void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
  DCHECK(!src.is(scratch));
  movp(scratch, src);
  // High bits.
  shrp(src, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  shlp(src, Immediate(kSmiShift));
  Push(src);
  // Low bits.
  shlp(scratch, Immediate(kSmiShift));
  Push(scratch);
}


void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
  DCHECK(!dst.is(scratch));
  Pop(scratch);
  // Low bits.
  shrp(scratch, Immediate(kSmiShift));
  Pop(dst);
  shrp(dst, Immediate(kSmiShift));
  // High bits.
  shlp(dst, Immediate(kPointerSize * kBitsPerByte - kSmiShift));
  orp(dst, scratch);
}

void MacroAssembler::Test(const Operand& src, Smi* source) {
  if (SmiValuesAre32Bits()) {
    testl(Operand(src, kIntSize), Immediate(source->value()));
  } else {
    DCHECK(SmiValuesAre31Bits());
    testl(src, Immediate(source));
  }
}

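// The number-string cache is a FixedArray of (number, string) pairs indexed
// by a hash: the smi value itself for smis, the xor of the two 32-bit halves
// for heap numbers. Since addressing modes cannot scale by 16, the entry
// index is pre-multiplied before the lookup below.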
void MacroAssembler::LookupNumberStringCache(Register object,
                                             Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* not_found) {
  // Use of registers. Register result is used as a temporary.
  Register number_string_cache = result;
  Register mask = scratch1;
  Register scratch = scratch2;

  // Load the number string cache.
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);

  // Make the hash mask from the length of the number string cache. It
  // contains two elements (number and string) for each cache entry.
  SmiToInteger32(
      mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
  shrl(mask, Immediate(1));
  subp(mask, Immediate(1));  // Make mask.

  // Calculate the entry in the number string cache. The hash value in the
  // number string cache for smis is just the smi value, and the hash for
  // doubles is the xor of the upper and lower words. See
  // Heap::GetNumberStringCache.
  Label is_smi;
  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
  CheckMap(object,
           isolate()->factory()->heap_number_map(),
           not_found,
           DONT_DO_SMI_CHECK);

  STATIC_ASSERT(8 == kDoubleSize);
  movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
  xorp(scratch, FieldOperand(object, HeapNumber::kValueOffset));
  andp(scratch, mask);
  // Each entry in the cache consists of two pointer-sized fields, but
  // times_twice_pointer_size (multiplication by 16) is not a supported
  // scale factor, so the entry index is pre-multiplied before the lookup.
  shlp(scratch, Immediate(kPointerSizeLog2 + 1));

  Register index = scratch;
  Register probe = mask;
  movp(probe,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize));
  JumpIfSmi(probe, not_found);
  movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
  ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
  j(parity_even, not_found);  // Bail out if NaN is involved.
  j(not_equal, not_found);    // The cache did not contain this value.
  jmp(&load_result_from_cache);

  bind(&is_smi);
  SmiToInteger32(scratch, object);
  andp(scratch, mask);
  shlp(scratch, Immediate(kPointerSizeLog2 + 1));

  // Check if the entry is the smi we are looking for.
  cmpp(object,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize));
  j(not_equal, not_found);

  // Get the result from the cache.
  bind(&load_result_from_cache);
  movp(result,
       FieldOperand(number_string_cache,
                    index,
                    times_1,
                    FixedArray::kHeaderSize + kPointerSize));
  IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
}

void MacroAssembler::JumpIfNotString(Register object,
                                     Register object_map,
                                     Label* not_string,
                                     Label::Distance near_jump) {
  Condition is_smi = CheckSmi(object);
  j(is_smi, not_string, near_jump);
  CmpObjectType(object, FIRST_NONSTRING_TYPE, object_map);
  j(above_equal, not_string, near_jump);
}

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(
    Register first_object, Register second_object, Register scratch1,
    Register scratch2, Label* on_fail, Label::Distance near_jump) {
  // Check that neither object is a smi.
  Condition either_smi = CheckEitherSmi(first_object, second_object);
  j(either_smi, on_fail, near_jump);

  // Load instance type for both strings.
  movp(scratch1, FieldOperand(first_object, HeapObject::kMapOffset));
  movp(scratch2, FieldOperand(second_object, HeapObject::kMapOffset));
  movzxbl(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzxbl(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}

void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure,
    Label::Distance near_jump) {
  if (!scratch.is(instance_type)) {
    movl(scratch, instance_type);
  }

  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;

  andl(scratch, Immediate(kFlatOneByteStringMask));
  cmpl(scratch, Immediate(kStringTag | kSeqStringTag | kOneByteStringTag));
  j(not_equal, failure, near_jump);
}

void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first_object_instance_type, Register second_object_instance_type,
    Register scratch1, Register scratch2, Label* on_fail,
    Label::Distance near_jump) {
  // Load instance type for both strings.
  movp(scratch1, first_object_instance_type);
  movp(scratch2, second_object_instance_type);

  // Check that both are flat one-byte strings.
  DCHECK(kNotStringTag != 0);
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;

  andl(scratch1, Immediate(kFlatOneByteStringMask));
  andl(scratch2, Immediate(kFlatOneByteStringMask));
  // Interleave the bits to check both scratch1 and scratch2 in one test.
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  leap(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmpl(scratch1,
       Immediate(kFlatOneByteStringTag + (kFlatOneByteStringTag << 3)));
  j(not_equal, on_fail, near_jump);
}

template<class T>
static void JumpIfNotUniqueNameHelper(MacroAssembler* masm,
                                      T operand_or_register,
                                      Label* not_unique_name,
                                      Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  masm->testb(operand_or_register,
              Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  masm->j(zero, &succeed, Label::kNear);
  masm->cmpb(operand_or_register, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
  masm->j(not_equal, not_unique_name, distance);

  masm->bind(&succeed);
}

void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Operand>(this, operand, not_unique_name, distance);
}


void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  JumpIfNotUniqueNameHelper<Register>(this, reg, not_unique_name, distance);
}

void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    movp(dst, src);
  }
}

void MacroAssembler::Move(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(dst, source);
  }
}


void MacroAssembler::Move(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Move(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    movp(dst, kScratchRegister);
  }
}

void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}


void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Cmp(dst, Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    cmpp(dst, kScratchRegister);
  }
}


void MacroAssembler::Push(Handle<Object> source) {
  AllowDeferredHandleDereference smi_check;
  if (source->IsSmi()) {
    Push(Smi::cast(*source));
  } else {
    MoveHeapObject(kScratchRegister, source);
    Push(kScratchRegister);
  }
}

void MacroAssembler::MoveHeapObject(Register result,
                                    Handle<Object> object) {
  AllowDeferredHandleDereference using_raw_address;
  DCHECK(object->IsHeapObject());
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    Move(result, cell, RelocInfo::CELL);
    movp(result, Operand(result, 0));
  } else {
    Move(result, object, RelocInfo::EMBEDDED_OBJECT);
  }
}

void MacroAssembler::LoadGlobalCell(Register dst, Handle<Cell> cell) {
  if (dst.is(rax)) {
    AllowDeferredHandleDereference embedding_raw_address;
    load_rax(cell.location(), RelocInfo::CELL);
  } else {
    Move(dst, cell, RelocInfo::CELL);
    movp(dst, Operand(dst, 0));
  }
}

void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    addp(rsp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::DropUnderReturnAddress(int stack_elements,
                                            Register scratch) {
  DCHECK(stack_elements > 0);
  if (kPointerSize == kInt64Size && stack_elements == 1) {
    popq(MemOperand(rsp, 0));
    return;
  }

  PopReturnAddressTo(scratch);
  Drop(stack_elements);
  PushReturnAddressFrom(scratch);
}

void MacroAssembler::Push(Register src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), src);
  }
}


void MacroAssembler::Push(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), kScratchRegister);
  }
}


void MacroAssembler::PushQuad(const Operand& src) {
  if (kPointerSize == kInt64Size) {
    pushq(src);
  } else {
    movp(kScratchRegister, src);
    PushQuad(kScratchRegister);
  }
}


void MacroAssembler::Push(Immediate value) {
  if (kPointerSize == kInt64Size) {
    pushq(value);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), value);
  }
}


void MacroAssembler::PushImm32(int32_t imm32) {
  if (kPointerSize == kInt64Size) {
    pushq_imm32(imm32);
  } else {
    leal(rsp, Operand(rsp, -4));
    movp(Operand(rsp, 0), Immediate(imm32));
  }
}


void MacroAssembler::Pop(Register dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    // x32 cannot pop directly to a 32-bit slot; move, then adjust rsp.
    movp(dst, Operand(rsp, 0));
    leal(rsp, Operand(rsp, 4));
  }
}

void MacroAssembler::Pop(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    Register scratch = dst.AddressUsesRegister(kScratchRegister)
        ? kSmiConstantRegister : kScratchRegister;
    movp(scratch, Operand(rsp, 0));
    movp(dst, scratch);
    leal(rsp, Operand(rsp, 4));
    if (scratch.is(kSmiConstantRegister)) {
      // Restore kSmiConstantRegister.
      movp(kSmiConstantRegister,
           reinterpret_cast<void*>(Smi::FromInt(kSmiConstantRegisterValue)),
           Assembler::RelocInfoNone());
    }
  }
}

void MacroAssembler::PopQuad(const Operand& dst) {
  if (kPointerSize == kInt64Size) {
    popq(dst);
  } else {
    popq(kScratchRegister);
    movp(dst, kScratchRegister);
  }
}

void MacroAssembler::LoadSharedFunctionInfoSpecialField(Register dst,
                                                        Register base,
                                                        int offset) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  if (kPointerSize == kInt64Size) {
    movsxlq(dst, FieldOperand(base, offset));
  } else {
    movp(dst, FieldOperand(base, offset));
    SmiToInteger32(dst, dst);
  }
}


void MacroAssembler::TestBitSharedFunctionInfoSpecialField(Register base,
                                                           int offset,
                                                           int bits) {
  DCHECK(offset > SharedFunctionInfo::kLengthOffset &&
         offset <= SharedFunctionInfo::kSize &&
         (((offset - SharedFunctionInfo::kLengthOffset) / kIntSize) % 2 == 1));
  int byte_offset = bits / kBitsPerByte;
  int bit_in_byte = bits & (kBitsPerByte - 1);
  testb(FieldOperand(base, offset + byte_offset), Immediate(1 << bit_in_byte));
}

void MacroAssembler::Jump(ExternalReference ext) {
  LoadAddress(kScratchRegister, ext);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(const Operand& op) {
  if (kPointerSize == kInt64Size) {
    jmp(op);
  } else {
    movp(kScratchRegister, op);
    jmp(kScratchRegister);
  }
}


void MacroAssembler::Jump(Address destination, RelocInfo::Mode rmode) {
  Move(kScratchRegister, destination, rmode);
  jmp(kScratchRegister);
}


void MacroAssembler::Jump(Handle<Code> code_object, RelocInfo::Mode rmode) {
  // TODO(X64): Inline this.
  jmp(code_object, rmode);
}

int MacroAssembler::CallSize(ExternalReference ext) {
  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
  return LoadAddressSize(ext) +
         Assembler::kCallScratchRegisterInstructionLength;
}


void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(ext);
#endif
  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}


void MacroAssembler::Call(const Operand& op) {
  if (kPointerSize == kInt64Size) {
    call(op);
  } else {
    movp(kScratchRegister, op);
    call(kScratchRegister);
  }
}


void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(destination);
#endif
  Move(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
  CHECK_EQ(pc_offset(), end_position);
#endif
}


void MacroAssembler::Call(Handle<Code> code_object,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id) {
#ifdef DEBUG
  int end_position = pc_offset() + CallSize(code_object);
#endif
  DCHECK(RelocInfo::IsCodeTarget(rmode) ||
      rmode == RelocInfo::CODE_AGE_SEQUENCE);
  call(code_object, rmode, ast_id);
#ifdef DEBUG
  CHECK_EQ(end_position, pc_offset());
#endif
}

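// Pushad/Popad save the JS-visible registers around safepoints, skipping
// rsp, rbp and the assembler-reserved registers (kScratchRegister,
// kSmiConstantRegister, kRootRegister); the extra lea keeps rsp at the full
// kNumSafepointRegisters frame expected by the safepoint tables.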
void MacroAssembler::Pushad() {
  // Push all general-purpose registers that the safepoint mechanism saves.
  // ...
  STATIC_ASSERT(11 == kNumSafepointSavedRegisters);
  // Use lea for symmetry with Popad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, -sp_delta));
}


void MacroAssembler::Popad() {
  // Popad must be the inverse of Pushad.
  int sp_delta =
      (kNumSafepointRegisters - kNumSafepointSavedRegisters) * kPointerSize;
  leap(rsp, Operand(rsp, sp_delta));
  // ...
}


void MacroAssembler::Dropad() {
  addp(rsp, Immediate(kNumSafepointRegisters * kPointerSize));
}

void MacroAssembler::StoreToSafepointRegisterSlot(Register dst,
                                                  const Immediate& imm) {
  movp(SafepointRegisterSlot(dst), imm);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  movp(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  movp(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(rsp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}

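// Try-handler frames are linked through Isolate::kHandlerAddress. Each
// handler pushes: a frame pointer (NULL for JS_ENTRY), a context, the
// encoded index/kind state, the code object, and the previous handler, and
// then stores rsp as the new top of the handler chain.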
void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // ...
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame, so we save NULL for
    // rbp. We expect the code throwing an exception to check rbp before
    // dereferencing it to restore the context.
    pushq(Immediate(0));   // NULL frame pointer.
    Push(Smi::FromInt(0)); // No context.
  } else {
    pushq(rbp);
    Push(rsi);
  }

  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  Push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Push(ExternalOperand(handler_address));
  // Set this new handler as the current one.
  movp(ExternalOperand(handler_address), rsp);
}

void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Pop(ExternalOperand(handler_address));
  addp(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}

void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // rax = exception, rdi = code object, rdx = state.
  movp(rbx, FieldOperand(rdi, Code::kHandlerTableOffset));
  shrp(rdx, Immediate(StackHandler::kKindWidth));
  movp(rdx,
       FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
  SmiToInteger64(rdx, rdx);
  leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
  jmp(rdi);
}

void MacroAssembler::Throw(Register value) {
  // The exception is expected in rax.
  if (!value.is(rax)) {
    movp(rax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  movp(rsp, ExternalOperand(handler_address));
  // Restore the next handler.
  Pop(ExternalOperand(handler_address));

  // Remove the code object and state; compute the handler address in rdi.
  Pop(rdi);  // Code object.
  Pop(rdx);  // Offset and state.

  // Restore the context and frame pointer.
  Pop(rsi);   // Context.
  popq(rbp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (rbp == 0) == (rsi == 0), so we could test either
  // rbp or rsi.
  Label skip;
  testp(rsi, rsi);
  j(zero, &skip, Label::kNear);
  movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  bind(&skip);

  JumpToHandlerEntry();
}

void MacroAssembler::ThrowUncatchable(Register value) {
  // The exception is expected in rax.
  if (!value.is(rax)) {
    movp(rax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  Load(rsp, handler_address);

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  movp(rsp, Operand(rsp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  testl(Operand(rsp, StackHandlerConstants::kStateOffset),
        Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to the next handler past the top ENTRY one.
  Pop(ExternalOperand(handler_address));

  // Remove the code object and state; compute the handler address in rdi.
  Pop(rdi);  // Code object.
  Pop(rdx);  // Offset and state.

  // Clear the context and frame pointer (0 was saved in the handler).
  Pop(rsi);
  popq(rbp);

  JumpToHandlerEntry();
}

void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    PopReturnAddressTo(scratch);
    addp(rsp, Immediate(bytes_dropped));
    PushReturnAddressFrom(scratch);
    ret(0);
  }
}

void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}

void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       Immediate(static_cast<int8_t>(type)));
}

void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleyElementValue));
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Immediate(Map::kMaximumBitField2FastHoleySmiElementValue));
  j(above, fail, distance);
}

void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register index,
    XMMRegister xmm_scratch,
    Label* fail,
    int elements_offset) {
  Label smi_value, is_nan, maybe_nan, not_nan, have_double_value, done;

  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  // ...
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  movsd(xmm_scratch, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  bind(&have_double_value);
  movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
  jmp(&done);

  bind(&maybe_nan);
  // Could be a NaN or an Infinity. If the fraction is not zero, it is a NaN;
  // otherwise it is an Infinity and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmpl(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  // Convert all NaNs to the same canonical NaN value when they are stored in
  // the double array.
  Set(kScratchRegister,
      bit_cast<uint64_t>(
          FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
  movq(xmm_scratch, kScratchRegister);
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  SmiToInteger32(kScratchRegister, maybe_number);
  Cvtlsi2sd(xmm_scratch, kScratchRegister);
  movsd(FieldOperand(elements, index, times_8,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        xmm_scratch);
  bind(&done);
}

void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}

void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  testl(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  decb(reg);             // 0 if negative, 255 if positive.
  bind(&done);
}

void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister temp_xmm_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(temp_xmm_reg, temp_xmm_reg);
  cvtsd2si(result_reg, input_reg);
  testl(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmpl(result_reg, Immediate(1));
  j(overflow, &conv_failure, Label::kNear);
  movl(result_reg, Immediate(0));
  setcc(sign, result_reg);
  subl(result_reg, Immediate(1));
  andl(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, 0);
  ucomisd(input_reg, temp_xmm_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, 255);
  bind(&done);
}

void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src) {
  if (FLAG_debug_code) {
    cmpq(src, Immediate(0xffffffff));
    Assert(below_equal, kInputGPRIsExpectedToHaveUpper32Cleared);
  }
  cvtqsi2sd(dst, src);
}

void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}

void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done;
  movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
  cvttsd2siq(result_reg, xmm0);
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  // Slow case.
  if (input_reg.is(result_reg)) {
    subp(rsp, Immediate(kDoubleSize));
    movsd(MemOperand(rsp, 0), xmm0);
    SlowTruncateToI(result_reg, rsp, 0);
    addp(rsp, Immediate(kDoubleSize));
  } else {
    SlowTruncateToI(result_reg, input_reg);
  }

  bind(&done);
  // Keep the invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}

void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2siq(result_reg, input_reg);
  cmpq(result_reg, Immediate(1));
  j(no_overflow, &done, Label::kNear);

  subp(rsp, Immediate(kDoubleSize));
  movsd(MemOperand(rsp, 0), input_reg);
  SlowTruncateToI(result_reg, rsp, 0);
  addp(rsp, Immediate(kDoubleSize));

  bind(&done);
  // Keep the invariant that the upper 32 bits are zero.
  movl(result_reg, result_reg);
}

void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  cvttsd2si(result_reg, input_reg);
  Cvtlsi2sd(xmm0, result_reg);
  ucomisd(xmm0, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);  // NaN.
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original, so we
    // only have to test if we got -0 as an input.
    testl(result_reg, result_reg);
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg. If the input was
    // positive, we are ok and return 0; otherwise jump to minus_zero.
    andl(result_reg, Immediate(1));
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}

void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  movp(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  movl(dst, FieldOperand(map, Map::kBitField3Offset));
  andl(dst, Immediate(Map::EnumLengthBits::kMask));
  Integer32ToSmi(dst, dst);
}

void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  Cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
  j(equal, success, RelocInfo::CODE_TARGET);

  bind(&fail);
}

void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    Condition is_smi = CheckSmi(object);
    j(is_smi, &ok, Label::kNear);
    Cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandIsNotANumber);
    bind(&ok);
  }
}

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(NegateCondition(is_smi), kOperandIsASmi);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}


void MacroAssembler::AssertSmi(const Operand& object) {
  if (emit_debug_code()) {
    Condition is_smi = CheckSmi(object);
    Check(is_smi, kOperandIsNotASmi);
  }
}

void MacroAssembler::AssertZeroExtended(Register int32_register) {
  if (emit_debug_code()) {
    DCHECK(!int32_register.is(kScratchRegister));
    movq(kScratchRegister, V8_INT64_C(0x0000000100000000));
    cmpq(kScratchRegister, int32_register);
    Check(above_equal, k32BitValueInRegisterIsNotZeroExtended);
  }
}

void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    Pop(object);
    Check(below, kOperandIsNotAString);
  }
}

void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    testb(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    Push(object);
    movp(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    Pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    Cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    Cmp(FieldOperand(object, 0), isolate()->factory()->allocation_site_map());
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}

void MacroAssembler::AssertRootValue(Register src,
                                     Heap::RootListIndex root_value_index,
                                     BailoutReason reason) {
  if (emit_debug_code()) {
    DCHECK(!src.is(kScratchRegister));
    LoadRoot(kScratchRegister, root_value_index);
    cmpp(src, kScratchRegister);
    Check(equal, reason);
  }
}

Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  testb(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  movp(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, Immediate(static_cast<uint8_t>(LAST_NAME_TYPE)));
  return below_equal;
}

void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  Label non_instance;
  if (miss_on_bound_function) {
    // Check that the receiver isn't a smi.
    testl(function, Immediate(kSmiTagMask));
    j(zero, miss);

    // Check that the function really is a function.
    CmpObjectType(function, JS_FUNCTION_TYPE, result);
    j(not_equal, miss);

    movp(kScratchRegister,
         FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    // The bound-function bit is stored in the top half of a smi-tagged
    // 8-byte field, so it is not smi-tagged itself.
    TestBitSharedFunctionInfoSpecialField(kScratchRegister,
        SharedFunctionInfo::kCompilerHintsOffset,
        SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);

    // Make sure that the function has an instance prototype.
    testb(FieldOperand(result, Map::kBitFieldOffset),
          Immediate(1 << Map::kHasNonInstancePrototype));
    j(not_zero, &non_instance, Label::kNear);
  }

  // Get the prototype or initial map from the function.
  movp(result,
       FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  CompareRoot(result, Heap::kTheHoleValueRootIndex);
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, kScratchRegister);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  movp(result, FieldOperand(result, Map::kPrototypeOffset));

  if (miss_on_bound_function) {
    jmp(&done, Label::kNear);

    // Non-instance prototype: fetch the prototype from the constructor
    // field in the initial map.
    bind(&non_instance);
    movp(result, FieldOperand(result, Map::kConstructorOffset));
  }

  // All done.
  bind(&done);
}

void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    movl(counter_operand, Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      incl(counter_operand);
    } else {
      addl(counter_operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
      decl(counter_operand);
    } else {
      subl(counter_operand, Immediate(value));
    }
  }
}

void MacroAssembler::DebugBreak() {
  Set(rax, 0);  // No arguments.
  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}

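// The invocation helpers below implement the argument-count protocol: rax
// holds the actual count, rbx the expected count, and rdx the code entry.
// When the counts differ, control is routed through the
// ArgumentsAdaptorTrampoline (see InvokePrologue).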
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected,
                 actual,
                 Handle<Code>::null(),
                 code,
                 &done,
                 &definitely_mismatches,
                 flag,
                 Label::kNear,
                 call_wrapper);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(function.is(rdi));
  movp(rdx, FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  LoadSharedFunctionInfoSpecialField(rbx, rdx,
      SharedFunctionInfo::kFormalParameterCountOffset);
  // Advance rdx to the start of the executable code, past the Code object
  // header.
  movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  ParameterCount expected(rbx);
  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(function.is(rdi));
  movp(rsi, FieldOperand(function, JSFunction::kContextOffset));
  movp(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));

  InvokeCode(rdx, expected, actual, flag, call_wrapper);
}

void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  Move(rdi, function);
  InvokeFunction(rdi, expected, actual, flag, call_wrapper);
}

void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    Register code_register,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance near_jump,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      Set(rax, actual.immediate());
      if (expected.immediate() ==
              SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
        // Don't worry about adapting arguments for built-ins that don't
        // want that done. Skip adaption code by making it look like we
        // have a match between expected and actual number of arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        Set(rbx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in a register, actual is an immediate. This is the case
      // when we invoke function values without going through the IC
      // mechanism.
      cmpp(expected.reg(), Immediate(actual.immediate()));
      j(equal, &invoke, Label::kNear);
      DCHECK(expected.reg().is(rbx));
      Set(rax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This is the
      // case when we invoke functions using call and apply.
      cmpp(expected.reg(), actual.reg());
      j(equal, &invoke, Label::kNear);
      DCHECK(actual.reg().is(rax));
      DCHECK(expected.reg().is(rbx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor = isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      Move(rdx, code_constant, RelocInfo::EMBEDDED_OBJECT);
      addp(rdx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_register.is(rdx)) {
      movp(rdx, code_register);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor));
      Call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, near_jump);
      }
    } else {
      Jump(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}

void MacroAssembler::StubPrologue() {
  pushq(rbp);  // Caller's frame pointer.
  movp(rbp, rsp);
  Push(rsi);  // Callee's context.
  Push(Smi::FromInt(StackFrame::STUB));
}

void MacroAssembler::Prologue(bool code_pre_aging) {
  PredictableCodeSizeScope predictible_code_size_scope(this,
      kNoCodeAgeSequenceLength);
  if (code_pre_aging) {
    // Pre-age the code.
    Call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
         RelocInfo::CODE_AGE_SEQUENCE);
    Nop(kNoCodeAgeSequenceLength - Assembler::kShortCallInstructionLength);
  } else {
    pushq(rbp);  // Caller's frame pointer.
    movp(rbp, rsp);
    Push(rsi);  // Callee's context.
    Push(rdi);  // Callee's JS function.
  }
}

void MacroAssembler::EnterFrame(StackFrame::Type type) {
  pushq(rbp);
  movp(rbp, rsp);
  Push(rsi);  // Context.
  Push(Smi::FromInt(type));
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  Push(kScratchRegister);
  if (emit_debug_code()) {
    Move(kScratchRegister,
         isolate()->factory()->undefined_value(),
         RelocInfo::EMBEDDED_OBJECT);
    cmpp(Operand(rsp, 0), kScratchRegister);
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    Move(kScratchRegister, Smi::FromInt(type));
    cmpp(Operand(rbp, StandardFrameConstants::kMarkerOffset), kScratchRegister);
    Check(equal, kStackFrameTypesMustMatch);
  }
  movp(rsp, rbp);
  popq(rbp);
}

void MacroAssembler::EnterExitFramePrologue(bool save_rax) {
  // Set up the frame structure on the stack.
  // All constants are relative to the frame pointer of the exit frame.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement ==
         kFPOnStackSize + kPCOnStackSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == kFPOnStackSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  pushq(rbp);
  movp(rbp, rsp);

  // Reserve room for the entry stack pointer and push the code object.
  pushq(Immediate(0));  // Saved entry sp, patched before call.
  Move(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT);
  pushq(kScratchRegister);  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  if (save_rax) {
    movp(r14, rax);  // Backup rax in a callee-save register.
  }

  Store(ExternalReference(Isolate::kCEntryFPAddress, isolate()), rbp);
  Store(ExternalReference(Isolate::kContextAddress, isolate()), rsi);
}

void MacroAssembler::EnterExitFrameEpilogue(int arg_stack_space,
                                            bool save_doubles) {
#ifdef _WIN64
  const int kShadowSpace = 4;
  arg_stack_space += kShadowSpace;
#endif
  // Optionally save all XMM registers.
  if (save_doubles) {
    // ...
    int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else if (arg_stack_space > 0) {
    subp(rsp, Immediate(arg_stack_space * kRegisterSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(is_int8(kFrameAlignment));
    andp(rsp, Immediate(-kFrameAlignment));
  }

  // Patch the saved entry sp.
  movp(Operand(rbp, ExitFrameConstants::kSPOffset), rsp);
}

void MacroAssembler::EnterExitFrame(int arg_stack_space, bool save_doubles) {
  EnterExitFramePrologue(true);

  // Set up argv in callee-saved register r15. It is reused in
  // LeaveExitFrame, so it must be retained across the C call.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  leap(r15, Operand(rbp, r14, times_pointer_size, offset));

  EnterExitFrameEpilogue(arg_stack_space, save_doubles);
}

void MacroAssembler::EnterApiExitFrame(int arg_stack_space) {
  EnterExitFramePrologue(false);
  EnterExitFrameEpilogue(arg_stack_space, false);
}

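// The two flavors differ only in their prologue argument: a regular exit
// frame saves rax (the argument count) so argv can be derived from it,
// while the API exit frame skips that step.
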
void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Registers:
  // r15 : argv
  if (save_doubles) {
    // ...
    for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
      XMMRegister reg = XMMRegister::FromAllocationIndex(i);
      // ...
    }
  }
  // Get the return address from the stack, restore the frame pointer and
  // drop the arguments. ...
  PushReturnAddressFrom(rcx);

  LeaveExitFrameEpilogue(true);
}

void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  movp(rsp, rbp);
  popq(rbp);

  LeaveExitFrameEpilogue(restore_context);
}

void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  Operand context_operand = ExternalOperand(context_address);
  if (restore_context) {
    movp(rsi, context_operand);
  }
#ifdef DEBUG
  movp(context_operand, Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
  movp(c_entry_fp_operand, Immediate(0));
}

void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch));
  DCHECK(!scratch.is(kScratchRegister));
  // Load current lexical context from the stack frame.
  movp(scratch, Operand(rbp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmpp(scratch, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, offset));
  movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    Cmp(FieldOperand(scratch, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmpp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare the security tokens of the calling and receiving global objects.
  // Check the context is a native context.
  if (emit_debug_code()) {
    // Preserve the original value of holder_reg.
    Push(holder_reg);
    movp(holder_reg,
         FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
    CompareRoot(holder_reg, Heap::kNullValueRootIndex);
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map().
    movp(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
    CompareRoot(holder_reg, Heap::kNativeContextMapRootIndex);
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
    Pop(holder_reg);
  }

  movp(kScratchRegister,
       FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  int token_offset =
      Context::kHeaderSize + Context::SECURITY_TOKEN_INDEX * kPointerSize;
  movp(scratch, FieldOperand(scratch, token_offset));
  cmpp(scratch, FieldOperand(kScratchRegister, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}

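// Access is granted either when both global objects live in the same
// native context (the fast path above) or when the two security tokens
// compare equal; every other combination jumps to the miss label.
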
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // First of all we assign the hash seed to scratch.
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  SmiToInteger32(scratch, scratch);
  // Xor the key with the seed, then scramble.
  // ...
  // hash = ~hash + (hash << 15);
  shll(scratch, Immediate(15));
  // ...
  // hash = hash ^ (hash >> 12);
  shrl(scratch, Immediate(12));
  // ...
  // hash = hash ^ (hash >> 4);
  shrl(scratch, Immediate(4));
  // ...
  // hash = hash * 2057;
  imull(r0, r0, Immediate(2057));
  // hash = hash ^ (hash >> 16);
  shrl(scratch, Immediate(16));
  // ...
}

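// For reference, the scrambling partially elided above follows the
// runtime's integer hash, with which it must stay in sync:
//   hash ^= seed;
//   hash = ~hash + (hash << 15);
//   hash ^= hash >> 12;
//   hash += hash << 2;
//   hash ^= hash >> 4;
//   hash *= 2057;
//   hash ^= hash >> 16;
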
void MacroAssembler::LoadFromNumberDictionary(Label* miss, Register elements,
                                              Register key, Register r0,
                                              Register r1, Register r2,
                                              Register result) {
  // ...
  GetNumberHash(r0, r1);

  // Compute capacity mask.
  SmiToInteger32(r1, FieldOperand(elements,
                                  SeededNumberDictionary::kCapacityOffset));
  decl(r1);

  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    movp(r2, r0);
    if (i > 0) {
      addl(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    andp(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    // ...
    // Check if the key matches.
    cmpp(key, FieldOperand(elements, r2, times_pointer_size,
                           SeededNumberDictionary::kElementsStartOffset));
    // ...
  }
  // ...
  // Check that the value is a normal field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  Test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Smi::FromInt(PropertyDetails::TypeField::kMask));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  movp(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}

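// The probe sequence visits (hash + GetProbeOffset(i)) & (capacity - 1);
// capacity is a power of two, so decrementing it yields the mask. Each
// dictionary entry spans kEntrySize == 3 pointers: key, value, details.
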
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(!scratch.is_valid());
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    Operand top_operand = ExternalOperand(allocation_top);
    cmpp(result, top_operand);
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available,
  // and keep address in scratch until call to UpdateAllocationTopHelper.
  if (scratch.is_valid()) {
    LoadAddress(scratch, allocation_top);
    movp(result, Operand(scratch, 0));
  } else {
    Load(result, allocation_top);
  }
}

void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
                                                 Register scratch,
                                                 Label* gc_required,
                                                 AllocationFlags flags) {
  if (FLAG_debug_code) {
    testl(result, Immediate(kDoubleAlignmentMask));
    Check(zero, kAllocationIsNotDoubleAligned);
  }
  // ...
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  cmpp(result, ExternalOperand(allocation_limit));
  // ...
}

void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    testp(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top.
  if (scratch.is_valid()) {
    // Scratch already contains address of allocation top.
    movp(Operand(scratch, 0), result_end);
  } else {
    Store(allocation_top, result_end);
  }
}

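// Load/UpdateAllocationTopHelper bracket a classic bump-pointer allocation
// against the external top/limit pair, conceptually:
//   result = top;
//   if (result + size > limit) goto gc_required;
//   top = result + size;
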
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        movl(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  Register top_reg = result_end.is_valid() ? result_end : result;

  if (!top_reg.is(result)) {
    movp(top_reg, result);
  }
  addp(top_reg, Immediate(object_size));
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(top_reg, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      subp(result, Immediate(object_size - kHeapObjectTag));
    } else {
      subp(result, Immediate(object_size));
    }
  } else if (tag_result) {
    // Tag the result if requested.
    DCHECK(kHeapObjectTag == 1);
    incp(result);
  }
}

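// Tagging trick: kHeapObjectTag == 1, so when result was reused as the new
// top, subtracting object_size - kHeapObjectTag both rewinds to the object
// start and tags it in one instruction; otherwise a single incp suffices.
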
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  leap(result_end, Operand(element_count, element_size, header_size));
  Allocate(result_end, result, result_end, scratch, gc_required, flags);
}

void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      movl(result, Immediate(0x7091));
      movl(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        movl(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    MakeSureDoubleAlignedHelper(result, scratch, gc_required, flags);
  }

  // Calculate new top and bail out if new space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
  if (!object_size.is(result_end)) {
    movp(result_end, object_size);
  }
  addp(result_end, result);
  j(carry, gc_required);
  Operand limit_operand = ExternalOperand(allocation_limit);
  cmpp(result_end, limit_operand);
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);

  // Tag the result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    addp(result, Immediate(kHeapObjectTag));
  }
}

void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  andp(object, Immediate(~kHeapObjectTagMask));
  Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
  cmpp(object, top_operand);
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  movp(top_operand, object);
}

void MacroAssembler::AllocateHeapNumber(Register result, Register scratch,
                                        Label* gc_required, MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch, no_reg, gc_required,
           TAG_OBJECT);

  Heap::RootListIndex map_index = mode == MUTABLE
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;

  // Set the map.
  LoadRoot(kScratchRegister, map_index);
  movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
}

void MacroAssembler::AllocateTwoByteString(Register result, Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqTwoByteString::kHeaderSize &
                               kObjectAlignmentMask;
  // ...
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate two-byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize, times_1, scratch1, result,
           scratch2, scratch3, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  // ...
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  const int kHeaderAlignment = SeqOneByteString::kHeaderSize &
                               kObjectAlignmentMask;
  movl(scratch1, length);
  // ...
  if (kHeaderAlignment > 0) {
    subp(scratch1, Immediate(kHeaderAlignment));
  }

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize, times_1, scratch1, result,
           scratch2, scratch3, gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  // ...
  Integer32ToSmi(scratch1, length);
  movp(FieldOperand(result, String::kLengthOffset), scratch1);
  movp(FieldOperand(result, String::kHashFieldOffset),
       Immediate(String::kEmptyHashField));
}

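// Sequential string size is the header plus length * char size (2 bytes per
// two-byte character, 1 per one-byte character), rounded up to object
// alignment; e.g. a 5-character one-byte string needs the header plus 8
// bytes of payload after rounding.
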
void MacroAssembler::AllocateTwoByteConsString(Register result, Register scratch1,
                                               Register scratch2, Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);
  // ... (set the map; the other fields are left uninitialized)
}


void MacroAssembler::AllocateOneByteConsString(Register result, Register scratch1,
                                               Register scratch2, Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);
  // ...
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result, Register scratch1,
                                                 Register scratch2, Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);
  // ...
}


void MacroAssembler::AllocateOneByteSlicedString(Register result, Register scratch1,
                                                 Register scratch2, Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);
  // ...
}

void MacroAssembler::CopyBytes(Register destination, Register source,
                               Register length, int min_length,
                               Register scratch) {
  if (emit_debug_code()) {
    cmpl(length, Immediate(min_length));
    Assert(greater_equal, kInvalidMinLength);
  }
  Label short_loop, len8, len16, len24, done, short_string;

  const int kLongStringLimit = 4 * kPointerSize;
  if (min_length <= kLongStringLimit) {
    cmpl(length, Immediate(kPointerSize));
    j(below, &short_string, Label::kNear);
  }

  if (min_length <= kLongStringLimit) {
    // ... (dispatch to len8/len16/len24 for copies of up to four words)
  }

  // Copy the bulk with rep movs; because source is 8-byte aligned in our
  // uses, the odd bytes at the end are copied with an overlapping move.
  movp(scratch, length);
  shrl(length, Immediate(kPointerSizeLog2));
  repmovsp();
  // ...
  addp(destination, scratch);

  if (min_length <= kLongStringLimit) {
    jmp(&done, Label::kNear);
    // ...
    bind(&len8);
    movp(scratch, Operand(source, 0));
    movp(Operand(destination, 0), scratch);
    // ...
    addp(destination, length);
    jmp(&done, Label::kNear);

    bind(&short_string);
    if (min_length == 0) {
      testl(length, length);
      j(zero, &done, Label::kNear);
    }

    bind(&short_loop);
    movb(scratch, Operand(source, 0));
    movb(Operand(destination, 0), scratch);
    // ...
    j(not_zero, &short_loop, Label::kNear);
  }

  bind(&done);
}

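// Copy strategy: lengths below one word take the byte loop; lengths up to
// kLongStringLimit (four words) use the unrolled word moves; anything
// longer is bulk-copied with rep movs.
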
void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  movp(Operand(start_offset, 0), filler);
  addp(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmpp(start_offset, end_offset);
  j(less, &loop);
}

void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    movp(dst, Operand(rsi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      movp(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the destination
    // register in case we store into it (the write barrier cannot be allowed
    // to destroy the context in rsi).
    movp(dst, rsi);
  }

  // We should not have found a with context by walking the context chain.
  if (emit_debug_code()) {
    CompareRoot(FieldOperand(dst, HeapObject::kMapOffset),
                Heap::kWithContextMapRootIndex);
    Check(not_equal, kVariableResolvedToWithContext);
  }
}

void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  movp(scratch,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  movp(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  movp(scratch, Operand(scratch,
                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  int offset = expected_kind * kPointerSize + FixedArrayBase::kHeaderSize;
  cmpp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize + FixedArrayBase::kHeaderSize;
  movp(map_in_out, FieldOperand(scratch, offset));
}

#ifdef _WIN64
static const int kRegisterPassedArguments = 4;
#else
static const int kRegisterPassedArguments = 6;
#endif

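// Win64 passes the first four arguments in registers, while the System V
// AMD64 ABI used everywhere else passes the first six integer arguments in
// registers; that is the sole source of the 4/6 split above.
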
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  movp(function,
       Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  movp(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  movp(function, Operand(function, Context::SlotOffset(index)));
}

void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  movp(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}

int MacroAssembler::ArgumentStackSlotsForCFunctionCall(int num_arguments) {
  // On Windows 64 stack slots are reserved by the caller for all arguments
  // including the ones passed in registers, and space is always allocated
  // for the four register arguments even if the function takes fewer.
  // On AMD64 ABI stack slots are only allocated for arguments actually
  // passed on the stack.
  DCHECK(num_arguments >= 0);
#ifdef _WIN64
  const int kMinimumStackSlots = kRegisterPassedArguments;
  if (num_arguments < kMinimumStackSlots) return kMinimumStackSlots;
  return num_arguments;
#else
  if (num_arguments < kRegisterPassedArguments) return 0;
  return num_arguments - kRegisterPassedArguments;
#endif
}

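// Worked example: for 7 arguments, Win64 reserves 7 slots (the first four
// double as shadow space for the register arguments), whereas System V
// reserves just 1 slot for the lone argument that does not fit in the six
// argument registers.
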
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object);
  Abort(kNonObject);
  bind(&is_object);

  Push(value);
  movp(value, FieldOperand(string, HeapObject::kMapOffset));
  movzxbp(value, FieldOperand(value, Map::kInstanceTypeOffset));

  andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmpp(value, Immediate(encoding_mask));
  Pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in; tag it to compare with
  // the string length without using a temp register, and restore it at the
  // end of this function.
  Integer32ToSmi(index, index);
  SmiCompare(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  SmiCompare(index, Smi::FromInt(0));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index.
  SmiToInteger32(index, index);
}

void MacroAssembler::PrepareCallCFunction(int num_arguments) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  DCHECK(frame_alignment != 0);
  DCHECK(num_arguments >= 0);

  // Make stack end at alignment and allocate space for arguments and old rsp.
  movp(kScratchRegister, rsp);
  DCHECK(IsPowerOfTwo32(frame_alignment));
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  subp(rsp, Immediate((argument_slots_on_stack + 1) * kRegisterSize));
  andp(rsp, Immediate(-frame_alignment));
  movp(Operand(rsp, argument_slots_on_stack * kRegisterSize),
       kScratchRegister);
}

void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}

void MacroAssembler::CallCFunction(Register function, int num_arguments) {
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  DCHECK(base::OS::ActivationFrameAlignment() != 0);
  DCHECK(num_arguments >= 0);
  int argument_slots_on_stack =
      ArgumentStackSlotsForCFunctionCall(num_arguments);
  movp(rsp, Operand(rsp, argument_slots_on_stack * kRegisterSize));
}

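// Typical call sequence (a sketch; SomeRuntimeFunction is hypothetical):
//   PrepareCallCFunction(2);
//   // ...move the two arguments into the ABI argument registers...
//   CallCFunction(ExternalReference::SomeRuntimeFunction(isolate()), 2);
// PrepareCallCFunction stores the old rsp above the argument area, and
// CallCFunction's trailing movp restores it after the call returns.
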
#ifdef DEBUG
bool AreAliased(Register reg1, Register reg2, Register reg3, Register reg4,
                Register reg5, Register reg6, Register reg7, Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
#endif

CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to
  // patch. The size is adjusted with kGap so the assembler can generate
  // size bytes of instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CpuFeatures::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    andp(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    movp(scratch, Immediate(~Page::kPageAlignmentMask));
    andp(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    testb(Operand(scratch, MemoryChunk::kFlagsOffset),
          Immediate(static_cast<uint8_t>(mask)));
  } else {
    testl(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

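// Masking an object address with ~kPageAlignmentMask yields its page
// (MemoryChunk) header; the flags word there is then tested, using a byte
// test when the requested mask fits in eight bits.
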
void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    Move(scratch, map);
    movl(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    andl(scratch, Immediate(Map::Deprecated::kMask));
    j(not_zero, if_deprecated);
  }
}

void MacroAssembler::JumpIfBlack(Register object,
                                 Register bitmap_scratch,
                                 Register mask_scratch,
                                 Label* on_black,
                                 Label::Distance on_black_distance) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(object, bitmap_scratch, mask_scratch);

  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  // The mask_scratch register contains a 1 at the position of the first bit
  // and a 0 at all other positions, including the position of the second bit.
  movp(rcx, mask_scratch);
  // Make rcx into a mask that covers both marking bits using the operation
  // rcx = mask | (mask << 1).
  leap(rcx, Operand(mask_scratch, mask_scratch, times_2, 0));
  // Note that we are using a 4-byte aligned 8-byte load.
  andp(rcx, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  cmpp(mask_scratch, rcx);
  j(equal, on_black, on_black_distance);
}

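// Black is the two-bit pattern "10": widening the mask to cover both bits
// (mask | mask << 1) and comparing the extracted bits against the original
// mask succeeds only when the first bit is set and the second is clear.
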
void MacroAssembler::JumpIfDataObject(
    Register value,
    Register scratch,
    Label* not_data_object,
    Label::Distance not_data_object_distance) {
  Label is_data_object;
  movp(scratch, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  j(equal, &is_data_object, Label::kNear);
  DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object
  // containing no GC pointers.
  testb(FieldOperand(scratch, Map::kInstanceTypeOffset),
        Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, not_data_object, not_data_object_distance);
  bind(&is_data_object);
}

void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, bitmap_reg, mask_reg, rcx));
  movp(bitmap_reg, addr_reg);
  // Sign extended 32 bit immediate.
  andp(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  movp(rcx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shrl(rcx, Immediate(shift));
  andp(rcx,
       Immediate((Page::kPageAlignmentMask >> shift) &
                 ~(Bitmap::kBytesPerCell - 1)));

  addp(bitmap_reg, rcx);
  movp(rcx, addr_reg);
  shrl(rcx, Immediate(kPointerSizeLog2));
  andp(rcx, Immediate((1 << Bitmap::kBitsPerCellLog2) - 1));
  movl(mask_reg, Immediate(1));
  shlp_cl(mask_reg);
}

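// The marking bitmap lives in the page header: bitmap_reg ends up pointing
// at the cell covering addr_reg, while mask_reg gets a single bit selected
// by the low bits of the address's word index (the shift count left in cl).
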
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, rcx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    Push(mask_scratch);
    // shl. May overflow making the check conservative.
    addp(mask_scratch, mask_scratch);
    testp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    Pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for heap numbers and non-cons strings.
  Register map = rcx;
  Register length = rcx;
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  movp(map, FieldOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  j(not_equal, &not_heap_number, Label::kNear);
  movp(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // If it's a string and it's not a cons string then it's an object
  // containing no GC pointers.
  Register instance_type = rcx;
  movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  testb(instance_type, Immediate(kIsIndirectStringMask | kIsNotStringMask));
  j(not_zero, value_is_white_and_not_data);

  // It's a non-indirect (non-cons and non-slice) string. If it's external,
  // the length is just ExternalString::kSize.
  Label not_external;
  testb(instance_type, Immediate(kExternalStringTag));
  j(zero, &not_external, Label::kNear);
  movp(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either one-byte or two-byte.
  // ...
  addp(length, Immediate(0x04));
  // Value now either 4 (one-byte) or 8 (two-byte), i.e. the char size
  // shifted by 2. The Smi length is multiplied in, then untagged and
  // rounded up to object alignment.
  imulp(length, FieldOperand(value, String::kLengthOffset));
  // ...

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  orp(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  andp(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  addl(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset), length);

  bind(&done);
}

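// White objects that turn out to be pure data (heap numbers, external and
// sequential strings) are marked black on the spot and their size is added
// to the page's live-byte count; everything else exits through
// value_is_white_and_not_data for the caller to handle.
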
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Label next, start;
  Register empty_fixed_array_value = r8;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  // ...
  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  j(equal, call_runtime);
  jmp(&start);

  bind(&next);
  // For all objects but the receiver, check that the cache is empty.
  // ...
  Cmp(rdx, Smi::FromInt(0));
  j(not_equal, call_runtime);

  bind(&start);
  // Check that there are no elements; rcx holds the current JS object in
  // the prototype chain.
  Label no_elements;
  cmpp(empty_fixed_array_value,
       FieldOperand(rcx, JSObject::kElementsOffset));
  j(equal, &no_elements);
  // ...
  bind(&no_elements);
  movp(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
  cmpp(rcx, null_value);
  j(not_equal, &next);
}

void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  leap(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  Move(kScratchRegister, new_space_start);
  cmpp(scratch_reg, kScratchRegister);
  j(less, no_memento_found);
  cmpp(scratch_reg, ExternalOperand(new_space_allocation_top));
  j(greater, no_memento_found);
  CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize),
              Heap::kAllocationMementoMapRootIndex);
}

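// An AllocationMemento, when present, sits immediately after the JSArray in
// new space; the two bounds checks ensure the probe address really lies
// within the allocated portion of new space before the map comparison.
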
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Register current = scratch0;
  Label loop_again;

  movp(current, object);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  movp(current, FieldOperand(current, HeapObject::kMapOffset));
  movp(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmpp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  movp(current, FieldOperand(current, Map::kPrototypeOffset));
  CompareRoot(current, Heap::kNullValueRootIndex);
  j(not_equal, &loop_again);
}

void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(rax));
  DCHECK(!dividend.is(rdx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  movl(rax, Immediate(mag.multiplier));
  imull(dividend);
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) addl(rdx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) subl(rdx, dividend);
  if (mag.shift > 0) sarl(rdx, Immediate(mag.shift));
  movl(rax, dividend);
  shrl(rax, Immediate(31));
  addl(rdx, rax);
}
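
// Signed division by a constant via magic-number multiplication (cf.
// Hacker's Delight): the high half of multiplier * dividend arrives in rdx,
// is corrected for the operand signs, arithmetically shifted, and finally
// adjusted by the dividend's sign bit so the quotient truncates toward
// zero. For divisor 3, for instance, the multiplier is 0x55555556 with
// shift 0.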