9 #if V8_TARGET_ARCH_MIPS64
23 : Assembler(arg_isolate, buffer, size),
24 generating_stub_(false),
26 if (isolate() != NULL) {
27 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
33 void MacroAssembler::Load(Register dst,
39 } else if (r.IsUInteger8()) {
41 } else if (r.IsInteger16()) {
43 } else if (r.IsUInteger16()) {
45 } else if (r.IsInteger32()) {
53 void MacroAssembler::Store(Register src,
57 if (r.IsInteger8() || r.IsUInteger8()) {
59 } else if (r.IsInteger16() || r.IsUInteger16()) {
61 } else if (r.IsInteger32()) {
64 if (r.IsHeapObject()) {
66 } else if (r.IsSmi()) {
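// Note on the dispatch above: Load/Store pick the access width from the
// value's Representation. A rough sketch (assumed, not verbatim from this
// excerpt):
//   if (r.IsInteger8())        lb(dst, src);   // sign-extending byte load
//   else if (r.IsUInteger8())  lbu(dst, src);  // zero-extending byte load
//   else if (r.IsInteger16())  lh(dst, src);
//   else if (r.IsUInteger16()) lhu(dst, src);
//   else if (r.IsInteger32())  lw(dst, src);
//   else                       ld(dst, src);   // tagged / word-sized values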
74 void MacroAssembler::LoadRoot(Register destination,
75 Heap::RootListIndex index) {
80 void MacroAssembler::LoadRoot(Register destination,
81 Heap::RootListIndex index,
83 Register src1, const Operand& src2) {
89 void MacroAssembler::StoreRoot(Register source,
90 Heap::RootListIndex index) {
95 void MacroAssembler::StoreRoot(Register source,
96 Heap::RootListIndex index,
98 Register src1, const Operand& src2) {
105 void MacroAssembler::PushSafepointRegisters() {
110 if (num_unsaved > 0) {
117 void MacroAssembler::PopSafepointRegisters() {
120 if (num_unsaved > 0) {
126 void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
127 sd(src, SafepointRegisterSlot(dst));
131 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
132 ld(dst, SafepointRegisterSlot(src));
136 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
143 MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {
148 MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
151 int doubles_size = FPURegister::NumAllocatableRegisters() * kDoubleSize;
152 int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
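// Sketch of the slot layout assumed here: when doubles are saved as well, the
// FPU registers are spilled below the GP safepoint slots, so a GP register's
// slot address is roughly sp + doubles_size + register_offset, with
// doubles_size = NumAllocatableRegisters() * kDoubleSize.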
157 void MacroAssembler::InNewSpace(Register object,
162 And(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
163 Branch(branch, cc, scratch,
164 Operand(ExternalReference::new_space_start(isolate())));
168 void MacroAssembler::RecordWriteField(
185 JumpIfSmi(value, &done);
193 if (emit_debug_code()) {
196 Branch(&ok, eq, t8, Operand(zero_reg));
197 stop("Unaligned cell in write barrier");
206 remembered_set_action,
208 pointers_to_here_check_for_value);
214 if (emit_debug_code()) {
215 li(value, Operand(bit_cast<int64_t>(kZapValue + 4)));
216 li(dst, Operand(bit_cast<int64_t>(kZapValue + 8)));
223 void MacroAssembler::RecordWriteForMap(Register object,
228 if (emit_debug_code()) {
232 kWrongAddressOrValuePassedToRecordWrite,
234 Operand(isolate()->factory()->meta_map()));
237 if (!FLAG_incremental_marking) {
241 if (emit_debug_code()) {
244 kWrongAddressOrValuePassedToRecordWrite,
257 MemoryChunk::kPointersToHereAreInterestingMask,
261 Daddu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
262 if (emit_debug_code()) {
265 Branch(&ok, eq, at, Operand(zero_reg));
266 stop("Unaligned cell in write barrier");
284 isolate()->counters()->write_barriers_static()->Increment();
285 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at, dst);
289 if (emit_debug_code()) {
290 li(dst, Operand(bit_cast<int64_t>(kZapValue + 12)));
299 void MacroAssembler::RecordWrite(
311 if (emit_debug_code()) {
314 eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
318 !FLAG_incremental_marking) {
328 JumpIfSmi(value, &done);
334 MemoryChunk::kPointersToHereAreInterestingMask,
338 CheckPageFlag(object,
340 MemoryChunk::kPointersFromHereAreInterestingMask,
348 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
358 isolate()->counters()->write_barriers_static()->Increment();
359 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, at,
364 if (emit_debug_code()) {
365 li(address, Operand(bit_cast<int64_t>(kZapValue + 12)));
366 li(value, Operand(bit_cast<int64_t>(kZapValue + 16)));
371 void MacroAssembler::RememberedSetHelper(Register object,
375 RememberedSetFinalAction and_then) {
377 if (emit_debug_code()) {
379 JumpIfNotInNewSpace(object, scratch, &ok);
380 stop("Remembered set pointer is in new space");
384 ExternalReference store_buffer =
385 ExternalReference::store_buffer_top(isolate());
386 li(t8, Operand(store_buffer));
395 And(t8, scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
397 if (and_then == kFallThroughAtEnd) {
398 Branch(&done, eq, t8, Operand(zero_reg));
400 DCHECK(and_then == kReturnAtEnd);
401 Ret(eq, t8, Operand(zero_reg));
404 StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
405 CallStub(&store_buffer_overflow);
408 if (and_then == kReturnAtEnd) {
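// Rough idea of RememberedSetHelper (sketch, not verbatim): the address of the
// written slot is appended at the current store-buffer top, the top pointer is
// bumped by one word, and kStoreBufferOverflowBit of the new top is tested;
// on overflow a StoreBufferOverflowStub call flushes the buffer, otherwise the
// code falls through or returns depending on `and_then`.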
418 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
423 DCHECK(!holder_reg.is(scratch));
424 DCHECK(!holder_reg.is(at));
428 ld(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
431 Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
432 scratch, Operand(zero_reg));
437 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
439 ld(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
442 if (emit_debug_code()) {
446 LoadRoot(at, Heap::kNativeContextMapRootIndex);
447 Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
448 holder_reg, Operand(at));
453 ld(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
454 Branch(&same_contexts, eq, scratch, Operand(at));
457 if (emit_debug_code()) {
460 LoadRoot(at, Heap::kNullValueRootIndex);
461 Check(ne, kJSGlobalProxyContextShouldNotBeNull,
462 holder_reg, Operand(at));
465 LoadRoot(at, Heap::kNativeContextMapRootIndex);
466 Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
467 holder_reg, Operand(at));
471 ld(at, FieldMemOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
477 int token_offset = Context::kHeaderSize +
482 Branch(miss, ne, scratch, Operand(at));
484 bind(&same_contexts);
491 void MacroAssembler::GetNumberHash(Register reg0, Register scratch) {
493 LoadRoot(scratch, Heap::kHashSeedRootIndex);
497 xor_(reg0, reg0, scratch);
504 nor(scratch, reg0, zero_reg);
506 addu(reg0, scratch, at);
510 xor_(reg0, reg0, at);
514 addu(reg0, reg0, at);
518 xor_(reg0, reg0, at);
521 sll(scratch, reg0, 11);
523 addu(reg0, reg0, at);
524 addu(reg0, reg0, scratch);
528 xor_(reg0, reg0, at);
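// The scrambling above is the usual integer-hash sequence; a C-style sketch
// (assumed, matching the shift amounts visible in the instructions):
//   hash = ~hash + (hash << 15);
//   hash =  hash ^ (hash >> 12);
//   hash =  hash + (hash << 2);
//   hash =  hash ^ (hash >> 4);
//   hash =  hash * 2057;           // i.e. hash + (hash << 3) + (hash << 11)
//   hash =  hash ^ (hash >> 16);
// The input is first xor'ed with the heap's hash seed (kHashSeedRootIndex).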
532 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
563 GetNumberHash(reg0, reg1);
566 ld(reg1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
567 SmiUntag(reg1, reg1);
568 Dsubu(reg1, reg1, Operand(1));
576 Daddu(reg2, reg2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
578 and_(reg2, reg2, reg1);
581 DCHECK(SeededNumberDictionary::kEntrySize == 3);
583 daddu(reg2, reg2, at);
587 daddu(reg2, elements, at);
589 ld(at, FieldMemOperand(reg2, SeededNumberDictionary::kElementsStartOffset));
591 Branch(&done, eq, key, Operand(at));
593 Branch(miss, ne, key, Operand(at));
600 const int kDetailsOffset =
601 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
603 And(at, reg1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
604 Branch(miss, ne, at, Operand(zero_reg));
607 const int kValueOffset =
608 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
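// Probe sketch (assumed from the loop above): for probe i,
//   index = (hash + GetProbeOffset(i)) & (capacity - 1)
//   entry = elements + kElementsStartOffset + index * kEntrySize * kPointerSize
// where kEntrySize == 3 per the DCHECK above. A matching key falls through to
// the details/value checks; on the last probe a mismatch branches to the miss
// label instead of reprobing.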
616 void MacroAssembler::Addu(Register rd, Register rs, const Operand& rt) {
618 addu(rd, rs, rt.rm());
620 if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
621 addiu(rd, rs, rt.imm64_);
632 void MacroAssembler::Daddu(Register rd, Register rs, const Operand& rt) {
634 daddu(rd, rs, rt.rm());
636 if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
637 daddiu(rd, rs, rt.imm64_);
648 void MacroAssembler::Subu(Register rd, Register rs, const Operand& rt) {
650 subu(rd, rs, rt.rm());
652 if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
653 addiu(rd, rs, -rt.imm64_);
664 void MacroAssembler::Dsubu(Register rd, Register rs, const Operand& rt) {
666 dsubu(rd, rs, rt.rm());
668 if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
669 daddiu(rd, rs, -rt.imm64_);
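// Pattern shared by the Addu/Daddu/Subu/Dsubu macros above (sketch): a register
// operand uses the register form directly; an immediate that fits in 16 bits
// and needs no relocation is folded into addiu/daddiu (negated for the
// subtract variants); anything else is materialized into `at` with li and the
// register form is used.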
680 void MacroAssembler::Mul(Register rd, Register rs, const Operand& rt) {
682 mul(rd, rs, rt.rm());
692 void MacroAssembler::Mulh(Register rd, Register rs, const Operand& rt) {
698 muh(rd, rs, rt.rm());
714 void MacroAssembler::Dmul(Register rd, Register rs, const Operand& rt) {
717 dmul(rd, rs, rt.rm());
736 void MacroAssembler::Dmulh(Register rd, Register rs, const Operand& rt) {
739 dmuh(rd, rs, rt.rm());
758 void MacroAssembler::Mult(Register rs, const Operand& rt) {
770 void MacroAssembler::Dmult(Register rs, const Operand& rt) {
782 void MacroAssembler::Multu(Register rs, const Operand& rt) {
794 void MacroAssembler::Dmultu(Register rs, const Operand& rt) {
806 void MacroAssembler::Div(Register rs, const Operand& rt) {
818 void MacroAssembler::Ddiv(Register rs, const Operand& rt) {
830 void MacroAssembler::Ddiv(Register rd, Register rs, const Operand& rt) {
844 ddiv(rd, rs, rt.rm());
855 void MacroAssembler::Divu(Register rs, const Operand& rt) {
867 void MacroAssembler::Ddivu(Register rs, const Operand& rt) {
879 void MacroAssembler::Dmod(Register rd, Register rs, const Operand& rt) {
893 dmod(rd, rs, rt.rm());
904 void MacroAssembler::And(Register rd, Register rs, const Operand& rt) {
906 and_(rd, rs, rt.rm());
908 if (is_uint16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
909 andi(rd, rs, rt.imm64_);
920 void MacroAssembler::Or(Register rd, Register rs, const Operand& rt) {
922 or_(rd, rs, rt.rm());
924 if (is_uint16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
925 ori(rd, rs, rt.imm64_);
936 void MacroAssembler::Xor(Register rd, Register rs, const Operand& rt) {
938 xor_(rd, rs, rt.rm());
940 if (is_uint16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
941 xori(rd, rs, rt.imm64_);
952 void MacroAssembler::Nor(Register rd, Register rs, const Operand& rt) {
954 nor(rd, rs, rt.rm());
964 void MacroAssembler::Neg(Register rs, const Operand& rt) {
969 xor_(rs, rt.rm(), at);
973 void MacroAssembler::Slt(Register rd, Register rs, const Operand& rt) {
975 slt(rd, rs, rt.rm());
977 if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
978 slti(rd, rs, rt.imm64_);
989 void MacroAssembler::Sltu(Register rd, Register rs, const Operand& rt) {
991 sltu(rd, rs, rt.rm());
993 if (is_int16(rt.imm64_) && !MustUseReg(rt.rmode_)) {
994 sltiu(rd, rs, rt.imm64_);
1005 void MacroAssembler::Ror(Register rd, Register rs, const Operand& rt) {
1008 rotrv(rd, rs, rt.rm());
1010 rotr(rd, rs, rt.imm64_);
1014 subu(at, zero_reg, rt.rm());
1016 srlv(rd, rs, rt.rm());
1019 if (rt.imm64_ == 0) {
1022 srl(at, rs, rt.imm64_);
1023 sll(rd, rs, (0x20 - rt.imm64_) & 0x1f);
1031 void MacroAssembler::Dror(Register rd, Register rs, const Operand& rt) {
1033 drotrv(rd, rs, rt.rm());
1035 drotr(rd, rs, rt.imm64_);
1047 void MacroAssembler::Ulw(Register rd, const MemOperand& rs) {
1049 lwl(rd, MemOperand(rs.rm(), rs.offset() + 3));
1053 void MacroAssembler::Usw(Register rd, const MemOperand& rs) {
1055 swl(rd, MemOperand(rs.rm(), rs.offset() + 3));
1061 void MacroAssembler::Uld(Register rd, const MemOperand& rs, Register scratch) {
1068 dsll32(scratch, scratch, 0);
1069 Daddu(rd, rd, scratch);
1075 void MacroAssembler::Usd(Register rd, const MemOperand& rs, Register scratch) {
1081 dsrl32(scratch, rd, 0);
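// Unaligned access sketch (assumed): Ulw/Usw pair lwl/lwr (swl/swr) so the two
// partial accesses together cover the misaligned word, while Uld/Usd go via
// two 32-bit halves, moving the high half into place with dsll32/dsrl32 and
// combining the result with Daddu.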
1086 void MacroAssembler::li(Register dst, Handle<Object> value, LiFlags mode) {
1088 if (value->IsSmi()) {
1089 li(dst, Operand(value), mode);
1091 DCHECK(value->IsHeapObject());
1092 if (isolate()->heap()->InNewSpace(*value)) {
1093 Handle<Cell> cell = isolate()->factory()->NewCell(value);
1094 li(dst, Operand(cell));
1097 li(dst, Operand(value));
1103 void MacroAssembler::li(Register rd, Operand j, LiFlags mode) {
1105 BlockTrampolinePoolScope block_trampoline_pool(this);
1108 if (is_int32(j.imm64_)) {
1109 if (is_int16(j.imm64_)) {
1110 daddiu(rd, zero_reg, (j.imm64_ & kImm16Mask));
1111 } else if (!(j.imm64_ & kHiMask)) {
1127 } else if (MustUseReg(j.rmode_)) {
1128 RecordRelocInfo(j.rmode_, j.imm64_);
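// Immediate materialization sketch (assumed): small values fit a single
// daddiu/ori, other 32-bit values use lui + ori, and full 64-bit values (or
// anything carrying relocation info) are assembled from 16-bit pieces with
// lui/ori plus dsll shifts so that relocated loads keep a fixed-length,
// patchable sequence.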
1151 void MacroAssembler::MultiPush(RegList regs) {
1155 Dsubu(sp, sp, Operand(stack_offset));
1157 if ((regs & (1 << i)) != 0) {
1165 void MacroAssembler::MultiPushReversed(RegList regs) {
1169 Dsubu(sp, sp, Operand(stack_offset));
1171 if ((regs & (1 << i)) != 0) {
1179 void MacroAssembler::MultiPop(RegList regs) {
1183 if ((regs & (1 << i)) != 0) {
1188 daddiu(sp, sp, stack_offset);
1192 void MacroAssembler::MultiPopReversed(RegList regs) {
1196 if ((regs & (1 << i)) != 0) {
1201 daddiu(sp, sp, stack_offset);
1205 void MacroAssembler::MultiPushFPU(RegList regs) {
1209 Dsubu(sp, sp, Operand(stack_offset));
1211 if ((regs & (1 << i)) != 0) {
1213 sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
1219 void MacroAssembler::MultiPushReversedFPU(RegList regs) {
1223 Dsubu(sp, sp, Operand(stack_offset));
1225 if ((regs & (1 << i)) != 0) {
1227 sdc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
1233 void MacroAssembler::MultiPopFPU(RegList regs) {
1237 if ((regs & (1 << i)) != 0) {
1238 ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
1242 daddiu(sp, sp, stack_offset);
1246 void MacroAssembler::MultiPopReversedFPU(RegList regs) {
1250 if ((regs & (1 << i)) != 0) {
1251 ldc1(FPURegister::from_code(i), MemOperand(sp, stack_offset));
1255 daddiu(sp, sp, stack_offset);
1259 void MacroAssembler::FlushICache(Register address, unsigned instructions) {
1261 MultiPush(saved_regs);
1262 AllowExternalCallThatCantCauseGC scope(this);
1266 PrepareCallCFunction(2, a4);
1268 li(a1, instructions * kInstrSize);
1269 CallCFunction(ExternalReference::flush_icache_function(isolate()), 2);
1270 MultiPop(saved_regs);
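// Sketch of the call above (assumed): caller-saved registers are spilled,
// C-call arguments are set up (a0 = start address, a1 = length in bytes,
// i.e. instructions * kInstrSize), and ExternalReference::flush_icache_function
// is invoked via CallCFunction before the registers are restored; the actual
// cache flush happens in C.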
1274 void MacroAssembler::Ext(Register rt,
1280 ext_(rt, rs, pos, size);
1284 void MacroAssembler::Ins(Register rt,
1291 ins_(rt, rs, pos, size);
1295 void MacroAssembler::Cvt_d_uw(FPURegister fd,
1297 FPURegister scratch) {
1300 Cvt_d_uw(fd, t8, scratch);
1304 void MacroAssembler::Cvt_d_uw(FPURegister fd,
1306 FPURegister scratch) {
1321 mthc1(zero_reg, fd);
1326 Label conversion_done;
1330 Branch(&conversion_done, eq, t9, Operand(zero_reg));
1334 mtc1(zero_reg, scratch);
1337 add_d(fd, fd, scratch);
1339 bind(&conversion_done);
1343 void MacroAssembler::Round_l_d(FPURegister fd, FPURegister fs) {
1348 void MacroAssembler::Floor_l_d(FPURegister fd, FPURegister fs) {
1353 void MacroAssembler::Ceil_l_d(FPURegister fd, FPURegister fs) {
1358 void MacroAssembler::Trunc_l_d(FPURegister fd, FPURegister fs) {
1363 void MacroAssembler::Trunc_l_ud(FPURegister fd,
1365 FPURegister scratch) {
1369 li(at, 0x7fffffffffffffff);
1376 void MacroAssembler::Trunc_uw_d(FPURegister fd,
1378 FPURegister scratch) {
1379 Trunc_uw_d(fs, t8, scratch);
1384 void MacroAssembler::Trunc_w_d(FPURegister fd, FPURegister fs) {
1389 void MacroAssembler::Round_w_d(FPURegister fd, FPURegister fs) {
1394 void MacroAssembler::Floor_w_d(FPURegister fd, FPURegister fs) {
1399 void MacroAssembler::Ceil_w_d(FPURegister fd, FPURegister fs) {
1404 void MacroAssembler::Trunc_uw_d(FPURegister fd,
1406 FPURegister scratch) {
1412 mtc1(zero_reg, scratch);
1416 Label simple_convert;
1417 BranchF(&simple_convert, NULL, lt, fd, scratch);
1421 sub_d(scratch, fd, scratch);
1422 trunc_w_d(scratch, scratch);
1424 Or(rs, rs, 1 << 31);
1429 bind(&simple_convert);
1430 trunc_w_d(scratch, fd);
1437 void MacroAssembler::Madd_d(FPURegister fd, FPURegister fr, FPURegister fs,
1438 FPURegister ft, FPURegister scratch) {
1440 madd_d(fd, fr, fs, ft);
1443 DCHECK(!fr.is(scratch) && !fs.is(scratch) && !ft.is(scratch));
1444 mul_d(scratch, fs, ft);
1445 add_d(fd, fr, scratch);
1450 void MacroAssembler::BranchF(Label* target,
1456 BlockTrampolinePoolScope block_trampoline_pool(this);
1466 c(UN, D, cmp1, cmp2);
1472 cmp(UN, L, f31, cmp1, cmp2);
1483 c(OLT, D, cmp1, cmp2);
1487 c(ULE, D, cmp1, cmp2);
1491 c(ULT, D, cmp1, cmp2);
1495 c(OLE, D, cmp1, cmp2);
1499 c(EQ, D, cmp1, cmp2);
1503 c(UEQ, D, cmp1, cmp2);
1507 c(EQ, D, cmp1, cmp2);
1511 c(UEQ, D, cmp1, cmp2);
1528 bc1nez(target, f31);
1532 bc1eqz(target, f31);
1536 bc1eqz(target, f31);
1540 bc1nez(target, f31);
1543 cmp(EQ, L, f31, cmp1, cmp2);
1544 bc1nez(target, f31);
1548 bc1nez(target, f31);
1551 cmp(EQ, L, f31, cmp1, cmp2);
1552 bc1eqz(target, f31);
1556 bc1eqz(target, f31);
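// BranchF sketch (assumed): an unordered compare (the UN cases above) first
// routes NaN operands to the nan label, then the requested condition is
// emitted. Pre-r6 cores use c.cond.d plus bc1t/bc1f on the FP condition flag;
// r6 cores use cmp.cond.d into an FPU register (f31 here) plus bc1nez/bc1eqz.
// Inverted conditions reuse the complementary compare and branch on the
// opposite flag state.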
1570 void MacroAssembler::Move(FPURegister dst, double imm) {
1571 static const DoubleRepresentation minus_zero(-0.0);
1572 static const DoubleRepresentation zero(0.0);
1573 DoubleRepresentation value_rep(imm);
1576 if (value_rep == zero && !force_load) {
1578 } else if (value_rep == minus_zero && !force_load) {
1582 DoubleAsTwoUInt32(imm, &lo, &hi);
1586 li(at, Operand(lo));
1589 mtc1(zero_reg, dst);
1594 li(at, Operand(hi));
1597 mthc1(zero_reg, dst);
1603 void MacroAssembler::Movz(Register rd, Register rs, Register rt) {
1606 Branch(&done, ne, rt, Operand(zero_reg));
1615 void MacroAssembler::Movn(Register rd, Register rs, Register rt) {
1618 Branch(&done, eq, rt, Operand(zero_reg));
1627 void MacroAssembler::Movt(Register rd, Register rs, uint16_t cc) {
1632 void MacroAssembler::Movf(Register rd, Register rs, uint16_t cc) {
1637 void MacroAssembler::Clz(Register rd, Register rs) {
1647 Register except_flag,
1649 DCHECK(!result.is(scratch));
1650 DCHECK(!double_input.is(double_scratch));
1651 DCHECK(!except_flag.is(scratch));
1656 mov(except_flag, zero_reg);
1659 cvt_w_d(double_scratch, double_input);
1660 mfc1(result, double_scratch);
1661 cvt_d_w(double_scratch, double_scratch);
1662 BranchF(&done, NULL, eq, double_input, double_scratch);
1672 cfc1(scratch, FCSR);
1674 ctc1(zero_reg, FCSR);
1677 switch (rounding_mode) {
1679 Round_w_d(double_scratch, double_input);
1682 Trunc_w_d(double_scratch, double_input);
1685 Ceil_w_d(double_scratch, double_input);
1688 Floor_w_d(double_scratch, double_input);
1693 cfc1(except_flag, FCSR);
1695 ctc1(scratch, FCSR);
1697 mfc1(result, double_scratch);
1700 And(except_flag, except_flag, Operand(except_mask));
1706 void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
1710 Register scratch = at;
1711 Register scratch2 = t9;
1714 cfc1(scratch2, FCSR);
1715 ctc1(zero_reg, FCSR);
1717 trunc_w_d(single_scratch, double_input);
1718 mfc1(result, single_scratch);
1720 cfc1(scratch, FCSR);
1721 ctc1(scratch2, FCSR);
1727 Branch(done, eq, scratch, Operand(zero_reg));
1731 void MacroAssembler::TruncateDoubleToI(Register result,
1735 TryInlineTruncateDoubleToI(result, double_input, &done);
1742 DoubleToIStub stub(isolate(), sp, result, 0, true, true);
1752 void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) {
1755 DCHECK(!result.is(object));
1757 ldc1(double_scratch,
1759 TryInlineTruncateDoubleToI(result, double_scratch, &done);
1763 DoubleToIStub stub(isolate(),
1776 void MacroAssembler::TruncateNumberToI(Register object,
1778 Register heap_number_map,
1780 Label* not_number) {
1782 DCHECK(!result.is(object));
1784 UntagAndJumpIfSmi(result, object, &done);
1785 JumpIfNotHeapNumber(object, heap_number_map, scratch, not_number);
1786 TruncateHeapNumberToI(result, object);
1792 void MacroAssembler::GetLeastBitsFromSmi(Register dst,
1794 int num_least_bits) {
1797 And(dst, dst, Operand((1 << num_least_bits) - 1));
1801 void MacroAssembler::GetLeastBitsFromInt32(Register dst,
1803 int num_least_bits) {
1805 And(dst, src, Operand((1 << num_least_bits) - 1));
1812 #define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \
1813 (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
1814 (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))
1818 BranchShort(offset, bdslot);
1825 BranchShort(offset, cond, rs, rt, bdslot);
1830 if (L->is_bound()) {
1832 BranchShort(L, bdslot);
1837 if (is_trampoline_emitted()) {
1840 BranchShort(L, bdslot);
1846 void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
1849 if (L->is_bound()) {
1851 BranchShort(L, cond, rs, rt, bdslot);
1856 BranchShort(&skip, neg_cond, rs, rt);
1864 if (is_trampoline_emitted()) {
1868 BranchShort(&skip, neg_cond, rs, rt);
1875 BranchShort(L, cond, rs, rt, bdslot);
1881 void MacroAssembler::Branch(Label* L,
1884 Heap::RootListIndex index,
1886 LoadRoot(at, index);
1887 Branch(L, cond, rs, Operand(at), bdslot);
1900 void MacroAssembler::BranchShort(int16_t offset, Condition cond, Register rs,
1903 BRANCH_ARGS_CHECK(cond, rs, rt);
1904 DCHECK(!rs.is(zero_reg));
1906 Register scratch = at;
1911 BlockTrampolinePoolScope block_trampoline_pool(this);
1918 beq(rs, r2, offset);
1921 bne(rs, r2, offset);
1925 if (r2.is(zero_reg)) {
1928 slt(scratch, r2, rs);
1929 bne(scratch, zero_reg, offset);
1933 if (r2.is(zero_reg)) {
1936 slt(scratch, rs, r2);
1937 beq(scratch, zero_reg, offset);
1941 if (r2.is(zero_reg)) {
1944 slt(scratch, rs, r2);
1945 bne(scratch, zero_reg, offset);
1949 if (r2.is(zero_reg)) {
1952 slt(scratch, r2, rs);
1953 beq(scratch, zero_reg, offset);
1958 if (r2.is(zero_reg)) {
1961 sltu(scratch, r2, rs);
1962 bne(scratch, zero_reg, offset);
1966 if (r2.is(zero_reg)) {
1969 sltu(scratch, rs, r2);
1970 beq(scratch, zero_reg, offset);
1974 if (r2.is(zero_reg)) {
1978 sltu(scratch, rs, r2);
1979 bne(scratch, zero_reg, offset);
1983 if (r2.is(zero_reg)) {
1986 sltu(scratch, r2, rs);
1987 beq(scratch, zero_reg, offset);
1997 BlockTrampolinePoolScope block_trampoline_pool(
this);
2007 beq(rs,
r2, offset);
2014 bne(rs,
r2, offset);
2018 if (rt.imm64_ == 0) {
2023 slt(scratch,
r2, rs);
2024 bne(scratch, zero_reg, offset);
2028 if (rt.imm64_ == 0) {
2030 }
else if (is_int16(rt.imm64_)) {
2031 slti(scratch, rs, rt.imm64_);
2032 beq(scratch, zero_reg, offset);
2036 slt(scratch, rs,
r2);
2037 beq(scratch, zero_reg, offset);
2041 if (rt.imm64_ == 0) {
2043 }
else if (is_int16(rt.imm64_)) {
2044 slti(scratch, rs, rt.imm64_);
2045 bne(scratch, zero_reg, offset);
2049 slt(scratch, rs,
r2);
2050 bne(scratch, zero_reg, offset);
2054 if (rt.imm64_ == 0) {
2059 slt(scratch,
r2, rs);
2060 beq(scratch, zero_reg, offset);
2065 if (rt.imm64_ == 0) {
2070 sltu(scratch,
r2, rs);
2071 bne(scratch, zero_reg, offset);
2075 if (rt.imm64_ == 0) {
2077 }
else if (is_int16(rt.imm64_)) {
2078 sltiu(scratch, rs, rt.imm64_);
2079 beq(scratch, zero_reg, offset);
2083 sltu(scratch, rs,
r2);
2084 beq(scratch, zero_reg, offset);
2088 if (rt.imm64_ == 0) {
2091 }
else if (is_int16(rt.imm64_)) {
2092 sltiu(scratch, rs, rt.imm64_);
2093 bne(scratch, zero_reg, offset);
2097 sltu(scratch, rs,
r2);
2098 bne(scratch, zero_reg, offset);
2102 if (rt.imm64_ == 0) {
2107 sltu(scratch,
r2, rs);
2108 beq(scratch, zero_reg, offset);
2125 b(shifted_branch_offset(
L,
false));
2133 void MacroAssembler::BranchShort(Label*
L,
Condition cond, Register rs,
2136 BRANCH_ARGS_CHECK(cond, rs, rt);
2140 Register scratch = at;
2142 BlockTrampolinePoolScope block_trampoline_pool(
this);
2149 offset = shifted_branch_offset(
L,
false);
2153 offset = shifted_branch_offset(
L,
false);
2154 beq(rs,
r2, offset);
2157 offset = shifted_branch_offset(
L,
false);
2158 bne(rs,
r2, offset);
2162 if (
r2.
is(zero_reg)) {
2163 offset = shifted_branch_offset(
L,
false);
2166 slt(scratch,
r2, rs);
2167 offset = shifted_branch_offset(
L,
false);
2168 bne(scratch, zero_reg, offset);
2172 if (
r2.
is(zero_reg)) {
2173 offset = shifted_branch_offset(
L,
false);
2176 slt(scratch, rs,
r2);
2177 offset = shifted_branch_offset(
L,
false);
2178 beq(scratch, zero_reg, offset);
2182 if (
r2.
is(zero_reg)) {
2183 offset = shifted_branch_offset(
L,
false);
2186 slt(scratch, rs,
r2);
2187 offset = shifted_branch_offset(
L,
false);
2188 bne(scratch, zero_reg, offset);
2192 if (
r2.
is(zero_reg)) {
2193 offset = shifted_branch_offset(
L,
false);
2196 slt(scratch,
r2, rs);
2197 offset = shifted_branch_offset(
L,
false);
2198 beq(scratch, zero_reg, offset);
2203 if (
r2.
is(zero_reg)) {
2204 offset = shifted_branch_offset(
L,
false);
2207 sltu(scratch,
r2, rs);
2208 offset = shifted_branch_offset(
L,
false);
2209 bne(scratch, zero_reg, offset);
2213 if (
r2.
is(zero_reg)) {
2214 offset = shifted_branch_offset(
L,
false);
2217 sltu(scratch, rs,
r2);
2218 offset = shifted_branch_offset(
L,
false);
2219 beq(scratch, zero_reg, offset);
2223 if (
r2.
is(zero_reg)) {
2227 sltu(scratch, rs,
r2);
2228 offset = shifted_branch_offset(
L,
false);
2229 bne(scratch, zero_reg, offset);
2233 if (
r2.
is(zero_reg)) {
2234 offset = shifted_branch_offset(
L,
false);
2237 sltu(scratch,
r2, rs);
2238 offset = shifted_branch_offset(
L,
false);
2239 beq(scratch, zero_reg, offset);
2249 BlockTrampolinePoolScope block_trampoline_pool(
this);
2252 offset = shifted_branch_offset(
L,
false);
2259 offset = shifted_branch_offset(
L,
false);
2260 beq(rs,
r2, offset);
2266 offset = shifted_branch_offset(
L,
false);
2267 bne(rs,
r2, offset);
2271 if (rt.imm64_ == 0) {
2272 offset = shifted_branch_offset(
L,
false);
2278 slt(scratch,
r2, rs);
2279 offset = shifted_branch_offset(
L,
false);
2280 bne(scratch, zero_reg, offset);
2284 if (rt.imm64_ == 0) {
2285 offset = shifted_branch_offset(
L,
false);
2287 }
else if (is_int16(rt.imm64_)) {
2288 slti(scratch, rs, rt.imm64_);
2289 offset = shifted_branch_offset(
L,
false);
2290 beq(scratch, zero_reg, offset);
2295 slt(scratch, rs,
r2);
2296 offset = shifted_branch_offset(
L,
false);
2297 beq(scratch, zero_reg, offset);
2301 if (rt.imm64_ == 0) {
2302 offset = shifted_branch_offset(
L,
false);
2304 }
else if (is_int16(rt.imm64_)) {
2305 slti(scratch, rs, rt.imm64_);
2306 offset = shifted_branch_offset(
L,
false);
2307 bne(scratch, zero_reg, offset);
2312 slt(scratch, rs,
r2);
2313 offset = shifted_branch_offset(
L,
false);
2314 bne(scratch, zero_reg, offset);
2318 if (rt.imm64_ == 0) {
2319 offset = shifted_branch_offset(
L,
false);
2325 slt(scratch,
r2, rs);
2326 offset = shifted_branch_offset(
L,
false);
2327 beq(scratch, zero_reg, offset);
2332 if (rt.imm64_ == 0) {
2333 offset = shifted_branch_offset(
L,
false);
2334 bne(rs, zero_reg, offset);
2339 sltu(scratch,
r2, rs);
2340 offset = shifted_branch_offset(
L,
false);
2341 bne(scratch, zero_reg, offset);
2345 if (rt.imm64_ == 0) {
2346 offset = shifted_branch_offset(
L,
false);
2348 }
else if (is_int16(rt.imm64_)) {
2349 sltiu(scratch, rs, rt.imm64_);
2350 offset = shifted_branch_offset(
L,
false);
2351 beq(scratch, zero_reg, offset);
2356 sltu(scratch, rs,
r2);
2357 offset = shifted_branch_offset(
L,
false);
2358 beq(scratch, zero_reg, offset);
2362 if (rt.imm64_ == 0) {
2365 }
else if (is_int16(rt.imm64_)) {
2366 sltiu(scratch, rs, rt.imm64_);
2367 offset = shifted_branch_offset(
L,
false);
2368 bne(scratch, zero_reg, offset);
2373 sltu(scratch, rs,
r2);
2374 offset = shifted_branch_offset(
L,
false);
2375 bne(scratch, zero_reg, offset);
2379 if (rt.imm64_ == 0) {
2380 offset = shifted_branch_offset(
L,
false);
2381 beq(rs, zero_reg, offset);
2386 sltu(scratch,
r2, rs);
2387 offset = shifted_branch_offset(
L,
false);
2388 beq(scratch, zero_reg, offset);
2396 DCHECK(is_int16(offset));
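// Range note (sketch): a short branch encodes an 18-bit signed, word-aligned
// displacement, hence the is_int16 check on the shifted offset. Branch() and
// BranchAndLink() fall back to a long sequence (jump or trampoline pool entry)
// when the label is too far away or not yet bound.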
2404 BranchAndLinkShort(offset, bdslot);
2408 void MacroAssembler::BranchAndLink(
int16_t offset,
Condition cond, Register rs,
2411 BranchAndLinkShort(offset, cond, rs, rt, bdslot);
2416 if (
L->is_bound()) {
2418 BranchAndLinkShort(
L, bdslot);
2423 if (is_trampoline_emitted()) {
2426 BranchAndLinkShort(
L, bdslot);
2432 void MacroAssembler::BranchAndLink(Label*
L,
Condition cond, Register rs,
2435 if (
L->is_bound()) {
2437 BranchAndLinkShort(
L, cond, rs, rt, bdslot);
2441 BranchShort(&skip, neg_cond, rs, rt);
2446 if (is_trampoline_emitted()) {
2449 BranchShort(&skip, neg_cond, rs, rt);
2453 BranchAndLinkShort(
L, cond, rs, rt, bdslot);
2462 void MacroAssembler::BranchAndLinkShort(
int16_t offset,
2473 Register rs,
const Operand& rt,
2475 BRANCH_ARGS_CHECK(cond, rs, rt);
2477 Register scratch = at;
2487 BlockTrampolinePoolScope block_trampoline_pool(
this);
2506 slt(scratch,
r2, rs);
2507 beq(scratch, zero_reg, 2);
2513 slt(scratch, rs,
r2);
2514 bne(scratch, zero_reg, 2);
2520 slt(scratch, rs,
r2);
2521 bne(scratch, zero_reg, 2);
2527 slt(scratch,
r2, rs);
2528 bne(scratch, zero_reg, 2);
2537 sltu(scratch,
r2, rs);
2538 beq(scratch, zero_reg, 2);
2544 sltu(scratch, rs,
r2);
2545 bne(scratch, zero_reg, 2);
2551 sltu(scratch, rs,
r2);
2552 bne(scratch, zero_reg, 2);
2558 sltu(scratch,
r2, rs);
2559 bne(scratch, zero_reg, 2);
2573 void MacroAssembler::BranchAndLinkShort(Label*
L,
BranchDelaySlot bdslot) {
2574 bal(shifted_branch_offset(
L,
false));
2582 void MacroAssembler::BranchAndLinkShort(Label*
L,
Condition cond, Register rs,
2585 BRANCH_ARGS_CHECK(cond, rs, rt);
2589 Register scratch = at;
2598 BlockTrampolinePoolScope block_trampoline_pool(
this);
2601 offset = shifted_branch_offset(
L,
false);
2607 offset = shifted_branch_offset(
L,
false);
2613 offset = shifted_branch_offset(
L,
false);
2620 slt(scratch,
r2, rs);
2621 beq(scratch, zero_reg, 2);
2623 offset = shifted_branch_offset(
L,
false);
2628 slt(scratch, rs,
r2);
2629 bne(scratch, zero_reg, 2);
2631 offset = shifted_branch_offset(
L,
false);
2636 slt(scratch, rs,
r2);
2637 bne(scratch, zero_reg, 2);
2639 offset = shifted_branch_offset(
L,
false);
2644 slt(scratch,
r2, rs);
2645 bne(scratch, zero_reg, 2);
2647 offset = shifted_branch_offset(
L,
false);
2655 sltu(scratch,
r2, rs);
2656 beq(scratch, zero_reg, 2);
2658 offset = shifted_branch_offset(
L,
false);
2663 sltu(scratch, rs,
r2);
2664 bne(scratch, zero_reg, 2);
2666 offset = shifted_branch_offset(
L,
false);
2671 sltu(scratch, rs,
r2);
2672 bne(scratch, zero_reg, 2);
2674 offset = shifted_branch_offset(
L,
false);
2679 sltu(scratch,
r2, rs);
2680 bne(scratch, zero_reg, 2);
2682 offset = shifted_branch_offset(
L,
false);
2691 DCHECK(is_int16(offset));
2699 void MacroAssembler::Jump(Register target,
2704 BlockTrampolinePoolScope block_trampoline_pool(this);
2708 BRANCH_ARGS_CHECK(cond, rs, rt);
2718 void MacroAssembler::Jump(intptr_t target,
2719 RelocInfo::Mode rmode,
2730 li(t9, Operand(target, rmode));
2731 Jump(t9, al, zero_reg, Operand(zero_reg), bd);
2736 void MacroAssembler::Jump(Address target,
2737 RelocInfo::Mode rmode,
2742 DCHECK(!RelocInfo::IsCodeTarget(rmode));
2743 Jump(reinterpret_cast<intptr_t>(target), rmode, cond, rs, rt, bd);
2747 void MacroAssembler::Jump(Handle<Code> code,
2748 RelocInfo::Mode rmode,
2753 DCHECK(RelocInfo::IsCodeTarget(rmode));
2755 Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond, rs, rt, bd);
2759 int MacroAssembler::CallSize(Register target,
2775 return size * kInstrSize;
2780 void MacroAssembler::Call(Register target,
2785 BlockTrampolinePoolScope block_trampoline_pool(
this);
2791 BRANCH_ARGS_CHECK(cond, rs, rt);
2799 DCHECK_EQ(CallSize(target, cond, rs, rt, bd),
2800 SizeOfCodeGeneratedSince(&start));
2804 int MacroAssembler::CallSize(
Address target,
2805 RelocInfo::Mode rmode,
2810 int size = CallSize(t9, cond, rs, rt, bd);
2811 return size + 4 * kInstrSize;
2815 void MacroAssembler::Call(Address target,
2816 RelocInfo::Mode rmode,
2821 BlockTrampolinePoolScope block_trampoline_pool(this);
2824 int64_t target_int = reinterpret_cast<int64_t>(target);
2827 positions_recorder()->WriteRecordedPositions();
2829 Call(t9, cond, rs, rt, bd);
2830 DCHECK_EQ(CallSize(target, rmode, cond, rs, rt, bd),
2831 SizeOfCodeGeneratedSince(&start));
2835 int MacroAssembler::CallSize(Handle<Code> code,
2836 RelocInfo::Mode rmode,
2837 TypeFeedbackId ast_id,
2843 return CallSize(
reinterpret_cast<Address>(code.location()),
2844 rmode, cond, rs, rt, bd);
2848 void MacroAssembler::Call(Handle<Code> code,
2849 RelocInfo::Mode rmode,
2850 TypeFeedbackId ast_id,
2855 BlockTrampolinePoolScope block_trampoline_pool(
this);
2858 DCHECK(RelocInfo::IsCodeTarget(rmode));
2859 if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
2860 SetRecordedAstId(ast_id);
2861 rmode = RelocInfo::CODE_TARGET_WITH_ID;
2864 Call(reinterpret_cast<Address>(code.location()), rmode, cond, rs, rt, bd);
2865 DCHECK_EQ(CallSize(code, rmode, ast_id, cond, rs, rt, bd),
2866 SizeOfCodeGeneratedSince(&start));
2870 void MacroAssembler::Ret(Condition cond,
2874 Jump(ra, cond, rs, rt, bd);
2879 BlockTrampolinePoolScope block_trampoline_pool(this);
2882 imm28 = jump_address(L);
2884 { BlockGrowBufferScope block_buf_growth(this);
2887 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
2897 BlockTrampolinePoolScope block_trampoline_pool(this);
2900 imm64 = jump_address(L);
2901 { BlockGrowBufferScope block_buf_growth(this);
2904 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
2916 BlockTrampolinePoolScope block_trampoline_pool(this);
2919 imm64 = jump_address(L);
2920 { BlockGrowBufferScope block_buf_growth(this);
2923 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
2934 void MacroAssembler::DropAndRet(int drop) {
2939 void MacroAssembler::DropAndRet(int drop,
2942 const Operand& r2) {
2958 void MacroAssembler::Drop(int count,
2961 const Operand& op) {
2981 void MacroAssembler::Swap(Register reg1,
2984 if (scratch.is(no_reg)) {
2985 Xor(reg1, reg1, Operand(reg2));
2986 Xor(reg2, reg2, Operand(reg1));
2987 Xor(reg1, reg1, Operand(reg2));
2996 void MacroAssembler::Call(Label* target) {
2997 BranchAndLink(target);
3007 void MacroAssembler::PushRegisterAsTwoSmis(Register src, Register scratch) {
3008 DCHECK(!src.is(scratch));
3010 dsrl32(src, src, 0);
3011 dsll32(src, src, 0);
3013 dsll32(scratch, scratch, 0);
3018 void MacroAssembler::PopRegisterAsTwoSmis(Register dst, Register scratch) {
3019 DCHECK(!dst.is(scratch));
3021 dsrl32(scratch, scratch, 0);
3023 dsrl32(dst, dst, 0);
3024 dsll32(dst, dst, 0);
3025 or_(dst, dst, scratch);
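// Rationale (sketch): a raw 64-bit value must not sit on the stack across a GC,
// so PushRegisterAsTwoSmis splits it into two halves, shifts each into the
// upper 32 bits where smi payloads live, and pushes two valid smis;
// PopRegisterAsTwoSmis undoes the shifts and ors the halves back together.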
3029 void MacroAssembler::DebugBreak() {
3030 PrepareCEntryArgs(0);
3031 PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate()));
3032 CEntryStub ces(isolate(), 1);
3033 DCHECK(AllowThisStubCall(&ces));
3041 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
3042 int handler_index) {
3056 StackHandler::IndexField::encode(handler_index) |
3057 StackHandler::KindField::encode(kind);
3059 li(a6, Operand(state));
3062 if (kind == StackHandler::JS_ENTRY) {
3067 Push(zero_reg, zero_reg, a6, a5);
3069 MultiPush(a5.bit() | a6.bit() | cp.bit() | fp.bit());
3073 li(a6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
3081 void MacroAssembler::PopTryHandler() {
3085 li(at, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
3090 void MacroAssembler::JumpToHandlerEntry() {
3095 Daddu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3096 dsrl(a2, a2, StackHandler::kKindWidth);
3120 li(a3, Operand(ExternalReference(Isolate::kHandlerAddress,
3130 MultiPop(a1.bit() | a2.bit() | cp.bit() | fp.bit());
3136 Branch(&done, eq, cp, Operand(zero_reg));
3140 JumpToHandlerEntry();
3144 void MacroAssembler::ThrowUncatchable(Register value) {
3154 if (!value.is(v0)) {
3158 li(a3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
3162 Label fetch_next, check_kind;
3169 ld(a2, MemOperand(sp, StackHandlerConstants::kStateOffset));
3170 And(a2, a2, Operand(StackHandler::KindField::kMask));
3171 Branch(&fetch_next, ne, a2, Operand(zero_reg));
3179 MultiPop(a1.bit() | a2.bit() | cp.bit() | fp.bit());
3181 JumpToHandlerEntry();
3185 void MacroAssembler::Allocate(int object_size,
3191 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
3192 if (!FLAG_inline_new) {
3193 if (emit_debug_code()) {
3196 li(scratch1, 0x7191);
3197 li(scratch2, 0x7291);
3203 DCHECK(!result.is(scratch1));
3204 DCHECK(!result.is(scratch2));
3205 DCHECK(!scratch1.is(scratch2));
3206 DCHECK(!scratch1.is(t9));
3207 DCHECK(!scratch2.is(t9));
3219 ExternalReference allocation_top =
3220 AllocationUtils::GetAllocationTopReference(isolate(), flags);
3221 ExternalReference allocation_limit =
3222 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
3225 reinterpret_cast<intptr_t>(allocation_top.address());
3227 reinterpret_cast<intptr_t>(allocation_limit.address());
3231 Register topaddr = scratch1;
3232 li(topaddr, Operand(allocation_top));
3240 if (emit_debug_code()) {
3245 Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
3252 if (emit_debug_code()) {
3254 Check(eq, kAllocationIsNotDoubleAligned, at, Operand(zero_reg));
3259 Daddu(scratch2, result, Operand(object_size));
3260 Branch(gc_required, Ugreater, scratch2, Operand(t9));
3270 void MacroAssembler::Allocate(Register object_size,
3276 if (!FLAG_inline_new) {
3277 if (emit_debug_code()) {
3280 li(scratch1, 0x7191);
3281 li(scratch2, 0x7291);
3287 DCHECK(!result.is(scratch1));
3288 DCHECK(!result.is(scratch2));
3289 DCHECK(!scratch1.is(scratch2));
3290 DCHECK(!object_size.is(t9));
3291 DCHECK(!scratch1.is(t9) && !scratch2.is(t9) && !result.is(t9));
3296 ExternalReference allocation_top =
3297 AllocationUtils::GetAllocationTopReference(isolate(),
flags);
3298 ExternalReference allocation_limit =
3299 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
3301 reinterpret_cast<intptr_t
>(allocation_top.address());
3303 reinterpret_cast<intptr_t
>(allocation_limit.address());
3307 Register topaddr = scratch1;
3308 li(topaddr, Operand(allocation_top));
3316 if (emit_debug_code()) {
3321 Check(
eq, kUnexpectedAllocationTop, result, Operand(t9));
3328 if (emit_debug_code()) {
3330 Check(
eq, kAllocationIsNotDoubleAligned, at, Operand(zero_reg));
3338 Daddu(scratch2, result, scratch2);
3340 Daddu(scratch2, result, Operand(object_size));
3342 Branch(gc_required,
Ugreater, scratch2, Operand(t9));
3345 if (emit_debug_code()) {
3347 Check(
eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
3358 void MacroAssembler::UndoAllocationInNewSpace(Register object,
3360 ExternalReference new_space_allocation_top =
3361 ExternalReference::new_space_allocation_top_address(isolate());
3367 li(scratch, Operand(new_space_allocation_top));
3369 Check(less, kUndoAllocationOfNonAllocatedMemory,
3370 object, Operand(scratch));
3373 li(scratch, Operand(new_space_allocation_top));
3378 void MacroAssembler::AllocateTwoByteString(Register result,
3383 Label* gc_required) {
3387 dsll(scratch1, length, 1);
3388 daddiu(scratch1, scratch1,
3401 InitializeNewString(result,
3403 Heap::kStringMapRootIndex,
3409 void MacroAssembler::AllocateOneByteString(Register result, Register length,
3410 Register scratch1, Register scratch2,
3412 Label* gc_required) {
3417 daddiu(scratch1, length,
3430 InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
3431 scratch1, scratch2);
3435 void MacroAssembler::AllocateTwoByteConsString(Register result,
3439 Label* gc_required) {
3440 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
3442 InitializeNewString(result,
3444 Heap::kConsStringMapRootIndex,
3450 void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
3453 Label* gc_required) {
3454 Allocate(ConsString::kSize,
3461 InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
3462 scratch1, scratch2);
3466 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
3470 Label* gc_required) {
3471 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
3474 InitializeNewString(result,
3476 Heap::kSlicedStringMapRootIndex,
3482 void MacroAssembler::AllocateOneByteSlicedString(Register result,
3486 Label* gc_required) {
3487 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
3490 InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
3491 scratch1, scratch2);
3495 void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
3496 Label* not_unique_name) {
3500 Branch(&succeed,
eq, at, Operand(zero_reg));
3509 void MacroAssembler::AllocateHeapNumber(Register result,
3512 Register heap_number_map,
3518 Allocate(HeapNumber::kSize, result, scratch1, scratch2, need_gc,
3522 ? Heap::kMutableHeapNumberMapRootIndex
3523 : Heap::kHeapNumberMapRootIndex;
3524 AssertIsRoot(heap_number_map, map_index);
3530 sd(heap_number_map,
MemOperand(result, HeapObject::kMapOffset));
3535 void MacroAssembler::AllocateHeapNumberWithValue(Register result,
3539 Label* gc_required) {
3540 LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
3541 AllocateHeapNumber(result, scratch1, scratch2, t8, gc_required);
3547 void MacroAssembler::CopyFields(Register dst,
3551 DCHECK((temps & dst.bit()) == 0);
3552 DCHECK((temps & src.bit()) == 0);
3558 if ((temps & (1 << i)) != 0) {
3565 for (int i = 0; i < field_count; i++) {
3576 Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;
3580 bind(&align_loop_1);
3582 Branch(&word_loop, eq, scratch, Operand(zero_reg));
3587 Dsubu(length, length, Operand(1));
3588 Branch(&align_loop_1, ne, length, Operand(zero_reg));
3592 if (emit_debug_code()) {
3594 Assert(eq, kExpectingAlignmentForCopyBytes,
3595 scratch, Operand(zero_reg));
3604 dsrl(scratch, scratch, 8);
3606 dsrl(scratch, scratch, 8);
3608 dsrl(scratch, scratch, 8);
3610 dsrl(scratch, scratch, 8);
3612 dsrl(scratch, scratch, 8);
3614 dsrl(scratch, scratch, 8);
3616 dsrl(scratch, scratch, 8);
3625 Branch(&done, eq, length, Operand(zero_reg));
3631 Dsubu(length, length, Operand(1));
3632 Branch(&byte_loop_1, ne, length, Operand(zero_reg));
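// CopyBytes sketch (assumed): the first loop copies byte-by-byte until the
// source is word aligned, the word loop then loads a whole word and stores it
// one byte at a time (the repeated dsrl(scratch, scratch, 8) steps above), and
// a trailing byte loop handles any remainder shorter than a word.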
3637 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
3638 Register end_offset,
3646 Branch(&loop, lt, start_offset, Operand(end_offset));
3650 void MacroAssembler::CheckFastElements(Register map,
3658 Branch(fail, hi, scratch,
3659 Operand(Map::kMaximumBitField2FastHoleyElementValue));
3663 void MacroAssembler::CheckFastObjectElements(Register map,
3671 Branch(fail, ls, scratch,
3672 Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
3673 Branch(fail, hi, scratch,
3674 Operand(Map::kMaximumBitField2FastHoleyElementValue));
3678 void MacroAssembler::CheckFastSmiElements(Register map,
3684 Branch(fail, hi, scratch,
3685 Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
3689 void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
3691 Register elements_reg,
3696 int elements_offset) {
3697 Label smi_value, maybe_nan, have_double_value, is_nan, done;
3698 Register mantissa_reg = scratch2;
3699 Register exponent_reg = scratch3;
3702 JumpIfSmi(value_reg, &smi_value);
3707 Heap::kHeapNumberMapRootIndex,
3714 lw(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
3715 Branch(&maybe_nan, ge, exponent_reg, Operand(scratch1));
3717 lwu(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
3719 bind(&have_double_value);
3722 Daddu(scratch1, scratch1, elements_reg);
3724 scratch1, FixedDoubleArray::kHeaderSize - elements_offset));
3725 uint32_t offset = FixedDoubleArray::kHeaderSize - elements_offset +
3733 lw(mantissa_reg,
FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
3734 Branch(&have_double_value,
eq, mantissa_reg, Operand(zero_reg));
3737 LoadRoot(at, Heap::kNanValueRootIndex);
3740 jmp(&have_double_value);
3743 Daddu(scratch1, elements_reg,
3748 Daddu(scratch1, scratch1, scratch2);
3751 Register untagged_value = elements_reg;
3752 SmiUntag(untagged_value, value_reg);
3753 mtc1(untagged_value,
f2);
3760 void MacroAssembler::CompareMapAndBranch(Register obj,
3763 Label* early_success,
3767 CompareMapAndBranch(scratch,
map, early_success, cond, branch_to);
3771 void MacroAssembler::CompareMapAndBranch(Register obj_map,
3773 Label* early_success,
3776 Branch(branch_to, cond, obj_map, Operand(
map));
3780 void MacroAssembler::CheckMap(Register obj,
3786 JumpIfSmi(obj, fail);
3789 CompareMapAndBranch(obj, scratch, map, &success, ne, fail);
3794 void MacroAssembler::DispatchMap(Register obj,
3797 Handle<Code> success,
3801 JumpIfSmi(obj, &fail);
3804 Jump(success, RelocInfo::CODE_TARGET, eq, scratch, Operand(map));
3809 void MacroAssembler::CheckMap(Register obj,
3811 Heap::RootListIndex index,
3815 JumpIfSmi(obj, fail);
3818 LoadRoot(at, index);
3819 Branch(fail,
ne, scratch, Operand(at));
3823 void MacroAssembler::MovFromFloatResult(const DoubleRegister dst) {
3832 void MacroAssembler::MovFromFloatParameter(const DoubleRegister dst) {
3864 DCHECK(!src1.is(fparg2));
3881 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
3882 const ParameterCount& actual,
3883 Handle<Code> code_constant,
3886 bool* definitely_mismatches,
3888 const CallWrapper& call_wrapper) {
3889 bool definitely_matches = false;
3890 *definitely_mismatches = false;
3891 Label regular_invoke;
3902 DCHECK(actual.is_immediate() || actual.reg().is(a0));
3903 DCHECK(expected.is_immediate() || expected.reg().is(a2));
3904 DCHECK((!code_constant.is_null() && code_reg.is(
no_reg)) || code_reg.is(a3));
3906 if (expected.is_immediate()) {
3907 DCHECK(actual.is_immediate());
3908 if (expected.immediate() == actual.immediate()) {
3909 definitely_matches =
true;
3911 li(a0, Operand(actual.immediate()));
3912 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
3913 if (expected.immediate() == sentinel) {
3918 definitely_matches = true;
3920 *definitely_mismatches = true;
3921 li(a2, Operand(expected.immediate()));
3924 } else if (actual.is_immediate()) {
3925 Branch(&regular_invoke, eq, expected.reg(), Operand(actual.immediate()));
3926 li(a0, Operand(actual.immediate()));
3928 Branch(®ular_invoke,
eq, expected.reg(), Operand(actual.reg()));
3931 if (!definitely_matches) {
3932 if (!code_constant.is_null()) {
3933 li(a3, Operand(code_constant));
3937 Handle<Code> adaptor =
3938 isolate()->builtins()->ArgumentsAdaptorTrampoline();
3940 call_wrapper.BeforeCall(CallSize(adaptor));
3942 call_wrapper.AfterCall();
3943 if (!*definitely_mismatches) {
3947 Jump(adaptor, RelocInfo::CODE_TARGET);
3949 bind(®ular_invoke);
3954 void MacroAssembler::InvokeCode(Register code,
3955 const ParameterCount& expected,
3956 const ParameterCount& actual,
3958 const CallWrapper& call_wrapper) {
3964 bool definitely_mismatches = false;
3965 InvokePrologue(expected, actual, Handle<Code>::null(), code,
3966 &done, &definitely_mismatches, flag,
3968 if (!definitely_mismatches) {
3970 call_wrapper.BeforeCall(CallSize(code));
3972 call_wrapper.AfterCall();
3984 void MacroAssembler::InvokeFunction(Register function,
3985 const ParameterCount& actual,
3987 const CallWrapper& call_wrapper) {
3993 Register expected_reg = a2;
3994 Register code_reg = a3;
3995 ld(code_reg, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
4001 SharedFunctionInfo::kFormalParameterCountOffset));
4003 ParameterCount expected(expected_reg);
4004 InvokeCode(code_reg, expected, actual,
flag, call_wrapper);
4008 void MacroAssembler::InvokeFunction(Register function,
4009 const ParameterCount& expected,
4010 const ParameterCount& actual,
4012 const CallWrapper& call_wrapper) {
4026 InvokeCode(a3, expected, actual,
flag, call_wrapper);
4030 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
4031 const ParameterCount& expected,
4032 const ParameterCount& actual,
4034 const CallWrapper& call_wrapper) {
4036 InvokeFunction(a1, expected, actual,
flag, call_wrapper);
4040 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
4045 IsInstanceJSObjectType(
map, scratch, fail);
4049 void MacroAssembler::IsInstanceJSObjectType(Register
map,
4058 void MacroAssembler::IsObjectJSStringType(Register
object,
4066 Branch(fail,
ne, scratch, Operand(zero_reg));
4070 void MacroAssembler::IsObjectNameType(Register
object,
4083 void MacroAssembler::TryGetFunctionPrototype(Register
function,
4087 bool miss_on_bound_function) {
4089 if (miss_on_bound_function) {
4091 JumpIfSmi(function, miss);
4094 GetObjectType(function, result, scratch);
4101 And(scratch, scratch,
4102 Operand(1 << SharedFunctionInfo::kBoundFunction));
4103 Branch(miss,
ne, scratch, Operand(zero_reg));
4107 And(scratch, scratch, Operand(1 << Map::kHasNonInstancePrototype));
4108 Branch(&non_instance,
ne, scratch, Operand(zero_reg));
4118 LoadRoot(t8, Heap::kTheHoleValueRootIndex);
4119 Branch(miss,
eq, result, Operand(t8));
4123 GetObjectType(result, scratch, scratch);
4124 Branch(&done,
ne, scratch, Operand(
MAP_TYPE));
4129 if (miss_on_bound_function) {
4134 bind(&non_instance);
4143 void MacroAssembler::GetObjectType(Register object,
4145 Register type_reg) {
4154 void MacroAssembler::CallStub(CodeStub* stub,
4155 TypeFeedbackId ast_id,
4160 DCHECK(AllowThisStubCall(stub));
4161 Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id,
4166 void MacroAssembler::TailCallStub(CodeStub* stub,
4171 Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond, r1, r2, bd);
4175 static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
4176 int64_t offset = (ref0.address() - ref1.address());
4177 DCHECK(
static_cast<int>(offset) == offset);
4178 return static_cast<int>(offset);
4182 void MacroAssembler::CallApiFunctionAndReturn(
4183 Register function_address,
4184 ExternalReference thunk_ref,
4188 ExternalReference next_address =
4189 ExternalReference::handle_scope_next_address(isolate());
4190 const int kNextOffset = 0;
4191 const int kLimitOffset = AddressOffset(
4192 ExternalReference::handle_scope_limit_address(isolate()),
4194 const int kLevelOffset = AddressOffset(
4195 ExternalReference::handle_scope_level_address(isolate()),
4198 DCHECK(function_address.is(a1) || function_address.is(a2));
4200 Label profiler_disabled;
4201 Label end_profiler_check;
4202 li(t9, Operand(ExternalReference::is_profiling_address(isolate())));
4204 Branch(&profiler_disabled,
eq, t9, Operand(zero_reg));
4207 li(t9, Operand(thunk_ref));
4208 jmp(&end_profiler_check);
4210 bind(&profiler_disabled);
4211 mov(t9, function_address);
4212 bind(&end_profiler_check);
4215 li(s3, Operand(next_address));
4219 Daddu(s2, s2, Operand(1));
4222 if (FLAG_log_timer_events) {
4223 FrameScope frame(
this, StackFrame::MANUAL);
4224 PushSafepointRegisters();
4225 PrepareCallCFunction(1, a0);
4226 li(a0, Operand(ExternalReference::isolate_address(isolate())));
4227 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
4228 PopSafepointRegisters();
4234 DirectCEntryStub stub(isolate());
4235 stub.GenerateCall(
this, t9);
4237 if (FLAG_log_timer_events) {
4238 FrameScope frame(
this, StackFrame::MANUAL);
4239 PushSafepointRegisters();
4240 PrepareCallCFunction(1, a0);
4241 li(a0, Operand(ExternalReference::isolate_address(isolate())));
4242 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
4243 PopSafepointRegisters();
4246 Label promote_scheduled_exception;
4247 Label exception_handled;
4248 Label delete_allocated_handles;
4249 Label leave_exit_frame;
4250 Label return_value_loaded;
4253 ld(v0, return_value_operand);
4254 bind(&return_value_loaded);
4259 if (emit_debug_code()) {
4261 Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
4263 Dsubu(s2, s2, Operand(1));
4266 Branch(&delete_allocated_handles, ne, s1, Operand(at));
4269 bind(&leave_exit_frame);
4270 LoadRoot(a4, Heap::kTheHoleValueRootIndex);
4271 li(at, Operand(ExternalReference::scheduled_exception_address(isolate())));
4273 Branch(&promote_scheduled_exception,
ne, a4, Operand(a5));
4274 bind(&exception_handled);
4276 bool restore_context = context_restore_operand != NULL;
4277 if (restore_context) {
4278 ld(cp, *context_restore_operand);
4280 li(s0, Operand(stack_space));
4283 bind(&promote_scheduled_exception);
4286 CallExternalReference(
4287 ExternalReference(Runtime::kPromoteScheduledException, isolate()),
4290 jmp(&exception_handled);
4293 bind(&delete_allocated_handles);
4297 PrepareCallCFunction(1,
s1);
4298 li(a0, Operand(ExternalReference::isolate_address(isolate())));
4299 CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate()),
4302 jmp(&leave_exit_frame);
4306 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
4307 return has_frame_ || !stub->SometimesSetsUpAFrame();
4311 void MacroAssembler::IndexFromHash(Register hash, Register index) {
4317 (1 << String::kArrayIndexValueBits));
4318 DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
4322 void MacroAssembler::ObjectToDoubleFPURegister(Register object,
4326 Register heap_number_map,
4332 JumpIfNotSmi(object, &not_smi);
4335 dsra32(scratch1, object, 0);
4336 mtc1(scratch1, result);
4337 cvt_d_w(result, result);
4343 Branch(not_number, ne, scratch1, Operand(heap_number_map));
4347 Register exponent = scratch1;
4348 Register mask_reg = scratch2;
4350 li(mask_reg, HeapNumber::kExponentMask);
4352 And(exponent, exponent, mask_reg);
4353 Branch(not_number,
eq, exponent, Operand(mask_reg));
4360 void MacroAssembler::SmiToDoubleFPURegister(Register smi,
4362 Register scratch1) {
4364 dsra32(scratch1, smi, 0);
4365 mtc1(scratch1, value);
4366 cvt_d_w(value, value);
4370 void MacroAssembler::AdduAndCheckForOverflow(Register dst,
4373 Register overflow_dst,
4375 DCHECK(!dst.is(overflow_dst));
4376 DCHECK(!dst.is(scratch));
4377 DCHECK(!overflow_dst.is(scratch));
4378 DCHECK(!overflow_dst.is(left));
4379 DCHECK(!overflow_dst.is(right));
4381 if (left.is(right) && dst.is(left)) {
4386 DCHECK(!overflow_dst.is(t9));
4393 daddu(dst, left, right);
4394 xor_(scratch, dst, scratch);
4395 xor_(overflow_dst, dst, right);
4396 and_(overflow_dst, overflow_dst, scratch);
4397 } else if (dst.is(right)) {
4398 mov(scratch, right);
4399 daddu(dst, left, right);
4400 xor_(scratch, dst, scratch);
4401 xor_(overflow_dst, dst, left);
4402 and_(overflow_dst, overflow_dst, scratch);
4404 daddu(dst, left, right);
4405 xor_(overflow_dst, dst, left);
4406 xor_(scratch, dst, right);
4407 and_(overflow_dst, scratch, overflow_dst);
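// Overflow check used above (sketch): for dst = left + right, signed overflow
// occurred exactly when both operands have the same sign and the result's sign
// differs, i.e. the sign bit of (dst ^ left) & (dst ^ right) is set, which is
// what the xor_/and_ sequence leaves in overflow_dst.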
4412 void MacroAssembler::SubuAndCheckForOverflow(Register dst,
4415 Register overflow_dst,
4417 DCHECK(!dst.is(overflow_dst));
4418 DCHECK(!dst.is(scratch));
4419 DCHECK(!overflow_dst.is(scratch));
4420 DCHECK(!overflow_dst.is(left));
4421 DCHECK(!overflow_dst.is(right));
4422 DCHECK(!scratch.is(left));
4423 DCHECK(!scratch.is(right));
4427 if (left.is(right)) {
4429 mov(overflow_dst, zero_reg);
4435 dsubu(dst, left, right);
4436 xor_(overflow_dst, dst, scratch);
4437 xor_(scratch, scratch, right);
4438 and_(overflow_dst, scratch, overflow_dst);
4439 } else if (dst.is(right)) {
4440 mov(scratch, right);
4441 dsubu(dst, left, right);
4442 xor_(overflow_dst, dst, left);
4443 xor_(scratch, left, scratch);
4444 and_(overflow_dst, scratch, overflow_dst);
4446 dsubu(dst, left, right);
4447 xor_(overflow_dst, dst, left);
4448 xor_(scratch, left, right);
4449 and_(overflow_dst, scratch, overflow_dst);
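// For dst = left - right the analogous condition (sketch) is that left and
// right have different signs and the result's sign differs from left's, i.e.
// the sign bit of (dst ^ left) & (left ^ right) is set.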
4454 void MacroAssembler::CallRuntime(const Runtime::Function* f,
4462 CHECK(f->nargs < 0 || f->nargs == num_arguments);
4468 PrepareCEntryArgs(num_arguments);
4469 PrepareCEntryFunction(ExternalReference(f, isolate()));
4470 CEntryStub stub(isolate(), 1, save_doubles);
4475 void MacroAssembler::CallExternalReference(
const ExternalReference& ext,
4478 PrepareCEntryArgs(num_arguments);
4479 PrepareCEntryFunction(ext);
4481 CEntryStub stub(isolate(), 1);
4486 void MacroAssembler::TailCallExternalReference(
const ExternalReference& ext,
4493 PrepareCEntryArgs(num_arguments);
4494 JumpToExternalReference(ext);
4498 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
4501 TailCallExternalReference(ExternalReference(fid, isolate()),
4507 void MacroAssembler::JumpToExternalReference(
const ExternalReference& builtin,
4509 PrepareCEntryFunction(builtin);
4510 CEntryStub stub(isolate(), 1);
4511 Jump(stub.GetCode(),
4512 RelocInfo::CODE_TARGET,
4520 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
4522 const CallWrapper& call_wrapper) {
4526 GetBuiltinEntry(t9, id);
4528 call_wrapper.BeforeCall(CallSize(t9));
4530 call_wrapper.AfterCall();
4538 void MacroAssembler::GetBuiltinFunction(Register target,
4539 Builtins::JavaScript id) {
4541 ld(target, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4545 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
4549 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
4551 GetBuiltinFunction(a1, id);
4557 void MacroAssembler::SetCounter(StatsCounter* counter, int value,
4558 Register scratch1, Register scratch2) {
4559 if (FLAG_native_code_counters && counter->Enabled()) {
4560 li(scratch1, Operand(value));
4561 li(scratch2, Operand(ExternalReference(counter)));
4567 void MacroAssembler::IncrementCounter(StatsCounter* counter,
int value,
4568 Register scratch1, Register scratch2) {
4570 if (FLAG_native_code_counters && counter->Enabled()) {
4571 li(scratch2, Operand(ExternalReference(counter)));
4573 Daddu(scratch1, scratch1, Operand(value));
4579 void MacroAssembler::DecrementCounter(StatsCounter* counter,
int value,
4580 Register scratch1, Register scratch2) {
4582 if (FLAG_native_code_counters && counter->Enabled()) {
4583 li(scratch2, Operand(ExternalReference(counter)));
4585 Dsubu(scratch1, scratch1, Operand(value));
4595 Register rs, Operand rt) {
4596 if (emit_debug_code())
4597 Check(
cc, reason, rs, rt);
4601 void MacroAssembler::AssertFastElements(Register elements) {
4602 if (emit_debug_code()) {
4603 DCHECK(!elements.is(at));
4607 LoadRoot(at, Heap::kFixedArrayMapRootIndex);
4608 Branch(&ok, eq, elements, Operand(at));
4609 LoadRoot(at, Heap::kFixedDoubleArrayMapRootIndex);
4610 Branch(&ok, eq, elements, Operand(at));
4611 LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
4612 Branch(&ok, eq, elements, Operand(at));
4613 Abort(kJSObjectWithFastElementsMapHasSlowElements);
4621 Register rs, Operand rt) {
4623 Branch(&L, cc, rs, rt);
4636 RecordComment("Abort message: ");
4640 if (FLAG_trap_on_abort) {
4646 li(a0, Operand(Smi::FromInt(reason)));
4653 CallRuntime(Runtime::kAbort, 1);
4655 CallRuntime(Runtime::kAbort, 1);
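// If the trampoline pool is blocked, the sequence below is padded with nops
// up to kExpectedAbortInstructions, presumably so that callers which assume
// a fixed-size Abort sequence always see the same number of generated
// instructions.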
4658 if (is_trampoline_pool_blocked()) {
4664 static const int kExpectedAbortInstructions = 10;
4665 int abort_instructions = InstructionsGeneratedSince(&abort_start);
4666 DCHECK(abort_instructions <= kExpectedAbortInstructions);
4667 while (abort_instructions++ < kExpectedAbortInstructions) {
4674 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
4675 if (context_chain_length > 0) {
4677 ld(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4678 for (int i = 1; i < context_chain_length; i++) {
4679 ld(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
4690 void MacroAssembler::LoadTransitionedArrayMapConditional(
4693 Register map_in_out,
4695 Label* no_map_match) {
4698 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4699 ld(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
4704 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
4706 FixedArrayBase::kHeaderSize;
4708 Branch(no_map_match, ne, map_in_out, Operand(at));
4712 FixedArrayBase::kHeaderSize;
4717 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
4720 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
4723 GlobalObject::kNativeContextOffset));
4725 ld(function, MemOperand(function, Context::SlotOffset(index)));
4729 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
4734 if (emit_debug_code()) {
4739 Abort(kGlobalFunctionsMustHaveInitialMap);
4745 void MacroAssembler::StubPrologue() {
4749 Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
4753 void MacroAssembler::Prologue(bool code_pre_aging) {
4754 PredictableCodeSizeScope predictible_code_size_scope(
4758 if (code_pre_aging) {
4760 Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
4761 nop(Assembler::CODE_AGE_MARKER_NOP);
4765 Operand(reinterpret_cast<uint64_t>(stub->instruction_start())),
4773 nop(Assembler::CODE_AGE_SEQUENCE_NOP);
4774 nop(Assembler::CODE_AGE_SEQUENCE_NOP);
4775 nop(Assembler::CODE_AGE_SEQUENCE_NOP);
4777 Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
4784 li(t8, Operand(Smi::FromInt(type)));
4793 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
4805 void MacroAssembler::EnterExitFrame(bool save_doubles,
4827 if (emit_debug_code()) {
4828 sd(zero_reg, MemOperand(fp, ExitFrameConstants::kSPOffset));
4833 sd(t8, MemOperand(fp, ExitFrameConstants::kCodeOffset));
4836 li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
4838 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
4841 const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
4844 int kNumOfSavedRegisters = FPURegister::kMaxNumRegisters / 2;
4848 for (int i = 0; i < kNumOfSavedRegisters; i++) {
4849 FPURegister reg = FPURegister::from_code(2 * i);
4857 DCHECK(stack_space >= 0);
4859 if (frame_alignment > 0) {
4861 And(sp, sp, Operand(-frame_alignment));
4871 void MacroAssembler::LeaveExitFrame(bool save_doubles,
4872 Register argument_count,
4873 bool restore_context,
4878 int kNumOfSavedRegisters = FPURegister::kMaxNumRegisters / 2;
4879 Dsubu(t8, fp, Operand(ExitFrameConstants::kFrameSize +
4881 for (int i = 0; i < kNumOfSavedRegisters; i++) {
4882 FPURegister reg = FPURegister::from_code(2 * i);
4888 li(t8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
4892 if (restore_context) {
4893 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
4897 li(t8, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
4904 ld(ra, MemOperand(sp, ExitFrameConstants::kCallerPCOffset));
4906 if (argument_count.is_valid()) {
4919 void MacroAssembler::InitializeNewString(Register string,
4921 Heap::RootListIndex map_index,
4923 Register scratch2) {
4925 dsll32(scratch1, length, 0);
4926 LoadRoot(scratch2, map_index);
4928 li(scratch1, Operand(String::kEmptyHashField));
4934 int MacroAssembler::ActivationFrameAlignment() {
4935 #if V8_HOST_ARCH_MIPS || V8_HOST_ARCH_MIPS64
4940 return base::OS::ActivationFrameAlignment();
4946 return FLAG_sim_stack_alignment;
4951 void MacroAssembler::AssertStackIsAligned() {
4952 if (emit_debug_code()) {
4953 const int frame_alignment = ActivationFrameAlignment();
4954 const int frame_alignment_mask = frame_alignment - 1;
4957 Label alignment_as_expected;
4959 andi(at, sp, frame_alignment_mask);
4960 Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
4962 stop("Unexpected stack alignment");
4963 bind(&alignment_as_expected);
4969 void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
4972 Label* not_power_of_two_or_zero) {
4973 Dsubu(scratch, reg, Operand(1));
4975 scratch, Operand(zero_reg));
4976 and_(at, scratch, reg);
4977 Branch(not_power_of_two_or_zero, ne, at, Operand(zero_reg));
4981 void MacroAssembler::SmiTagCheckOverflow(Register reg, Register overflow) {
4989 void MacroAssembler::SmiTagCheckOverflow(Register dst,
4994 SmiTagCheckOverflow(dst, overflow);
5005 void MacroAssembler::SmiLoadUntag(Register dst, MemOperand src) {
5015 void MacroAssembler::SmiLoadScale(Register dst, MemOperand src, int scale) {
5019 dsll(dst, dst, scale);
5029 void MacroAssembler::SmiLoadWithScale(Register d_smi,
5035 dsra(d_scaled, d_smi, kSmiShift - scale);
5045 void MacroAssembler::SmiLoadUntagWithScale(Register d_int,
5051 dsll(d_scaled, d_int, scale);
5056 sll(d_scaled, d_int, scale);
5061 void MacroAssembler::UntagAndJumpIfSmi(Register dst,
5070 void MacroAssembler::UntagAndJumpIfNotSmi(Register dst,
5072 Label* non_smi_case) {
5078 void MacroAssembler::JumpIfSmi(Register value,
5084 Branch(bd, smi_label, eq, scratch, Operand(zero_reg));
5087 void MacroAssembler::JumpIfNotSmi(Register value,
5088 Label* not_smi_label,
5093 Branch(bd, not_smi_label, ne, scratch, Operand(zero_reg));
5097 void MacroAssembler::JumpIfNotBothSmi(Register reg1,
5099 Label* on_not_both_smi) {
5102 #if defined(__APPLE__)
5107 or_(at, reg1, reg2);
5108 JumpIfNotSmi(at, on_not_both_smi);
5112 void MacroAssembler::JumpIfEitherSmi(Register reg1,
5114 Label* on_either_smi) {
5117 #if defined(__APPLE__)
5123 and_(at, reg1, reg2);
5124 JumpIfSmi(at, on_either_smi);
5128 void MacroAssembler::AssertNotSmi(Register object) {
5129 if (emit_debug_code()) {
5132 Check(ne, kOperandIsASmi, at, Operand(zero_reg));
5137 void MacroAssembler::AssertSmi(Register object) {
5138 if (emit_debug_code()) {
5141 Check(eq, kOperandIsASmi, at, Operand(zero_reg));
5146 void MacroAssembler::AssertString(Register object) {
5147 if (emit_debug_code()) {
5150 Check(ne, kOperandIsASmiAndNotAString, a4, Operand(zero_reg));
5160 void MacroAssembler::AssertName(Register object) {
5161 if (emit_debug_code()) {
5164 Check(ne, kOperandIsASmiAndNotAName, a4, Operand(zero_reg));
5174 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
5176 if (emit_debug_code()) {
5177 Label done_checking;
5178 AssertNotSmi(object);
5179 LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
5180 Branch(&done_checking, eq, object, Operand(scratch));
5183 LoadRoot(scratch, Heap::kAllocationSiteMapRootIndex);
5184 Assert(eq, kExpectedUndefinedOrCell, object, Operand(scratch));
5186 bind(&done_checking);
5191 void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
5192 if (emit_debug_code()) {
5194 LoadRoot(at, index);
5195 Check(eq, kHeapNumberMapRegisterClobbered, reg, Operand(at));
5200 void MacroAssembler::JumpIfNotHeapNumber(Register object,
5201 Register heap_number_map,
5203 Label* on_not_heap_number) {
5205 AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
5206 Branch(on_not_heap_number, ne, scratch, Operand(heap_number_map));
5210 void MacroAssembler::LookupNumberStringCache(Register object,
5217 Register number_string_cache = result;
5218 Register mask = scratch3;
5221 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
5225 ld(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
5228 dsra32(mask, mask, 1);
5229 Daddu(mask, mask, -1);
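// The number-string cache is a FixedArray of (number, string) pairs, so the
// probe mask comes from half its length: dsra32 by 1 both untags the smi
// length and halves it, and the Daddu of -1 turns that into a bit mask.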
5236 Label load_result_from_cache;
5237 JumpIfSmi(object, &is_smi);
5240 Heap::kHeapNumberMapRootIndex,
5250 Xor(scratch1, scratch1, Operand(scratch2));
5251 And(scratch1, scratch1, Operand(mask));
5256 Daddu(scratch1, number_string_cache, scratch1);
5258 Register probe = mask;
5260 JumpIfSmi(probe, not_found);
5267 Register scratch = scratch1;
5269 dsra32(scratch, scratch, 0);
5270 And(scratch, mask, Operand(scratch));
5275 Daddu(scratch, number_string_cache, scratch);
5279 Branch(not_found, ne, object, Operand(probe));
5282 bind(&load_result_from_cache);
5285 IncrementCounter(isolate()->counters()->number_to_string_native(),
5292 void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
5293 Register first, Register second, Register scratch1, Register scratch2,
5302 JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,
5307 void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
5314 And(scratch1, first, Operand(second));
5315 JumpIfSmi(scratch1, failure);
5316 JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,
5321 void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
5322 Register first, Register second, Register scratch1, Register scratch2,
5324 const int kFlatOneByteStringMask =
5326 const int kFlatOneByteStringTag =
5328 DCHECK(kFlatOneByteStringTag <= 0xffff);
5329 andi(scratch1, first, kFlatOneByteStringMask);
5330 Branch(failure, ne, scratch1, Operand(kFlatOneByteStringTag));
5331 andi(scratch2, second, kFlatOneByteStringMask);
5332 Branch(failure, ne, scratch2, Operand(kFlatOneByteStringTag));
5336 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
5339 const int kFlatOneByteStringMask =
5341 const int kFlatOneByteStringTag =
5343 And(scratch, type, Operand(kFlatOneByteStringMask));
5344 Branch(failure, ne, scratch, Operand(kFlatOneByteStringTag));
5348 static const int kRegisterPassedArguments = (kMipsAbi == kN64) ? 8 : 4;
5350 int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
5351 int num_double_arguments) {
5352 int stack_passed_words = 0;
5353 num_reg_arguments += 2 * num_double_arguments;
5357 if (num_reg_arguments > kRegisterPassedArguments) {
5358 stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
5361 return stack_passed_words;
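// Convention assumed by the calculation above: every double argument is
// counted as two register arguments, and only arguments beyond
// kRegisterPassedArguments (8 under the N64 ABI, 4 otherwise) spill to the
// stack.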
5365 void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
5372 Check(ne, kNonObject, at, Operand(zero_reg));
5378 li(scratch, Operand(encoding_mask));
5379 Check(eq, kUnexpectedStringType, at, Operand(scratch));
5384 Check(lt, kIndexIsTooLarge, index, Operand(at));
5386 DCHECK(Smi::FromInt(0) == 0);
5387 Check(ge, kIndexIsNegative, index, Operand(zero_reg));
5391 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
5392 int num_double_arguments,
5394 int frame_alignment = ActivationFrameAlignment();
5403 int stack_passed_arguments = CalculateStackPassedWords(
5404 num_reg_arguments, num_double_arguments);
5411 And(sp, sp, Operand(-frame_alignment));
5419 void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
5421 PrepareCallCFunction(num_reg_arguments, 0, scratch);
5425 void MacroAssembler::CallCFunction(ExternalReference function,
5426 int num_reg_arguments,
5427 int num_double_arguments) {
5428 li(t8, Operand(function));
5429 CallCFunctionHelper(t8, num_reg_arguments, num_double_arguments);
5433 void MacroAssembler::CallCFunction(Register function,
5434 int num_reg_arguments,
5435 int num_double_arguments) {
5436 CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);
5440 void MacroAssembler::CallCFunction(ExternalReference function,
5441 int num_arguments) {
5442 CallCFunction(function, num_arguments, 0);
5446 void MacroAssembler::CallCFunction(Register function,
5447 int num_arguments) {
5448 CallCFunction(function, num_arguments, 0);
5452 void MacroAssembler::CallCFunctionHelper(Register function,
5453 int num_reg_arguments,
5454 int num_double_arguments) {
5462 #if V8_HOST_ARCH_MIPS || V8_HOST_ARCH_MIPS64
5463 if (emit_debug_code()) {
5464 int frame_alignment = base::OS::ActivationFrameAlignment();
5465 int frame_alignment_mask = frame_alignment - 1;
5468 Label alignment_as_expected;
5469 And(at, sp, Operand(frame_alignment_mask));
5470 Branch(&alignment_as_expected, eq, at, Operand(zero_reg));
5473 stop("Unexpected alignment in CallCFunction");
5474 bind(&alignment_as_expected);
5483 if (!function.is(t9)) {
5490 int stack_passed_arguments = CalculateStackPassedWords(
5491 num_reg_arguments, num_double_arguments);
5493 if (base::OS::ActivationFrameAlignment() > kPointerSize) {
5501 #undef BRANCH_ARGS_CHECK
5504 void MacroAssembler::PatchRelocatedValue(Register li_location,
5506 Register new_value) {
5509 if (emit_debug_code()) {
5511 Check(eq, kTheInstructionToPatchShouldBeALui,
5512 scratch, Operand(LUI));
5515 dsrl32(t9, new_value, 0);
5519 lwu(scratch, MemOperand(li_location, kInstrSize));
5521 if (emit_debug_code()) {
5523 Check(eq, kTheInstructionToPatchShouldBeAnOri,
5524 scratch, Operand(ORI));
5525 lwu(scratch, MemOperand(li_location, kInstrSize));
5529 sw(scratch, MemOperand(li_location, kInstrSize));
5531 lwu(scratch, MemOperand(li_location, kInstrSize * 3));
5533 if (emit_debug_code()) {
5535 Check(eq, kTheInstructionToPatchShouldBeAnOri,
5536 scratch, Operand(ORI));
5537 lwu(scratch, MemOperand(li_location, kInstrSize * 3));
5541 sw(scratch, MemOperand(li_location, kInstrSize * 3));
5544 FlushICache(li_location, 4);
5547 void MacroAssembler::GetRelocatedValue(Register li_location,
5551 if (emit_debug_code()) {
5553 Check(eq, kTheInstructionShouldBeALui,
5554 value, Operand(LUI));
5562 lwu(scratch, MemOperand(li_location, kInstrSize));
5563 if (emit_debug_code()) {
5565 Check(eq, kTheInstructionShouldBeAnOri,
5566 scratch, Operand(ORI));
5567 lwu(scratch, MemOperand(li_location, kInstrSize));
5571 dsll32(scratch, scratch, 0);
5573 or_(value, value, scratch);
5575 lwu(scratch, MemOperand(li_location, kInstrSize * 3));
5576 if (emit_debug_code()) {
5578 Check(eq, kTheInstructionShouldBeAnOri,
5579 scratch, Operand(ORI));
5580 lwu(scratch, MemOperand(li_location, kInstrSize * 3));
5586 or_(value, value, scratch);
5592 void MacroAssembler::CheckPageFlag(
5597 Label* condition_met) {
5598 And(scratch, object, Operand(~Page::kPageAlignmentMask));
5599 ld(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
5600 And(scratch, scratch, Operand(mask));
5601 Branch(condition_met, cc, scratch, Operand(zero_reg));
5605 void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
5607 Label* if_deprecated) {
5608 if (map->CanBeDeprecated()) {
5609 li(scratch, Operand(map));
5611 And(scratch, scratch, Operand(Map::Deprecated::kMask));
5612 Branch(if_deprecated, ne, scratch, Operand(zero_reg));
5617 void MacroAssembler::JumpIfBlack(Register object,
5621 HasColor(object, scratch0, scratch1, on_black, 1, 0);
5622 DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
5626 void MacroAssembler::HasColor(Register object,
5627 Register bitmap_scratch,
5628 Register mask_scratch,
5635 GetMarkBits(object, bitmap_scratch, mask_scratch);
5639 Uld(t9, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
5640 And(t8, t9, Operand(mask_scratch));
5641 Branch(&other_color, first_bit == 1 ? eq : ne, t8, Operand(zero_reg));
5643 Daddu(mask_scratch, mask_scratch, Operand(mask_scratch));
5644 And(t8, t9, Operand(mask_scratch));
5645 Branch(has_color, second_bit == 1 ? ne : eq, t8, Operand(zero_reg));
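// Reading of HasColor: the collector keeps two adjacent mark bits per object
// (see the bit-pattern DCHECKs in EnsureNotWhite below). The first test
// compares the low bit against first_bit; doubling mask_scratch shifts the
// mask up one bit so the second test can compare against second_bit.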
5654 void MacroAssembler::JumpIfDataObject(Register value,
5656 Label* not_data_object) {
5658 Label is_data_object;
5660 LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
5661 Branch(&is_data_object, eq, t8, Operand(scratch));
5668 Branch(not_data_object, ne, t8, Operand(zero_reg));
5669 bind(&is_data_object);
5673 void MacroAssembler::GetMarkBits(Register addr_reg,
5674 Register bitmap_reg,
5675 Register mask_reg) {
5681 And(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
5685 dsll(t8, t8, Bitmap::kBytesPerCellLog2);
5686 Daddu(bitmap_reg, bitmap_reg, t8);
5688 dsllv(mask_reg, t8, mask_reg);
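// Rough reading of GetMarkBits: masking addr_reg with the page alignment
// mask yields the page start (where the marking bitmap lives), t8 indexes
// the bitmap cell for the object, and mask_reg ends up as a single bit,
// 1 shifted left by the object's bit index within that cell.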
5692 void MacroAssembler::EnsureNotWhite(
5694 Register bitmap_scratch,
5695 Register mask_scratch,
5696 Register load_scratch,
5697 Label* value_is_white_and_not_data) {
5699 GetMarkBits(value, bitmap_scratch, mask_scratch);
5702 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
5703 DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
5704 DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
5705 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
5712 Uld(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
5713 And(t8, mask_scratch, load_scratch);
5714 Branch(&done, ne, t8, Operand(zero_reg));
5716 if (emit_debug_code()) {
5720 dsll(t8, mask_scratch, 1);
5721 And(t8, load_scratch, t8);
5722 Branch(&ok, eq, t8, Operand(zero_reg));
5723 stop("Impossible marking bit pattern");
5729 Register map = load_scratch;
5730 Register length = load_scratch;
5731 Label is_data_object;
5735 LoadRoot(t8, Heap::kHeapNumberMapRootIndex);
5738 Branch(&skip, ne, t8, Operand(map));
5739 li(length, HeapNumber::kSize);
5740 Branch(&is_data_object);
5749 Register instance_type = load_scratch;
5752 Branch(value_is_white_and_not_data, ne, t8, Operand(zero_reg));
5763 Branch(&skip, eq, t8, Operand(zero_reg));
5764 li(length, ExternalString::kSize);
5765 Branch(&is_data_object);
5779 Branch(&skip, ne, t8, Operand(zero_reg));
5788 bind(&is_data_object);
5791 Uld(t8, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
5792 Or(t8, t8, Operand(mask_scratch));
5793 Usd(t8, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
5795 And(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask));
5796 Uld(t8, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
5797 Daddu(t8, t8, Operand(length));
5798 Usd(t8, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
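// Once the value is known to be a data object it is marked by OR-ing its
// mark bit into the bitmap cell, and its size (held in length) is added to
// the page's live-bytes counter.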
5804 void MacroAssembler::LoadInstanceDescriptors(Register map,
5805 Register descriptors) {
5810 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
5812 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
5816 void MacroAssembler::EnumLength(Register dst, Register map) {
5819 And(dst, dst, Operand(Map::EnumLengthBits::kMask));
5824 void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
5825 Register empty_fixed_array_value = a6;
5826 LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
5845 Branch(call_runtime, ne, a3, Operand(Smi::FromInt(0)));
5853 Branch(&no_elements, eq, a2, Operand(empty_fixed_array_value));
5856 LoadRoot(at, Heap::kEmptySlowElementDictionaryRootIndex);
5857 Branch(call_runtime, ne, a2, Operand(at));
5861 Branch(&next, ne, a2, Operand(null_value));
5865 void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
5866 DCHECK(!output_reg.is(input_reg));
5868 li(output_reg, Operand(255));
5870 Branch(&done, gt, input_reg, Operand(output_reg));
5873 mov(output_reg, zero_reg);
5874 mov(output_reg, input_reg);
5879 void MacroAssembler::ClampDoubleToUint8(Register result_reg,
5886 Move(temp_double_reg, 0.0);
5887 BranchF(&above_zero, NULL, gt, input_reg, temp_double_reg);
5890 mov(result_reg, zero_reg);
5895 Move(temp_double_reg, 255.0);
5896 BranchF(&in_bounds, NULL, le, input_reg, temp_double_reg);
5897 li(result_reg, Operand(255));
5902 cvt_w_d(temp_double_reg, input_reg);
5903 mfc1(result_reg, temp_double_reg);
5908 void MacroAssembler::TestJSArrayForAllocationMemento(
5909 Register receiver_reg,
5910 Register scratch_reg,
5911 Label* no_memento_found,
5913 Label* allocation_memento_present) {
5914 ExternalReference new_space_start =
5915 ExternalReference::new_space_start(isolate());
5916 ExternalReference new_space_allocation_top =
5917 ExternalReference::new_space_allocation_top_address(isolate());
5918 Daddu(scratch_reg, receiver_reg,
5919 Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
5920 Branch(no_memento_found, lt, scratch_reg, Operand(new_space_start));
5921 li(at, Operand(new_space_allocation_top));
5923 Branch(no_memento_found, gt, scratch_reg, Operand(at));
5924 ld(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
5925 if (allocation_memento_present) {
5926 Branch(allocation_memento_present, cond, scratch_reg,
5927 Operand(isolate()->factory()->allocation_memento_map()));
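// An AllocationMemento, if present, sits immediately behind the JSArray in
// new space, so scratch_reg is first range-checked against new-space start
// and the current allocation top before the word at that address is compared
// with the allocation-memento map.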
5939 if (reg1.is_valid()) regs |= reg1.bit();
5940 if (reg2.is_valid()) regs |= reg2.bit();
5941 if (reg3.is_valid()) regs |= reg3.bit();
5942 if (reg4.is_valid()) regs |= reg4.bit();
5943 if (reg5.is_valid()) regs |= reg5.bit();
5944 if (reg6.is_valid()) regs |= reg6.bit();
5946 for (int i = 0; i < Register::NumAllocatableRegisters(); i++) {
5947 Register candidate = Register::FromAllocationIndex(i);
5948 if (regs & candidate.bit()) continue;
5956 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
5961 DCHECK(!scratch1.is(scratch0));
5962 Factory* factory = isolate()->factory();
5963 Register current = scratch0;
5967 Move(current, object);
5973 DecodeField<Map::ElementsKindBits>(scratch1);
5976 Branch(&loop_again, ne, current, Operand(factory->null_value()));
5988 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
5989 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
5990 reg7.is_valid() + reg8.is_valid();
5993 if (reg1.is_valid()) regs |= reg1.bit();
5994 if (reg2.is_valid()) regs |= reg2.bit();
5995 if (reg3.is_valid()) regs |= reg3.bit();
5996 if (reg4.is_valid()) regs |= reg4.bit();
5997 if (reg5.is_valid()) regs |= reg5.bit();
5998 if (reg6.is_valid()) regs |= reg6.bit();
5999 if (reg7.is_valid()) regs |= reg7.bit();
6000 if (reg8.is_valid()) regs |= reg8.bit();
6001 int n_of_non_aliasing_regs = NumRegs(regs);
6003 return n_of_valid_regs != n_of_non_aliasing_regs;
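// Registers alias exactly when the number of valid registers passed in is
// larger than the number of distinct register bits accumulated in regs.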
6007 CodePatcher::CodePatcher(byte* address,
6009 FlushICache flush_cache)
6010 : address_(address),
6011 size_(instructions * Assembler::kInstrSize),
6012 masm_(NULL, address, size_ + Assembler::kGap),
6013 flush_cache_(flush_cache) {
6017 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
6021 CodePatcher::~CodePatcher() {
6023 if (flush_cache_ == FLUSH) {
6024 CpuFeatures::FlushICache(address_, size_);
6027 DCHECK(masm_.pc_ == address_ + size_);
6028 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
6032 void CodePatcher::Emit(Instr instr) {
6033 masm()->emit(instr);
6037 void CodePatcher::Emit(Address addr) {
6042 void CodePatcher::ChangeBranchCondition(Condition cond) {
6043 Instr instr = Assembler::instr_at(masm_.pc_);
6045 uint32_t opcode = Assembler::GetOpcodeField(instr);
6064 void MacroAssembler::TruncatingDiv(Register result,
6067 DCHECK(!dividend.is(result));
6068 DCHECK(!dividend.is(at));
6070 base::MagicNumbersForDivision<uint32_t> mag =
6072 li(at, Operand(mag.multiplier));
6073 Mulh(result, dividend, Operand(at));
6074 bool neg = (mag.multiplier & (static_cast<uint64_t>(1) << 31)) != 0;
6075 if (divisor > 0 && neg) {
6076 Addu(result, result, Operand(dividend));
6078 if (divisor < 0 && !neg && mag.multiplier > 0) {
6079 Subu(result, result, Operand(dividend));
6081 if (mag.shift > 0) sra(result, result, mag.shift);
6082 srl(at, dividend, 31);
6083 Addu(result, result, Operand(at));
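// Sketch of the magic-number division above: Mulh produces the high 32 bits
// of dividend * mag.multiplier; the conditional Addu/Subu corrects for a
// multiplier whose sign disagrees with the divisor's; sra applies mag.shift;
// and adding the dividend's sign bit (srl by 31) makes the quotient truncate
// toward zero.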