38 #if V8_TARGET_ARCH_MIPS64
52 static unsigned CpuFeaturesImpliedByCompiler() {
54 #ifdef CAN_USE_FPU_INSTRUCTIONS
61 #if defined(__mips__) && defined(__mips_hard_float) && __mips_hard_float != 0
71 const char*
const names[] = {
95 if (cross_compile)
return;
116 const int kNumbers[] = {
150 return kNumbers[reg.code()];
156 const Register kRegisters[] = {
160 a0, a1, a2, a3, a4, a5, a6, a7,
170 return kRegisters[num];
197 Instr* instr =
reinterpret_cast<Instr*
>(instructions);
198 for (
int i = 0;
i < instruction_count;
i++) {
199 *(
pc +
i) = *(instr +
i);
224 if (obj->IsHeapObject()) {
225 DCHECK(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
226 imm64_ =
reinterpret_cast<intptr_t
>(
handle.location());
230 imm64_ =
reinterpret_cast<intptr_t
>(obj);
242 OffsetAddend offset_addend) : Operand(rm) {
243 offset_ = unit * multiplier + offset_addend;
250 static const int kNegOffset = 0x00008000;
285 Assembler::Assembler(Isolate* isolate,
void* buffer,
int buffer_size)
286 : AssemblerBase(isolate, buffer, buffer_size),
287 recorded_ast_id_(TypeFeedbackId::
None()),
288 positions_recorder_(this) {
289 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
291 last_trampoline_pool_end_ = 0;
292 no_trampoline_pool_before_ = 0;
293 trampoline_pool_blocked_nesting_ = 0;
296 next_buffer_check_ = FLAG_force_long_branches
297 ?
kMaxInt : kMaxBranchOffset - kTrampolineSlotsSize * 16;
298 internal_trampoline_exception_ =
false;
301 trampoline_emitted_ = FLAG_force_long_branches;
302 unbound_labels_count_ = 0;
303 block_buffer_growth_ =
false;
305 ClearRecordedAstId();
309 void Assembler::GetCode(CodeDesc* desc) {
310 DCHECK(pc_ <= reloc_info_writer.pos());
312 desc->buffer = buffer_;
313 desc->buffer_size = buffer_size_;
314 desc->instr_size = pc_offset();
315 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
322 while ((pc_offset() & (m - 1)) != 0) {
328 void Assembler::CodeTargetAlign() {
335 Register Assembler::GetRtReg(
Instr instr) {
342 Register Assembler::GetRsReg(
Instr instr) {
349 Register Assembler::GetRdReg(
Instr instr) {
421 bool Assembler::IsPop(
Instr instr) {
426 bool Assembler::IsPush(
Instr instr) {
431 bool Assembler::IsSwRegFpOffset(
Instr instr) {
436 bool Assembler::IsLwRegFpOffset(
Instr instr) {
441 bool Assembler::IsSwRegFpNegOffset(
Instr instr) {
447 bool Assembler::IsLwRegFpNegOffset(
Instr instr) {
468 const int kEndOfChain = -4;
470 const int kEndOfJumpChain = 0;
474 uint32_t opcode = GetOpcodeField(instr);
475 uint32_t rt_field = GetRtField(instr);
476 uint32_t rs_field = GetRsField(instr);
478 return opcode ==
BEQ ||
488 (opcode ==
COP1 && rs_field ==
BC1) ||
494 bool Assembler::IsEmittedConstant(
Instr instr) {
495 uint32_t label_constant = GetLabelConst(instr);
496 return label_constant == 0;
500 bool Assembler::IsBeq(
Instr instr) {
501 return GetOpcodeField(instr) ==
BEQ;
505 bool Assembler::IsBne(
Instr instr) {
506 return GetOpcodeField(instr) ==
BNE;
510 bool Assembler::IsJump(
Instr instr) {
511 uint32_t opcode = GetOpcodeField(instr);
512 uint32_t rt_field = GetRtField(instr);
513 uint32_t rd_field = GetRdField(instr);
514 uint32_t function_field = GetFunctionField(instr);
516 return opcode ==
J || opcode ==
JAL ||
517 (opcode ==
SPECIAL && rt_field == 0 &&
518 ((function_field ==
JALR) || (rd_field == 0 && (function_field ==
JR))));
522 bool Assembler::IsJ(
Instr instr) {
523 uint32_t opcode = GetOpcodeField(instr);
529 bool Assembler::IsJal(
Instr instr) {
530 return GetOpcodeField(instr) ==
JAL;
534 bool Assembler::IsJr(
Instr instr) {
535 return GetOpcodeField(instr) ==
SPECIAL && GetFunctionField(instr) ==
JR;
539 bool Assembler::IsJalr(
Instr instr) {
540 return GetOpcodeField(instr) ==
SPECIAL && GetFunctionField(instr) ==
JALR;
544 bool Assembler::IsLui(
Instr instr) {
545 uint32_t opcode = GetOpcodeField(instr);
547 return opcode ==
LUI;
551 bool Assembler::IsOri(
Instr instr) {
552 uint32_t opcode = GetOpcodeField(instr);
554 return opcode ==
ORI;
558 bool Assembler::IsNop(
Instr instr,
unsigned int type) {
561 uint32_t opcode = GetOpcodeField(instr);
562 uint32_t function = GetFunctionField(instr);
572 Register nop_rt_reg = (type == 0) ? zero_reg : at;
573 bool ret = (opcode ==
SPECIAL &&
function ==
SLL &&
588 bool Assembler::IsLw(
Instr instr) {
610 bool Assembler::IsSw(
Instr instr) {
621 bool Assembler::IsAddImmediate(
Instr instr) {
627 DCHECK(IsAddImmediate(instr));
632 bool Assembler::IsAndImmediate(
Instr instr) {
633 return GetOpcodeField(instr) ==
ANDI;
637 int64_t Assembler::target_at(int64_t pos) {
638 Instr instr = instr_at(pos);
645 return (imm18 + pos);
654 if (imm18 == kEndOfChain) {
658 return pos + kBranchPCOffset + imm18;
660 }
else if (IsLui(instr)) {
661 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize);
662 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize);
663 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize);
665 DCHECK(IsOri(instr_ori2));
668 int64_t imm =
static_cast<int64_t
>(instr_lui &
kImm16Mask) << 48;
669 imm |=
static_cast<int64_t
>(instr_ori &
kImm16Mask) << 32;
670 imm |=
static_cast<int64_t
>(instr_ori2 &
kImm16Mask) << 16;
674 if (imm == kEndOfJumpChain) {
678 uint64_t instr_address =
reinterpret_cast<int64_t
>(buffer_ + pos);
679 int64_t delta = instr_address - imm;
685 if (imm28 == kEndOfJumpChain) {
689 uint64_t instr_address =
reinterpret_cast<int64_t
>(buffer_ + pos);
691 int64_t delta = instr_address - imm28;
699 void Assembler::target_at_put(int64_t pos, int64_t target_pos) {
700 Instr instr = instr_at(pos);
702 DCHECK(target_pos == kEndOfChain || target_pos >= 0);
705 instr_at_put(pos, target_pos + (Code::kHeaderSize -
kHeapObjectTag));
711 int32_t imm18 = target_pos - (pos + kBranchPCOffset);
718 instr_at_put(pos, instr | (imm16 &
kImm16Mask));
719 }
else if (IsLui(instr)) {
720 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize);
721 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize);
722 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize);
724 DCHECK(IsOri(instr_ori2));
726 uint64_t imm =
reinterpret_cast<uint64_t
>(buffer_) + target_pos;
733 instr_at_put(pos + 0 * Assembler::kInstrSize,
735 instr_at_put(pos + 1 * Assembler::kInstrSize,
737 instr_at_put(pos + 3 * Assembler::kInstrSize,
740 uint64_t imm28 =
reinterpret_cast<uint64_t
>(buffer_) + target_pos;
748 instr_at_put(pos, instr | (imm26 &
kImm26Mask));
753 void Assembler::print(Label*
L) {
754 if (
L->is_unused()) {
756 }
else if (
L->is_bound()) {
757 PrintF(
"bound label to %d\n",
L->pos());
758 }
else if (
L->is_linked()) {
761 while (l.is_linked()) {
763 Instr instr = instr_at(l.pos());
772 PrintF(
"label in inconsistent state (pos = %d)\n",
L->pos_);
777 void Assembler::bind_to(Label*
L,
int pos) {
778 DCHECK(0 <= pos && pos <= pc_offset());
779 int32_t trampoline_pos = kInvalidSlotPos;
780 if (
L->is_linked() && !trampoline_emitted_) {
781 unbound_labels_count_--;
782 next_buffer_check_ += kTrampolineSlotsSize;
785 while (
L->is_linked()) {
787 int32_t dist = pos - fixup_pos;
789 Instr instr = instr_at(fixup_pos);
791 if (dist > kMaxBranchOffset) {
792 if (trampoline_pos == kInvalidSlotPos) {
793 trampoline_pos = get_trampoline_entry(fixup_pos);
794 CHECK(trampoline_pos != kInvalidSlotPos);
796 DCHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset);
797 target_at_put(fixup_pos, trampoline_pos);
798 fixup_pos = trampoline_pos;
799 dist = pos - fixup_pos;
801 target_at_put(fixup_pos, pos);
803 DCHECK(IsJ(instr) || IsLui(instr) || IsEmittedConstant(instr));
804 target_at_put(fixup_pos, pos);
811 if (pos > last_bound_pos_)
812 last_bound_pos_ = pos;
816 void Assembler::bind(Label*
L) {
818 bind_to(
L, pc_offset());
822 void Assembler::next(Label*
L) {
824 int link = target_at(
L->pos());
825 if (link == kEndOfChain) {
834 bool Assembler::is_near(Label*
L) {
836 return ((pc_offset() -
L->pos()) < kMaxBranchOffset - 4 * kInstrSize);
846 bool Assembler::MustUseReg(RelocInfo::Mode rmode) {
847 return !RelocInfo::IsNone(rmode);
850 void Assembler::GenInstrRegister(
Opcode opcode,
856 DCHECK(rd.is_valid() && rs.is_valid() && rt.is_valid() && is_uint5(sa));
863 void Assembler::GenInstrRegister(
Opcode opcode,
869 DCHECK(rs.is_valid() && rt.is_valid() && is_uint5(msb) && is_uint5(lsb));
876 void Assembler::GenInstrRegister(
Opcode opcode,
882 DCHECK(fd.is_valid() && fs.is_valid() && ft.is_valid());
889 void Assembler::GenInstrRegister(
Opcode opcode,
895 DCHECK(fd.is_valid() && fr.is_valid() && fs.is_valid() && ft.is_valid());
902 void Assembler::GenInstrRegister(
Opcode opcode,
908 DCHECK(fd.is_valid() && fs.is_valid() && rt.is_valid());
915 void Assembler::GenInstrRegister(
Opcode opcode,
918 FPUControlRegister fs,
920 DCHECK(fs.is_valid() && rt.is_valid());
929 void Assembler::GenInstrImmediate(
Opcode opcode,
933 DCHECK(rs.is_valid() && rt.is_valid() && (is_int16(j) || is_uint16(j)));
940 void Assembler::GenInstrImmediate(
Opcode opcode,
944 DCHECK(rs.is_valid() && (is_int16(j) || is_uint16(j)));
950 void Assembler::GenInstrImmediate(
Opcode opcode,
954 DCHECK(rs.is_valid() && ft.is_valid() && (is_int16(j) || is_uint16(j)));
961 void Assembler::GenInstrJump(
Opcode opcode,
963 BlockTrampolinePoolScope block_trampoline_pool(
this);
964 DCHECK(is_uint26(address));
965 Instr instr = opcode | address;
967 BlockTrampolinePoolFor(1);
973 int32_t trampoline_entry = kInvalidSlotPos;
974 if (!internal_trampoline_exception_) {
975 if (trampoline_.start() > pos) {
976 trampoline_entry = trampoline_.take_slot();
979 if (kInvalidSlotPos == trampoline_entry) {
980 internal_trampoline_exception_ =
true;
983 return trampoline_entry;
987 uint64_t Assembler::jump_address(Label*
L) {
990 target_pos =
L->pos();
992 if (
L->is_linked()) {
993 target_pos =
L->pos();
994 L->link_to(pc_offset());
996 L->link_to(pc_offset());
997 return kEndOfJumpChain;
1001 uint64_t imm =
reinterpret_cast<uint64_t
>(buffer_) + target_pos;
1008 int32_t Assembler::branch_offset(Label*
L,
bool jump_elimination_allowed) {
1010 if (
L->is_bound()) {
1011 target_pos =
L->pos();
1013 if (
L->is_linked()) {
1014 target_pos =
L->pos();
1015 L->link_to(pc_offset());
1017 L->link_to(pc_offset());
1018 if (!trampoline_emitted_) {
1019 unbound_labels_count_++;
1020 next_buffer_check_ -= kTrampolineSlotsSize;
1026 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset);
1027 DCHECK((offset & 3) == 0);
1028 DCHECK(is_int16(offset >> 2));
1034 int32_t Assembler::branch_offset_compact(Label*
L,
1035 bool jump_elimination_allowed) {
1037 if (
L->is_bound()) {
1038 target_pos =
L->pos();
1040 if (
L->is_linked()) {
1041 target_pos =
L->pos();
1042 L->link_to(pc_offset());
1044 L->link_to(pc_offset());
1045 if (!trampoline_emitted_) {
1046 unbound_labels_count_++;
1047 next_buffer_check_ -= kTrampolineSlotsSize;
1053 int32_t offset = target_pos - pc_offset();
1054 DCHECK((offset & 3) == 0);
1055 DCHECK(is_int16(offset >> 2));
1061 int32_t Assembler::branch_offset21(Label*
L,
bool jump_elimination_allowed) {
1063 if (
L->is_bound()) {
1064 target_pos =
L->pos();
1066 if (
L->is_linked()) {
1067 target_pos =
L->pos();
1068 L->link_to(pc_offset());
1070 L->link_to(pc_offset());
1071 if (!trampoline_emitted_) {
1072 unbound_labels_count_++;
1073 next_buffer_check_ -= kTrampolineSlotsSize;
1079 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset);
1080 DCHECK((offset & 3) == 0);
1081 DCHECK(((offset >> 2) & 0xFFE00000) == 0);
1087 int32_t Assembler::branch_offset21_compact(Label*
L,
1088 bool jump_elimination_allowed) {
1090 if (
L->is_bound()) {
1091 target_pos =
L->pos();
1093 if (
L->is_linked()) {
1094 target_pos =
L->pos();
1095 L->link_to(pc_offset());
1097 L->link_to(pc_offset());
1098 if (!trampoline_emitted_) {
1099 unbound_labels_count_++;
1100 next_buffer_check_ -= kTrampolineSlotsSize;
1106 int32_t offset = target_pos - pc_offset();
1107 DCHECK((offset & 3) == 0);
1108 DCHECK(((offset >> 2) & 0xFFE00000) == 0);
1114 void Assembler::label_at_put(Label*
L,
int at_offset) {
1116 if (
L->is_bound()) {
1117 target_pos =
L->pos();
1118 instr_at_put(at_offset, target_pos + (Code::kHeaderSize -
kHeapObjectTag));
1120 if (
L->is_linked()) {
1121 target_pos =
L->pos();
1122 int32_t imm18 = target_pos - at_offset;
1123 DCHECK((imm18 & 3) == 0);
1126 instr_at_put(at_offset, (imm16 &
kImm16Mask));
1128 target_pos = kEndOfChain;
1129 instr_at_put(at_offset, 0);
1130 if (!trampoline_emitted_) {
1131 unbound_labels_count_++;
1132 next_buffer_check_ -= kTrampolineSlotsSize;
1135 L->link_to(at_offset);
1142 void Assembler::b(
int16_t offset) {
1143 beq(zero_reg, zero_reg, offset);
1147 void Assembler::bal(
int16_t offset) {
1148 positions_recorder()->WriteRecordedPositions();
1149 bgezal(zero_reg, offset);
1153 void Assembler::beq(Register rs, Register rt,
int16_t offset) {
1154 BlockTrampolinePoolScope block_trampoline_pool(
this);
1155 GenInstrImmediate(
BEQ, rs, rt, offset);
1156 BlockTrampolinePoolFor(1);
1160 void Assembler::bgez(Register rs,
int16_t offset) {
1161 BlockTrampolinePoolScope block_trampoline_pool(
this);
1163 BlockTrampolinePoolFor(1);
1167 void Assembler::bgezc(Register rt,
int16_t offset) {
1169 DCHECK(!(rt.is(zero_reg)));
1170 GenInstrImmediate(
BLEZL, rt, rt, offset);
1174 void Assembler::bgeuc(Register rs, Register rt,
int16_t offset) {
1176 DCHECK(!(rs.is(zero_reg)));
1177 DCHECK(!(rt.is(zero_reg)));
1178 DCHECK(rs.code() != rt.code());
1179 GenInstrImmediate(
BLEZ, rs, rt, offset);
1183 void Assembler::bgec(Register rs, Register rt,
int16_t offset) {
1185 DCHECK(!(rs.is(zero_reg)));
1186 DCHECK(!(rt.is(zero_reg)));
1187 DCHECK(rs.code() != rt.code());
1188 GenInstrImmediate(
BLEZL, rs, rt, offset);
1192 void Assembler::bgezal(Register rs,
int16_t offset) {
1194 BlockTrampolinePoolScope block_trampoline_pool(
this);
1195 positions_recorder()->WriteRecordedPositions();
1197 BlockTrampolinePoolFor(1);
1201 void Assembler::bgtz(Register rs,
int16_t offset) {
1202 BlockTrampolinePoolScope block_trampoline_pool(
this);
1203 GenInstrImmediate(
BGTZ, rs, zero_reg, offset);
1204 BlockTrampolinePoolFor(1);
1208 void Assembler::bgtzc(Register rt,
int16_t offset) {
1210 DCHECK(!(rt.is(zero_reg)));
1211 GenInstrImmediate(
BGTZL, zero_reg, rt, offset);
1215 void Assembler::blez(Register rs,
int16_t offset) {
1216 BlockTrampolinePoolScope block_trampoline_pool(
this);
1217 GenInstrImmediate(
BLEZ, rs, zero_reg, offset);
1218 BlockTrampolinePoolFor(1);
1222 void Assembler::blezc(Register rt,
int16_t offset) {
1224 DCHECK(!(rt.is(zero_reg)));
1225 GenInstrImmediate(
BLEZL, zero_reg, rt, offset);
1229 void Assembler::bltzc(Register rt,
int16_t offset) {
1231 DCHECK(!(rt.is(zero_reg)));
1232 GenInstrImmediate(
BGTZL, rt, rt, offset);
1236 void Assembler::bltuc(Register rs, Register rt,
int16_t offset) {
1238 DCHECK(!(rs.is(zero_reg)));
1239 DCHECK(!(rt.is(zero_reg)));
1240 DCHECK(rs.code() != rt.code());
1241 GenInstrImmediate(
BGTZ, rs, rt, offset);
1245 void Assembler::bltc(Register rs, Register rt,
int16_t offset) {
1247 DCHECK(!(rs.is(zero_reg)));
1248 DCHECK(!(rt.is(zero_reg)));
1249 DCHECK(rs.code() != rt.code());
1250 GenInstrImmediate(
BGTZL, rs, rt, offset);
1254 void Assembler::bltz(Register rs,
int16_t offset) {
1255 BlockTrampolinePoolScope block_trampoline_pool(
this);
1257 BlockTrampolinePoolFor(1);
1261 void Assembler::bltzal(Register rs,
int16_t offset) {
1263 BlockTrampolinePoolScope block_trampoline_pool(
this);
1264 positions_recorder()->WriteRecordedPositions();
1266 BlockTrampolinePoolFor(1);
1270 void Assembler::bne(Register rs, Register rt,
int16_t offset) {
1271 BlockTrampolinePoolScope block_trampoline_pool(
this);
1272 GenInstrImmediate(
BNE, rs, rt, offset);
1273 BlockTrampolinePoolFor(1);
1277 void Assembler::bovc(Register rs, Register rt,
int16_t offset) {
1279 DCHECK(!(rs.is(zero_reg)));
1280 DCHECK(rs.code() >= rt.code());
1281 GenInstrImmediate(
ADDI, rs, rt, offset);
1285 void Assembler::bnvc(Register rs, Register rt,
int16_t offset) {
1287 DCHECK(!(rs.is(zero_reg)));
1288 DCHECK(rs.code() >= rt.code());
1289 GenInstrImmediate(
DADDI, rs, rt, offset);
1293 void Assembler::blezalc(Register rt,
int16_t offset) {
1295 DCHECK(!(rt.is(zero_reg)));
1296 GenInstrImmediate(
BLEZ, zero_reg, rt, offset);
1300 void Assembler::bgezalc(Register rt,
int16_t offset) {
1302 DCHECK(!(rt.is(zero_reg)));
1303 GenInstrImmediate(
BLEZ, rt, rt, offset);
1307 void Assembler::bgezall(Register rs,
int16_t offset) {
1309 DCHECK(!(rs.is(zero_reg)));
1314 void Assembler::bltzalc(Register rt,
int16_t offset) {
1316 DCHECK(!(rt.is(zero_reg)));
1317 GenInstrImmediate(
BGTZ, rt, rt, offset);
1321 void Assembler::bgtzalc(Register rt,
int16_t offset) {
1323 DCHECK(!(rt.is(zero_reg)));
1324 GenInstrImmediate(
BGTZ, zero_reg, rt, offset);
1328 void Assembler::beqzalc(Register rt,
int16_t offset) {
1330 DCHECK(!(rt.is(zero_reg)));
1331 GenInstrImmediate(
ADDI, zero_reg, rt, offset);
1335 void Assembler::bnezalc(Register rt,
int16_t offset) {
1337 DCHECK(!(rt.is(zero_reg)));
1338 GenInstrImmediate(
DADDI, zero_reg, rt, offset);
1342 void Assembler::beqc(Register rs, Register rt,
int16_t offset) {
1344 DCHECK(rs.code() < rt.code());
1345 GenInstrImmediate(
ADDI, rs, rt, offset);
1349 void Assembler::beqzc(Register rs,
int32_t offset) {
1351 DCHECK(!(rs.is(zero_reg)));
1357 void Assembler::bnec(Register rs, Register rt,
int16_t offset) {
1359 DCHECK(rs.code() < rt.code());
1360 GenInstrImmediate(
DADDI, rs, rt, offset);
1364 void Assembler::bnezc(Register rs,
int32_t offset) {
1366 DCHECK(!(rs.is(zero_reg)));
1372 void Assembler::j(int64_t target) {
1375 uint64_t ipc =
reinterpret_cast<uint64_t
>(pc_ + 1 * kInstrSize);
1376 bool in_range = (ipc ^
static_cast<uint64_t
>(target) >>
1378 DCHECK(in_range && ((target & 3) == 0));
1380 GenInstrJump(
J, target >> 2);
1384 void Assembler::jr(Register rs) {
1386 BlockTrampolinePoolScope block_trampoline_pool(
this);
1388 positions_recorder()->WriteRecordedPositions();
1390 GenInstrRegister(
SPECIAL, rs, zero_reg, zero_reg, 0,
JR);
1391 BlockTrampolinePoolFor(1);
1398 void Assembler::jal(int64_t target) {
1401 uint64_t ipc =
reinterpret_cast<uint64_t
>(pc_ + 1 * kInstrSize);
1402 bool in_range = (ipc ^
static_cast<uint64_t
>(target) >>
1404 DCHECK(in_range && ((target & 3) == 0));
1406 positions_recorder()->WriteRecordedPositions();
1407 GenInstrJump(
JAL, target >> 2);
1411 void Assembler::jalr(Register rs, Register rd) {
1412 BlockTrampolinePoolScope block_trampoline_pool(
this);
1413 positions_recorder()->WriteRecordedPositions();
1414 GenInstrRegister(
SPECIAL, rs, zero_reg, rd, 0,
JALR);
1415 BlockTrampolinePoolFor(1);
1419 void Assembler::j_or_jr(int64_t target, Register rs) {
1421 uint64_t ipc =
reinterpret_cast<uint64_t
>(pc_ + 1 * kInstrSize);
1422 bool in_range = (ipc ^
static_cast<uint64_t
>(target) >>
1432 void Assembler::jal_or_jalr(int64_t target, Register rs) {
1434 uint64_t ipc =
reinterpret_cast<uint64_t
>(pc_ + 1 * kInstrSize);
1435 bool in_range = (ipc ^
static_cast<uint64_t
>(target) >>
1449 void Assembler::addu(Register rd, Register rs, Register rt) {
1454 void Assembler::addiu(Register rd, Register rs,
int32_t j) {
1455 GenInstrImmediate(
ADDIU, rs, rd, j);
1459 void Assembler::subu(Register rd, Register rs, Register rt) {
1464 void Assembler::mul(Register rd, Register rs, Register rt) {
1473 void Assembler::muh(Register rd, Register rs, Register rt) {
1479 void Assembler::mulu(Register rd, Register rs, Register rt) {
1485 void Assembler::muhu(Register rd, Register rs, Register rt) {
1491 void Assembler::dmul(Register rd, Register rs, Register rt) {
1497 void Assembler::dmuh(Register rd, Register rs, Register rt) {
1503 void Assembler::dmulu(Register rd, Register rs, Register rt) {
1509 void Assembler::dmuhu(Register rd, Register rs, Register rt) {
1515 void Assembler::mult(Register rs, Register rt) {
1517 GenInstrRegister(
SPECIAL, rs, rt, zero_reg, 0,
MULT);
1521 void Assembler::multu(Register rs, Register rt) {
1527 void Assembler::daddiu(Register rd, Register rs,
int32_t j) {
1528 GenInstrImmediate(
DADDIU, rs, rd, j);
1532 void Assembler::div(Register rs, Register rt) {
1533 GenInstrRegister(
SPECIAL, rs, rt, zero_reg, 0,
DIV);
1537 void Assembler::div(Register rd, Register rs, Register rt) {
1543 void Assembler::mod(Register rd, Register rs, Register rt) {
1549 void Assembler::divu(Register rs, Register rt) {
1550 GenInstrRegister(
SPECIAL, rs, rt, zero_reg, 0,
DIVU);
1554 void Assembler::divu(Register rd, Register rs, Register rt) {
1560 void Assembler::modu(Register rd, Register rs, Register rt) {
1566 void Assembler::daddu(Register rd, Register rs, Register rt) {
1571 void Assembler::dsubu(Register rd, Register rs, Register rt) {
1576 void Assembler::dmult(Register rs, Register rt) {
1581 void Assembler::dmultu(Register rs, Register rt) {
1586 void Assembler::ddiv(Register rs, Register rt) {
1587 GenInstrRegister(
SPECIAL, rs, rt, zero_reg, 0,
DDIV);
1591 void Assembler::ddiv(Register rd, Register rs, Register rt) {
1597 void Assembler::dmod(Register rd, Register rs, Register rt) {
1603 void Assembler::ddivu(Register rs, Register rt) {
1608 void Assembler::ddivu(Register rd, Register rs, Register rt) {
1614 void Assembler::dmodu(Register rd, Register rs, Register rt) {
1622 void Assembler::and_(Register rd, Register rs, Register rt) {
1623 GenInstrRegister(
SPECIAL, rs, rt, rd, 0,
AND);
1627 void Assembler::andi(Register rt, Register rs,
int32_t j) {
1629 GenInstrImmediate(
ANDI, rs, rt, j);
1633 void Assembler::or_(Register rd, Register rs, Register rt) {
1634 GenInstrRegister(
SPECIAL, rs, rt, rd, 0,
OR);
1638 void Assembler::ori(Register rt, Register rs,
int32_t j) {
1640 GenInstrImmediate(
ORI, rs, rt, j);
1644 void Assembler::xor_(Register rd, Register rs, Register rt) {
1645 GenInstrRegister(
SPECIAL, rs, rt, rd, 0,
XOR);
1649 void Assembler::xori(Register rt, Register rs,
int32_t j) {
1651 GenInstrImmediate(
XORI, rs, rt, j);
1655 void Assembler::nor(Register rd, Register rs, Register rt) {
1656 GenInstrRegister(
SPECIAL, rs, rt, rd, 0,
NOR);
1661 void Assembler::sll(Register rd,
1664 bool coming_from_nop) {
1669 DCHECK(coming_from_nop || !(rd.is(zero_reg) && rt.is(zero_reg)));
1670 GenInstrRegister(
SPECIAL, zero_reg, rt, rd, sa,
SLL);
1674 void Assembler::sllv(Register rd, Register rt, Register rs) {
1679 void Assembler::srl(Register rd, Register rt,
uint16_t sa) {
1680 GenInstrRegister(
SPECIAL, zero_reg, rt, rd, sa,
SRL);
1684 void Assembler::srlv(Register rd, Register rt, Register rs) {
1689 void Assembler::sra(Register rd, Register rt,
uint16_t sa) {
1690 GenInstrRegister(
SPECIAL, zero_reg, rt, rd, sa,
SRA);
1694 void Assembler::srav(Register rd, Register rt, Register rs) {
1699 void Assembler::rotr(Register rd, Register rt,
uint16_t sa) {
1701 DCHECK(rd.is_valid() && rt.is_valid() && is_uint5(sa));
1709 void Assembler::rotrv(Register rd, Register rt, Register rs) {
1711 DCHECK(rd.is_valid() && rt.is_valid() && rs.is_valid() );
1719 void Assembler::dsll(Register rd, Register rt,
uint16_t sa) {
1720 GenInstrRegister(
SPECIAL, zero_reg, rt, rd, sa,
DSLL);
1724 void Assembler::dsllv(Register rd, Register rt, Register rs) {
1729 void Assembler::dsrl(Register rd, Register rt,
uint16_t sa) {
1730 GenInstrRegister(
SPECIAL, zero_reg, rt, rd, sa,
DSRL);
1734 void Assembler::dsrlv(Register rd, Register rt, Register rs) {
1739 void Assembler::drotr(Register rd, Register rt,
uint16_t sa) {
1740 DCHECK(rd.is_valid() && rt.is_valid() && is_uint5(sa));
1747 void Assembler::drotrv(Register rd, Register rt, Register rs) {
1748 DCHECK(rd.is_valid() && rt.is_valid() && rs.is_valid() );
1755 void Assembler::dsra(Register rd, Register rt,
uint16_t sa) {
1756 GenInstrRegister(
SPECIAL, zero_reg, rt, rd, sa,
DSRA);
1760 void Assembler::dsrav(Register rd, Register rt, Register rs) {
1765 void Assembler::dsll32(Register rd, Register rt,
uint16_t sa) {
1770 void Assembler::dsrl32(Register rd, Register rt,
uint16_t sa) {
1775 void Assembler::dsra32(Register rd, Register rt,
uint16_t sa) {
1783 void Assembler::LoadRegPlusOffsetToAt(
const MemOperand& src) {
1784 DCHECK(!src.rm().is(at));
1785 DCHECK(is_int32(src.offset_));
1789 daddu(at, at, src.rm());
1793 void Assembler::lb(Register rd,
const MemOperand& rs) {
1794 if (is_int16(rs.offset_)) {
1795 GenInstrImmediate(
LB, rs.rm(), rd, rs.offset_);
1797 LoadRegPlusOffsetToAt(rs);
1798 GenInstrImmediate(
LB, at, rd, 0);
1803 void Assembler::lbu(Register rd,
const MemOperand& rs) {
1804 if (is_int16(rs.offset_)) {
1805 GenInstrImmediate(
LBU, rs.rm(), rd, rs.offset_);
1807 LoadRegPlusOffsetToAt(rs);
1808 GenInstrImmediate(
LBU, at, rd, 0);
1813 void Assembler::lh(Register rd,
const MemOperand& rs) {
1814 if (is_int16(rs.offset_)) {
1815 GenInstrImmediate(
LH, rs.rm(), rd, rs.offset_);
1817 LoadRegPlusOffsetToAt(rs);
1818 GenInstrImmediate(
LH, at, rd, 0);
1823 void Assembler::lhu(Register rd,
const MemOperand& rs) {
1824 if (is_int16(rs.offset_)) {
1825 GenInstrImmediate(
LHU, rs.rm(), rd, rs.offset_);
1827 LoadRegPlusOffsetToAt(rs);
1828 GenInstrImmediate(
LHU, at, rd, 0);
1833 void Assembler::lw(Register rd,
const MemOperand& rs) {
1834 if (is_int16(rs.offset_)) {
1835 GenInstrImmediate(
LW, rs.rm(), rd, rs.offset_);
1837 LoadRegPlusOffsetToAt(rs);
1838 GenInstrImmediate(
LW, at, rd, 0);
1843 void Assembler::lwu(Register rd,
const MemOperand& rs) {
1844 if (is_int16(rs.offset_)) {
1845 GenInstrImmediate(
LWU, rs.rm(), rd, rs.offset_);
1847 LoadRegPlusOffsetToAt(rs);
1848 GenInstrImmediate(
LWU, at, rd, 0);
1853 void Assembler::lwl(Register rd,
const MemOperand& rs) {
1854 GenInstrImmediate(
LWL, rs.rm(), rd, rs.offset_);
1858 void Assembler::lwr(Register rd,
const MemOperand& rs) {
1859 GenInstrImmediate(
LWR, rs.rm(), rd, rs.offset_);
1863 void Assembler::sb(Register rd,
const MemOperand& rs) {
1864 if (is_int16(rs.offset_)) {
1865 GenInstrImmediate(
SB, rs.rm(), rd, rs.offset_);
1867 LoadRegPlusOffsetToAt(rs);
1868 GenInstrImmediate(
SB, at, rd, 0);
1873 void Assembler::sh(Register rd,
const MemOperand& rs) {
1874 if (is_int16(rs.offset_)) {
1875 GenInstrImmediate(
SH, rs.rm(), rd, rs.offset_);
1877 LoadRegPlusOffsetToAt(rs);
1878 GenInstrImmediate(
SH, at, rd, 0);
1883 void Assembler::sw(Register rd,
const MemOperand& rs) {
1884 if (is_int16(rs.offset_)) {
1885 GenInstrImmediate(
SW, rs.rm(), rd, rs.offset_);
1887 LoadRegPlusOffsetToAt(rs);
1888 GenInstrImmediate(
SW, at, rd, 0);
1893 void Assembler::swl(Register rd,
const MemOperand& rs) {
1894 GenInstrImmediate(
SWL, rs.rm(), rd, rs.offset_);
1898 void Assembler::swr(Register rd,
const MemOperand& rs) {
1899 GenInstrImmediate(
SWR, rs.rm(), rd, rs.offset_);
1903 void Assembler::lui(Register rd,
int32_t j) {
1905 GenInstrImmediate(
LUI, zero_reg, rd, j);
1909 void Assembler::aui(Register rs, Register rt,
int32_t j) {
1913 GenInstrImmediate(
LUI, rs, rt, j);
1917 void Assembler::daui(Register rs, Register rt,
int32_t j) {
1919 GenInstrImmediate(
DAUI, rs, rt, j);
1923 void Assembler::dahi(Register rs,
int32_t j) {
1929 void Assembler::dati(Register rs,
int32_t j) {
1935 void Assembler::ldl(Register rd,
const MemOperand& rs) {
1936 GenInstrImmediate(
LDL, rs.rm(), rd, rs.offset_);
1940 void Assembler::ldr(Register rd,
const MemOperand& rs) {
1941 GenInstrImmediate(
LDR, rs.rm(), rd, rs.offset_);
1945 void Assembler::sdl(Register rd,
const MemOperand& rs) {
1946 GenInstrImmediate(
SDL, rs.rm(), rd, rs.offset_);
1950 void Assembler::sdr(Register rd,
const MemOperand& rs) {
1951 GenInstrImmediate(
SDR, rs.rm(), rd, rs.offset_);
1955 void Assembler::ld(Register rd,
const MemOperand& rs) {
1956 if (is_int16(rs.offset_)) {
1957 GenInstrImmediate(
LD, rs.rm(), rd, rs.offset_);
1959 LoadRegPlusOffsetToAt(rs);
1960 GenInstrImmediate(
LD, at, rd, 0);
1965 void Assembler::sd(Register rd,
const MemOperand& rs) {
1966 if (is_int16(rs.offset_)) {
1967 GenInstrImmediate(
SD, rs.rm(), rd, rs.offset_);
1969 LoadRegPlusOffsetToAt(rs);
1970 GenInstrImmediate(
SD, at, rd, 0);
1978 void Assembler::break_(
uint32_t code,
bool break_as_stop) {
1979 DCHECK((code & ~0xfffff) == 0);
1994 void Assembler::stop(
const char* msg,
uint32_t code) {
1997 #if defined(V8_HOST_ARCH_MIPS) || defined(V8_HOST_ARCH_MIPS64)
2000 BlockTrampolinePoolFor(3);
2004 emit(
reinterpret_cast<uint64_t
>(msg));
2009 void Assembler::tge(Register rs, Register rt,
uint16_t code) {
2012 | rt.code() <<
kRtShift | code << 6;
2017 void Assembler::tgeu(Register rs, Register rt,
uint16_t code) {
2020 | rt.code() <<
kRtShift | code << 6;
2025 void Assembler::tlt(Register rs, Register rt,
uint16_t code) {
2033 void Assembler::tltu(Register rs, Register rt,
uint16_t code) {
2037 | rt.code() <<
kRtShift | code << 6;
2042 void Assembler::teq(Register rs, Register rt,
uint16_t code) {
2050 void Assembler::tne(Register rs, Register rt,
uint16_t code) {
2060 void Assembler::mfhi(Register rd) {
2061 GenInstrRegister(
SPECIAL, zero_reg, zero_reg, rd, 0,
MFHI);
2065 void Assembler::mflo(Register rd) {
2066 GenInstrRegister(
SPECIAL, zero_reg, zero_reg, rd, 0,
MFLO);
2071 void Assembler::slt(Register rd, Register rs, Register rt) {
2072 GenInstrRegister(
SPECIAL, rs, rt, rd, 0,
SLT);
2076 void Assembler::sltu(Register rd, Register rs, Register rt) {
2081 void Assembler::slti(Register rt, Register rs,
int32_t j) {
2082 GenInstrImmediate(
SLTI, rs, rt, j);
2086 void Assembler::sltiu(Register rt, Register rs,
int32_t j) {
2087 GenInstrImmediate(
SLTIU, rs, rt, j);
2092 void Assembler::movz(Register rd, Register rs, Register rt) {
2097 void Assembler::movn(Register rd, Register rs, Register rt) {
2102 void Assembler::movt(Register rd, Register rs,
uint16_t cc) {
2104 rt.code_ = (
cc & 0x0007) << 2 | 1;
2109 void Assembler::movf(Register rd, Register rs,
uint16_t cc) {
2111 rt.code_ = (
cc & 0x0007) << 2 | 0;
2117 FPURegister ft, FPURegister fs, uint8_t sel) {
2129 void Assembler::seleqz(Register rs, Register rt, Register rd) {
2137 FPURegister ft, FPURegister fs) {
2149 void Assembler::selnez(Register rs, Register rt, Register rd) {
2157 FPURegister ft, FPURegister fs) {
2169 void Assembler::clz(Register rd, Register rs) {
2196 DCHECK(is_uint5(hint) && is_uint16(rs.offset_));
// ---------------------------------------------------------------------------
// FPU load/store and GPR<->FPU move emitters (garbled extraction — line
// numbers fused into text, emit()/braces dropped; restore from upstream).
// ---------------------------------------------------------------------------
// Load/store of single (LWC1/SWC1) and double (LDC1/SDC1) FP values; each
// forwards the base register and 16-bit offset to GenInstrImmediate.
2206 void Assembler::lwc1(FPURegister fd,
const MemOperand& src) {
2207 GenInstrImmediate(
LWC1, src.rm(), fd, src.offset_);
2211 void Assembler::ldc1(FPURegister fd,
const MemOperand& src) {
2212 GenInstrImmediate(
LDC1, src.rm(), fd, src.offset_);
2216 void Assembler::swc1(FPURegister fd,
const MemOperand& src) {
2217 GenInstrImmediate(
SWC1, src.rm(), fd, src.offset_);
2221 void Assembler::sdc1(FPURegister fd,
const MemOperand& src) {
2222 GenInstrImmediate(
SDC1, src.rm(), fd, src.offset_);
// GPR<->FPU data moves (bodies dropped by extraction): mtc1/mfc1 move a
// word, mthc1/mfhc1 the high word, dmtc1/dmfc1 a doubleword.
2226 void Assembler::mtc1(Register rt, FPURegister fs) {
2231 void Assembler::mthc1(Register rt, FPURegister fs) {
2236 void Assembler::dmtc1(Register rt, FPURegister fs) {
2241 void Assembler::mfc1(Register rt, FPURegister fs) {
2246 void Assembler::mfhc1(Register rt, FPURegister fs) {
2251 void Assembler::dmfc1(Register rt, FPURegister fs) {
// Moves to/from the FPU control registers.
2256 void Assembler::ctc1(Register rt, FPUControlRegister fs) {
2257 GenInstrRegister(
COP1,
CTC1, rt, fs);
2261 void Assembler::cfc1(Register rt, FPUControlRegister fs) {
2262 GenInstrRegister(
COP1,
CFC1, rt, fs);
// Fragment of a double->two-uint32 split helper: stores the low 32 bits of
// the bit pattern `i` through `lo` — presumably DoubleAsTwoUInt32; verify.
2270 *
lo =
i & 0xffffffff;
// ---------------------------------------------------------------------------
// FPU arithmetic / rounding / conversion emitters (garbled extraction).
// All complete fragments follow the same shape: COP1 opcode, a format field
// (D here, or a `fmt` parameter), then ft/fs/fd and the function code.
// ---------------------------------------------------------------------------
2277 void Assembler::add_d(FPURegister fd, FPURegister fs, FPURegister ft) {
2278 GenInstrRegister(
COP1,
D, ft, fs, fd,
ADD_D);
2282 void Assembler::sub_d(FPURegister fd, FPURegister fs, FPURegister ft) {
2283 GenInstrRegister(
COP1,
D, ft, fs, fd,
SUB_D);
2287 void Assembler::mul_d(FPURegister fd, FPURegister fs, FPURegister ft) {
2288 GenInstrRegister(
COP1,
D, ft, fs, fd,
MUL_D);
// Fused multiply-add (signature fragment only; body dropped).
2292 void Assembler::madd_d(FPURegister fd, FPURegister fr, FPURegister fs,
2298 void Assembler::div_d(FPURegister fd, FPURegister fs, FPURegister ft) {
2299 GenInstrRegister(
COP1,
D, ft, fs, fd,
DIV_D);
// Unary double ops and the full matrix of FP rounding/conversion emitters;
// bodies were dropped by the extraction — only the signatures remain.
2303 void Assembler::abs_d(FPURegister fd, FPURegister fs) {
2308 void Assembler::mov_d(FPURegister fd, FPURegister fs) {
2313 void Assembler::neg_d(FPURegister fd, FPURegister fs) {
2318 void Assembler::sqrt_d(FPURegister fd, FPURegister fs) {
2325 void Assembler::cvt_w_s(FPURegister fd, FPURegister fs) {
2330 void Assembler::cvt_w_d(FPURegister fd, FPURegister fs) {
2335 void Assembler::trunc_w_s(FPURegister fd, FPURegister fs) {
2340 void Assembler::trunc_w_d(FPURegister fd, FPURegister fs) {
2345 void Assembler::round_w_s(FPURegister fd, FPURegister fs) {
2350 void Assembler::round_w_d(FPURegister fd, FPURegister fs) {
2355 void Assembler::floor_w_s(FPURegister fd, FPURegister fs) {
2360 void Assembler::floor_w_d(FPURegister fd, FPURegister fs) {
2365 void Assembler::ceil_w_s(FPURegister fd, FPURegister fs) {
2370 void Assembler::ceil_w_d(FPURegister fd, FPURegister fs) {
2375 void Assembler::cvt_l_s(FPURegister fd, FPURegister fs) {
2381 void Assembler::cvt_l_d(FPURegister fd, FPURegister fs) {
2387 void Assembler::trunc_l_s(FPURegister fd, FPURegister fs) {
2393 void Assembler::trunc_l_d(FPURegister fd, FPURegister fs) {
2399 void Assembler::round_l_s(FPURegister fd, FPURegister fs) {
2404 void Assembler::round_l_d(FPURegister fd, FPURegister fs) {
2409 void Assembler::floor_l_s(FPURegister fd, FPURegister fs) {
2414 void Assembler::floor_l_d(FPURegister fd, FPURegister fs) {
2419 void Assembler::ceil_l_s(FPURegister fd, FPURegister fs) {
2424 void Assembler::ceil_l_d(FPURegister fd, FPURegister fs) {
// r6 min/max family: format-parameterized (fmt selects S or D); the `min`
// signature itself was lost to the extraction — only its body survives.
2433 GenInstrRegister(
COP1, fmt, ft, fs, fd,
MIN);
2437 void Assembler::mina(
SecondaryField fmt, FPURegister fd, FPURegister ft,
2441 GenInstrRegister(
COP1, fmt, ft, fs, fd,
MINA);
2445 void Assembler::max(
SecondaryField fmt, FPURegister fd, FPURegister ft,
2449 GenInstrRegister(
COP1, fmt, ft, fs, fd,
MAX);
2453 void Assembler::maxa(
SecondaryField fmt, FPURegister fd, FPURegister ft,
2457 GenInstrRegister(
COP1, fmt, ft, fs, fd,
MAXA);
// Conversions into single/double from word, long and the other FP format.
2461 void Assembler::cvt_s_w(FPURegister fd, FPURegister fs) {
2466 void Assembler::cvt_s_l(FPURegister fd, FPURegister fs) {
2472 void Assembler::cvt_s_d(FPURegister fd, FPURegister fs) {
2477 void Assembler::cvt_d_w(FPURegister fd, FPURegister fs) {
2482 void Assembler::cvt_d_l(FPURegister fd, FPURegister fs) {
2488 void Assembler::cvt_d_s(FPURegister fd, FPURegister fs) {
// ---------------------------------------------------------------------------
// FPU conditional select / branch / compare (garbled extraction).
// ---------------------------------------------------------------------------
// Orphaned parameter list — presumably an FPU `sel` overload; verify upstream.
2495 FPURegister fd, FPURegister fs, FPURegister ft) {
// r6 branches on an FPU register being (non-)zero; bodies dropped.
2504 void Assembler::bc1eqz(
int16_t offset, FPURegister ft) {
2511 void Assembler::bc1nez(
int16_t offset, FPURegister ft) {
// Pre-r6 c.cond.fmt compare: the surviving fragment ORs the condition code
// `cc` at bit 8, the constant 3 at bit 4, and `cond` into the low bits.
2520 FPURegister fs, FPURegister ft,
uint16_t cc) {
2525 |
cc << 8 | 3 << 4 | cond;
// fcmp: loads the double constant src2 into f14 (mtc1 of zero_reg visible;
// presumably followed by setting the high half — extraction dropped lines),
// then compares src1 against f14 with condition `cond`, cc 0.
2530 void Assembler::fcmp(FPURegister src1,
const double src2,
2533 mtc1(zero_reg,
f14);
2535 c(cond,
D, src1,
f14, 0);
// ---------------------------------------------------------------------------
// Debug-support recorders (garbled extraction — closing braces dropped).
// ---------------------------------------------------------------------------
// Flushes pending source positions, then marks the current pc with a
// JS_RETURN reloc entry so the debugger can find the return site.
2554 void Assembler::RecordJSReturn() {
2555 positions_recorder()->WriteRecordedPositions();
2557 RecordRelocInfo(RelocInfo::JS_RETURN);
// Same pattern for debug-break slots.
2561 void Assembler::RecordDebugBreakSlot() {
2562 positions_recorder()->WriteRecordedPositions();
2564 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
// Attaches a code comment (the raw msg pointer, cast to intptr_t, is the
// reloc payload) — only when --code-comments is enabled.
2568 void Assembler::RecordComment(
const char* msg) {
2569 if (FLAG_code_comments) {
2571 RecordRelocInfo(RelocInfo::COMMENT,
reinterpret_cast<intptr_t
>(msg));
// ---------------------------------------------------------------------------
// Patches an internal reference at `pc` after the code buffer moved by
// `pc_delta`. (Garbled extraction: several lines, including the reads of
// `instr` and the imm recomputation/writes, were dropped.)
// ---------------------------------------------------------------------------
2576 int Assembler::RelocateInternalReference(
byte*
pc, intptr_t pc_delta) {
2578 DCHECK(IsJ(instr) || IsLui(instr));
// For the lui/ori/.../ori load sequence: read the three 16-bit immediate
// carriers at pc+0, pc+1 and pc+3 instruction slots. The slot at pc+2 is
// skipped — presumably a shift (dsll) with no immediate to patch; verify.
2580 Instr instr_lui = instr_at(
pc + 0 * Assembler::kInstrSize);
2581 Instr instr_ori = instr_at(
pc + 1 * Assembler::kInstrSize);
2582 Instr instr_ori2 = instr_at(
pc + 3 * Assembler::kInstrSize);
2583 DCHECK(IsOri(instr_ori));
2584 DCHECK(IsOri(instr_ori2));
// Reassemble the target address from the three 16-bit fields, placed at
// bit positions 48/32/16 (sign handling happens after the dropped lines).
2586 int64_t imm = (instr_lui &
static_cast<int64_t
>(
kImm16Mask)) << 48;
2587 imm |= (instr_ori &
static_cast<int64_t
>(
kImm16Mask)) << 32;
2588 imm |= (instr_ori2 &
static_cast<int64_t
>(
kImm16Mask)) << 16;
// kEndOfJumpChain marks an unbound label — nothing to relocate then.
2592 if (imm == kEndOfJumpChain) {
// Write back the patched immediates into the same three slots.
2602 instr_at_put(
pc + 0 * Assembler::kInstrSize,
2604 instr_at_put(
pc + 1 * Assembler::kInstrSize,
2606 instr_at_put(
pc + 3 * Assembler::kInstrSize,
// J-instruction path (fragment): 28-bit region-relative target, must be
// word-aligned and fit in 26 bits after the >>2 encoding.
2611 if (
static_cast<int32_t>(imm28) == kEndOfJumpChain) {
2617 DCHECK((imm28 & 3) == 0);
2621 DCHECK(is_uint26(imm26));
// ---------------------------------------------------------------------------
// Grows the code buffer when it fills up: allocate a larger buffer, move
// code (from the front) and reloc info (from the back), then fix up every
// INTERNAL_REFERENCE entry for the new base address. (Garbled extraction —
// some lines, e.g. the DeleteArray of the old buffer, were dropped.)
// ---------------------------------------------------------------------------
2629 void Assembler::GrowBuffer() {
// A caller-supplied (external) buffer cannot be grown.
2630 if (!own_buffer_)
FATAL(
"external code buffer is too small");
// Growth policy: double while under 1 MB, then grow linearly by 1 MB.
2634 if (buffer_size_ < 1 *
MB) {
2635 desc.buffer_size = 2*buffer_size_;
2637 desc.buffer_size = buffer_size_ + 1*
MB;
2642 desc.buffer = NewArray<byte>(desc.buffer_size);
2644 desc.instr_size = pc_offset();
2645 desc.reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
// pc_delta: shift of the code (front-anchored); rc_delta: shift of the
// reloc info (end-anchored), hence computed from the buffer ends.
2648 intptr_t pc_delta = desc.buffer - buffer_;
2649 intptr_t rc_delta = (desc.buffer + desc.buffer_size) -
2650 (buffer_ + buffer_size_);
2651 MemMove(desc.buffer, buffer_, desc.instr_size);
2652 MemMove(reloc_info_writer.pos() + rc_delta,
2653 reloc_info_writer.pos(), desc.reloc_size);
2657 buffer_ = desc.buffer;
2658 buffer_size_ = desc.buffer_size;
2660 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta,
2661 reloc_info_writer.last_pc() + pc_delta);
// Internal references are absolute addresses into the buffer — patch each.
2664 for (RelocIterator it(desc); !it.done(); it.next()) {
2665 RelocInfo::Mode rmode = it.rinfo()->rmode();
2666 if (rmode == RelocInfo::INTERNAL_REFERENCE) {
2667 byte* p =
reinterpret_cast<byte*
>(it.rinfo()->pc());
2668 RelocateInternalReference(p, pc_delta);
2678 *
reinterpret_cast<uint8_t*
>(pc_) = data;
2679 pc_ +=
sizeof(uint8_t);
2683 void Assembler::dd(
uint32_t data) {
2685 *
reinterpret_cast<uint32_t*
>(pc_) = data;
2690 void Assembler::emit_code_stub_address(Code* stub) {
2692 *
reinterpret_cast<uint64_t*
>(pc_) =
2693 reinterpret_cast<uint64_t
>(stub->instruction_start());
2694 pc_ +=
sizeof(uint64_t);
// ---------------------------------------------------------------------------
// Records a relocation entry for the current pc. (Garbled extraction — the
// serializer/debug early-return bodies and some arguments were dropped.)
// ---------------------------------------------------------------------------
2698 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
2700 RelocInfo rinfo(pc_, rmode, data,
NULL);
// Modes in [JS_RETURN, DEBUG_BREAK_SLOT] carry no target address; assert
// they are one of the position/comment/debug kinds.
2701 if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) {
2703 DCHECK(RelocInfo::IsDebugBreakSlot(rmode)
2704 || RelocInfo::IsJSReturn(rmode)
2705 || RelocInfo::IsComment(rmode)
2706 || RelocInfo::IsPosition(rmode));
2709 if (!RelocInfo::IsNone(rinfo.rmode())) {
// External references are skipped unless serializing or emitting debug
// code (the early return itself was dropped by the extraction).
2711 if (rmode == RelocInfo::EXTERNAL_REFERENCE &&
2712 !serializer_enabled() && !emit_debug_code()) {
2715 DCHECK(buffer_space() >= kMaxRelocSize);
// CODE_TARGET_WITH_ID additionally carries the recorded AST type-feedback
// id, which is consumed (cleared) once written.
2716 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) {
2717 RelocInfo reloc_info_with_ast_id(pc_,
2719 RecordedAstId().ToInt(),
2721 ClearRecordedAstId();
2722 reloc_info_writer.Write(&reloc_info_with_ast_id);
2724 reloc_info_writer.Write(&rinfo);
// ---------------------------------------------------------------------------
// Trampoline-pool management (garbled extraction — the pool-emission loop
// body, the branch over the pool, and several returns were dropped).
// ---------------------------------------------------------------------------
// Blocks pool emission for the next `instructions` instructions.
2730 void Assembler::BlockTrampolinePoolFor(
int instructions) {
2731 BlockTrampolinePoolBefore(pc_offset() + instructions * kInstrSize);
2735 void Assembler::CheckTrampolinePool() {
// If emission is blocked (nesting scope or an explicit pc threshold),
// just reschedule the next check and bail out.
2741 if ((trampoline_pool_blocked_nesting_ > 0) ||
2742 (pc_offset() < no_trampoline_pool_before_)) {
2745 if (trampoline_pool_blocked_nesting_ > 0) {
2746 next_buffer_check_ = pc_offset() + kInstrSize;
2748 next_buffer_check_ = no_trampoline_pool_before_;
// The pool is emitted at most once.
2753 DCHECK(!trampoline_emitted_);
2754 DCHECK(unbound_labels_count_ >= 0);
2755 if (unbound_labels_count_ > 0) {
2757 { BlockTrampolinePoolScope block_trampoline_pool(
this);
// Emit one long-jump slot per unbound label; each slot's target is an
// internal reference that must not trigger buffer growth mid-emission.
2762 int pool_start = pc_offset();
2763 for (
int i = 0;
i < unbound_labels_count_;
i++) {
2765 imm64 = jump_address(&after_pool);
2766 { BlockGrowBufferScope block_buf_growth(
this);
2770 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
2782 trampoline_ = Trampoline(pool_start, unbound_labels_count_);
2784 trampoline_emitted_ =
true;
// After emission, the next check is one maximal branch range (minus a
// safety margin of 16 slots) away.
2792 next_buffer_check_ = pc_offset() +
2793 kMaxBranchOffset - kTrampolineSlotsSize * 16;
// ---------------------------------------------------------------------------
// Fragment of Assembler::target_address_at (the signature and the read of
// instr0 were dropped by the extraction): decodes the 48-bit address loaded
// by a lui/ori/.../ori sequence at pc+0, pc+1 and pc+3.
// ---------------------------------------------------------------------------
2801 Instr instr1 = instr_at(
pc + 1 * kInstrSize);
2802 Instr instr3 = instr_at(
pc + 3 * kInstrSize);
2806 if ((GetOpcodeField(instr0) ==
LUI) && (GetOpcodeField(instr1) ==
ORI) &&
2807 (GetOpcodeField(instr3) ==
ORI)) {
// Assemble the three 16-bit immediates into bits 47..0.
2809 int64_t addr =
static_cast<int64_t
>(
2810 ((uint64_t)(GetImmediate16(instr0)) << 32) |
2811 ((uint64_t)(GetImmediate16(instr1)) << 16) |
2812 ((uint64_t)(GetImmediate16(instr3))));
// Sign-extend from bit 47 via the shift-left/arithmetic-shift-right idiom.
2815 addr = (addr << 16) >> 16;
2816 return reinterpret_cast<Address>(addr);
// Overwrites a HeapNumber's payload with the platform quiet NaN.
2828 void Assembler::QuietNaN(HeapObject*
object) {
2829 HeapNumber::cast(
object)->set_value(base::OS::nan_value());
// ---------------------------------------------------------------------------
// Patches the 4-instruction lui/ori/.../ori address-load sequence at `pc`
// to load `target`, then flushes the icache over those 4 instructions.
// (Garbled extraction — the actual instr_at_put patch writes were dropped.)
// ---------------------------------------------------------------------------
2845 void Assembler::set_target_address_at(
Address pc,
2853 Instr instr1 = instr_at(
pc + kInstrSize);
2856 uint64_t itarget =
reinterpret_cast<uint64_t
>(target);
// Sanity-check the expected opcode pattern before patching (CHECK, not
// DCHECK — this guards release builds too).
2861 Instr instr3 = instr_at(
pc + kInstrSize * 3);
2862 CHECK((GetOpcodeField(instr0) ==
LUI && GetOpcodeField(instr1) ==
ORI &&
2863 GetOpcodeField(instr3) ==
ORI));
2878 CpuFeatures::FlushICache(
pc, 4 * Assembler::kInstrSize);
// ---------------------------------------------------------------------------
// Rewrites a label-jump at `pc` into a register-jump, depending on whether
// the instruction at pc+6 slots is jal or j. (Garbled extraction — the
// actual patch writes inside both branches were dropped.) Only the patched
// word at pc+6 is icache-flushed.
// ---------------------------------------------------------------------------
2883 void Assembler::JumpLabelToJumpRegister(
Address pc) {
2890 Instr instr2 = instr_at(
pc + 1 * kInstrSize);
2891 Instr instr3 = instr_at(
pc + 6 * kInstrSize);
2892 bool patched =
false;
2894 if (IsJal(instr3)) {
2902 }
else if (IsJ(instr3)) {
2912 CpuFeatures::FlushICache(
pc+6,
sizeof(
int32_t));
// MIPS64 has no out-of-line constant pool: NewConstantPool returns the
// empty array and PopulateConstantPool asserts the flag is off.
2917 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
2919 DCHECK(!FLAG_enable_ool_constant_pool);
2920 return isolate->factory()->empty_constant_pool_array();
2924 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
2926 DCHECK(!FLAG_enable_ool_constant_pool);
static const int kInstrSize
static void FlushICache(void *start, size_t size)
static unsigned supported_
static void PrintFeatures()
static void PrintTarget()
static void ProbeImpl(bool cross_compile)
Operand(Register reg, Shift shift=LSL, unsigned shift_amount=0)
static const int kCodeTargetMask
static const int kApplyMask
void PatchCode(byte *instructions, int instruction_count)
void PatchCodeWithCall(Address target, int guard_bytes)
#define UNIMPLEMENTED_MIPS()
static const ArchVariants kArchVariant
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define DCHECK(condition)
bool IsPowerOfTwo32(uint32_t value)
Matcher< Node * > IsBranch(const Matcher< Node * > &value_matcher, const Matcher< Node * > &control_matcher)
LinkageHelper< ArmLinkageHelperTraits > LH
const Instr kSwRegFpNegOffsetPattern
void DeleteArray(T *array)
const Instr kPopInstruction
static int min(int a, int b)
const uint32_t kMaxStopCode
const uint32_t kMaxWatchpointCode
const Instr kLwSwInstrArgumentMask
void MemMove(void *dest, const void *src, size_t size)
int ToNumber(Register reg)
kSerializedDataOffset Object
const int kFunctionFieldMask
const Instr kLwSwOffsetMask
Handle< T > handle(T *t, Isolate *isolate)
const Instr kSwRegFpOffsetPattern
const Instr kLwRegFpNegOffsetPattern
void PrintF(const char *format,...)
Register ToRegister(int num)
const int kRegister_fp_Code
const Instr kPushRegPattern
const Instr kLwRegFpOffsetPattern
const Instr kPushInstruction
const Instr kPopRegPattern
const Instr kLwSwInstrTypeMask
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
const int kRegister_sp_Code
Debugger support for the V8 JavaScript engine.
static const uint16_t * Align(const uint16_t *chars)
static const char * AllocationIndexToString(int index)
static const int kMaxNumAllocatableRegisters