MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}
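// The Jump/Call helpers below branch either through a register, a raw
// address, or a Handle<Code>, threading the RelocInfo::Mode through so the
// target can be identified and patched later.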
void MacroAssembler::Jump(Register target, Condition cond) {

void MacroAssembler::Jump(intptr_t target, RelocInfo::Mode rmode,
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  mov(pc, Operand(target, rmode), LeaveCC, cond);

void MacroAssembler::Jump(Address target, RelocInfo::Mode rmode,
  DCHECK(!RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(target), rmode, cond);

void MacroAssembler::Jump(Handle<Code> code, RelocInfo::Mode rmode,
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  Jump(reinterpret_cast<intptr_t>(code.location()), rmode, cond);

int MacroAssembler::CallSize(Register target, Condition cond) {

void MacroAssembler::Call(Register target, Condition cond) {
  BlockConstPoolScope block_const_pool(this);
  DCHECK_EQ(CallSize(target, cond), SizeOfCodeGeneratedSince(&start));
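// The CallSize* helpers predict how many bytes the matching Call() will
// emit; Call() asserts the prediction (DCHECK_EQ against
// SizeOfCodeGeneratedSince) so call sites keep a fixed, patchable size.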
int MacroAssembler::CallSize(
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
         mov_operand.instructions_required(this, mov_instr) * kInstrSize;

int MacroAssembler::CallStubSize(
  return CallSize(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);

int MacroAssembler::CallSizeNotPredictableCodeSize(Isolate* isolate,
                                                   RelocInfo::Mode rmode,
  Operand mov_operand = Operand(reinterpret_cast<intptr_t>(target), rmode);
         mov_operand.instructions_required(NULL, mov_instr) * kInstrSize;
void MacroAssembler::Call(Address target,
                          RelocInfo::Mode rmode,
  BlockConstPoolScope block_const_pool(this);
  bool old_predictable_code_size = predictable_code_size();
    set_predictable_code_size(true);
  int expected_size = CallSize(target, rmode, cond);
  positions_recorder()->WriteRecordedPositions();
  mov(ip, Operand(reinterpret_cast<int32_t>(target), rmode));
  DCHECK_EQ(expected_size, SizeOfCodeGeneratedSince(&start));
    set_predictable_code_size(old_predictable_code_size);
int MacroAssembler::CallSize(Handle<Code> code,
                             RelocInfo::Mode rmode,
                             TypeFeedbackId ast_id,
  return CallSize(reinterpret_cast<Address>(code.location()), rmode, cond);

void MacroAssembler::Call(Handle<Code> code,
                          RelocInfo::Mode rmode,
                          TypeFeedbackId ast_id,
  DCHECK(RelocInfo::IsCodeTarget(rmode));
  if (rmode == RelocInfo::CODE_TARGET && !ast_id.IsNone()) {
    SetRecordedAstId(ast_id);
    rmode = RelocInfo::CODE_TARGET_WITH_ID;
  }
  Call(reinterpret_cast<Address>(code.location()), rmode, cond, mode);
void MacroAssembler::Ret(Condition cond) {

void MacroAssembler::Drop(int count, Condition cond) {

void MacroAssembler::Ret(int drop, Condition cond) {

void MacroAssembler::Swap(Register reg1,
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    eor(reg2, reg2, Operand(reg1), LeaveCC, cond);
    eor(reg1, reg1, Operand(reg2), LeaveCC, cond);
    mov(scratch, reg1, LeaveCC, cond);
    mov(reg1, reg2, LeaveCC, cond);
    mov(reg2, scratch, LeaveCC, cond);
void MacroAssembler::Call(Label* target) {

void MacroAssembler::Move(Register dst, Handle<Object> value) {
  if (value->IsSmi()) {
    mov(dst, Operand(value));
    DCHECK(value->IsHeapObject());
    if (isolate()->heap()->InNewSpace(*value)) {
      Handle<Cell> cell = isolate()->factory()->NewCell(value);
      mov(dst, Operand(cell));
      mov(dst, Operand(value));

void MacroAssembler::Move(Register dst, Register src, Condition cond) {

void MacroAssembler::Move(DwVfpRegister dst, DwVfpRegister src) {

void MacroAssembler::Mls(Register dst, Register src1, Register src2,
  if (CpuFeatures::IsSupported(MLS)) {
    CpuFeatureScope scope(this, MLS);
    mls(dst, src1, src2, srcA, cond);
void MacroAssembler::And(Register dst, Register src1, const Operand& src2,
  if (!src2.is_reg() &&
      !src2.must_output_reloc_info(this) &&
      src2.immediate() == 0) {
    mov(dst, Operand::Zero(), LeaveCC, cond);
  } else if (!(src2.instructions_required(this) == 1) &&
             !src2.must_output_reloc_info(this) &&
             CpuFeatures::IsSupported(ARMv7) &&
    and_(dst, src1, src2, LeaveCC, cond);
void MacroAssembler::Ubfx(Register dst, Register src1, int lsb, int width,
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    mov(dst, Operand(dst, LSR, lsb), LeaveCC, cond);
    ubfx(dst, src1, lsb, width, cond);

void MacroAssembler::Sbfx(Register dst, Register src1, int lsb, int width,
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    and_(dst, src1, Operand(mask), LeaveCC, cond);
    int shift_up = 32 - lsb - width;
    int shift_down = lsb + shift_up;
    mov(dst, Operand(dst, LSL, shift_up), LeaveCC, cond);
    if (shift_down != 0) {
      mov(dst, Operand(dst, ASR, shift_down), LeaveCC, cond);
    sbfx(dst, src1, lsb, width, cond);
void MacroAssembler::Bfi(Register dst,
  DCHECK(0 <= lsb && lsb < 32);
  DCHECK(0 <= width && width < 32);
  if (width == 0) return;
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, dst, Operand(mask));
    and_(scratch, src, Operand((1 << width) - 1));
    mov(scratch, Operand(scratch, LSL, lsb));
    orr(dst, dst, scratch);
    bfi(dst, src, lsb, width, cond);

void MacroAssembler::Bfc(Register dst, Register src, int lsb, int width,
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    int mask = (1 << (width + lsb)) - 1 - ((1 << lsb) - 1);
    bic(dst, src, Operand(mask));
    Move(dst, src, cond);
    bfc(dst, lsb, width, cond);

void MacroAssembler::Usat(Register dst, int satpos, const Operand& src,
  if (!CpuFeatures::IsSupported(ARMv7) || predictable_code_size()) {
    DCHECK((satpos >= 0) && (satpos <= 31));
    DCHECK((src.shift_op() == ASR) || (src.shift_op() == LSL));
    int satval = (1 << satpos) - 1;
    if (!(src.is_reg() && dst.is(src.rm()))) {
    tst(dst, Operand(~satval));
    usat(dst, satpos, src, cond);
void MacroAssembler::Load(Register dst,
  if (r.IsInteger8()) {
  } else if (r.IsUInteger8()) {
  } else if (r.IsInteger16()) {
  } else if (r.IsUInteger16()) {

void MacroAssembler::Store(Register src,
  if (r.IsInteger8() || r.IsUInteger8()) {
  } else if (r.IsInteger16() || r.IsUInteger16()) {
  if (r.IsHeapObject()) {
  } else if (r.IsSmi()) {

void MacroAssembler::LoadRoot(Register destination,
                              Heap::RootListIndex index,
      isolate()->heap()->RootCanBeTreatedAsConstant(index) &&
      !predictable_code_size()) {
    Handle<Object> root(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, Operand(root), LeaveCC, cond);

void MacroAssembler::StoreRoot(Register source,
                               Heap::RootListIndex index,

void MacroAssembler::InNewSpace(Register object,
  and_(scratch, object, Operand(ExternalReference::new_space_mask(isolate())));
  cmp(scratch, Operand(ExternalReference::new_space_start(isolate())));
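// Write-barrier helpers: RecordWriteField, RecordWriteForMap and RecordWrite
// inform the store buffer and the incremental marker when a pointer is
// written into a heap object; in debug builds the clobbered registers are
// zapped with kZapValue-derived constants afterwards.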
void MacroAssembler::RecordWriteField(
  JumpIfSmi(value, &done);
  if (emit_debug_code()) {
    stop("Unaligned cell in write barrier");
              remembered_set_action,
              pointers_to_here_check_for_value);
  if (emit_debug_code()) {
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 4)));
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 8)));

void MacroAssembler::RecordWriteForMap(Register object,
  if (emit_debug_code()) {
    cmp(dst, Operand(isolate()->factory()->meta_map()));
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
  if (!FLAG_incremental_marking) {
  if (emit_debug_code()) {
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
                MemoryChunk::kPointersToHereAreInterestingMask,
  add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
  if (emit_debug_code()) {
    stop("Unaligned cell in write barrier");
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);
  if (emit_debug_code()) {
    mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
void MacroAssembler::RecordWrite(
  DCHECK(!object.is(value));
  if (emit_debug_code()) {
    Check(eq, kWrongAddressOrValuePassedToRecordWrite);
      !FLAG_incremental_marking) {
  JumpIfSmi(value, &done);
                MemoryChunk::kPointersToHereAreInterestingMask,
  CheckPageFlag(object,
                MemoryChunk::kPointersFromHereAreInterestingMask,
  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip,
  if (emit_debug_code()) {
    mov(address, Operand(bit_cast<int32_t>(kZapValue + 12)));
    mov(value, Operand(bit_cast<int32_t>(kZapValue + 16)));

void MacroAssembler::RememberedSetHelper(Register object,
                                         RememberedSetFinalAction and_then) {
  if (emit_debug_code()) {
    JumpIfNotInNewSpace(object, scratch, &ok);
    stop("Remembered set pointer is in new space");
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(ip, Operand(store_buffer));
  tst(scratch, Operand(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kFallThroughAtEnd) {
    DCHECK(and_then == kReturnAtEnd);
  StoreBufferOverflowStub store_buffer_overflow(isolate(), fp_mode);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
void MacroAssembler::PushFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  stm(db_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
                (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |

void MacroAssembler::PopFixedFrame(Register marker_reg) {
  DCHECK(!marker_reg.is_valid() || marker_reg.code() < cp.code());
  ldm(ia_w, sp, (marker_reg.is_valid() ? marker_reg.bit() : 0) |
                (FLAG_enable_ool_constant_pool ? pp.bit() : 0) |

void MacroAssembler::PushSafepointRegisters() {

void MacroAssembler::PopSafepointRegisters() {

void MacroAssembler::StoreToSafepointRegisterSlot(Register src, Register dst) {
  str(src, SafepointRegisterSlot(dst));

void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  ldr(dst, SafepointRegisterSlot(src));

int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {

MemOperand MacroAssembler::SafepointRegisterSlot(Register reg) {

MemOperand MacroAssembler::SafepointRegistersAndDoublesSlot(Register reg) {
  DCHECK(!serializer_enabled());
  int doubles_size = DwVfpRegister::NumAllocatableRegisters() * kDoubleSize;
  int register_offset = SafepointRegisterStackIndex(reg.code()) * kPointerSize;
void MacroAssembler::Ldrd(Register dst1, Register dst2,
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (dst1.code() % 2 == 0) && (dst1.code() + 1 == dst2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    ldrd(dst1, dst2, src, cond);
    src2.set_offset(src2.offset() + 4);
    if (dst1.is(src.rn())) {
      ldr(dst2, src2, cond);
      ldr(dst1, src, cond);
      ldr(dst1, src, cond);
      ldr(dst2, src2, cond);
    if (dst1.is(src.rn())) {
      ldr(dst1, src, cond);
      src2.set_offset(src2.offset() - 4);
      ldr(dst2, src2, cond);

void MacroAssembler::Strd(Register src1, Register src2,
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size() &&
      (src1.code() % 2 == 0) && (src1.code() + 1 == src2.code())) {
    CpuFeatureScope scope(this, ARMv7);
    strd(src1, src2, dst, cond);
    dst2.set_offset(dst2.offset() + 4);
    str(src1, dst, cond);
    str(src2, dst2, cond);
    dst2.set_offset(dst2.offset() - 4);
    str(src2, dst2, cond);
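// Ldrd/Strd above emit a real ldrd/strd only on ARMv7 with an even/odd
// register pair; otherwise they split the access into two 32-bit
// loads/stores, ordered so a destination that aliases the base register is
// written last.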
void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
  if (emit_debug_code()) {
    Label rounding_mode_correct;
    b(eq, &rounding_mode_correct);
    stop("Default rounding mode not set");
    bind(&rounding_mode_correct);

void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister dst,
                                        const DwVfpRegister src,

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
  VFPCompareAndLoadFlags(src1, src2, pc, cond);

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
  VFPCompareAndLoadFlags(src1, src2, pc, cond);

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const DwVfpRegister src2,
                                            const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);

void MacroAssembler::VFPCompareAndLoadFlags(const DwVfpRegister src1,
                                            const Register fpscr_flags,
  vcmp(src1, src2, cond);
  vmrs(fpscr_flags, cond);
void MacroAssembler::Vmov(const DwVfpRegister dst,
                          const Register scratch) {
  static const DoubleRepresentation minus_zero(-0.0);
  static const DoubleRepresentation zero(0.0);
  DoubleRepresentation value_rep(imm);
  if (value_rep == zero) {
  } else if (value_rep == minus_zero) {
    vmov(dst, imm, scratch);

void MacroAssembler::VmovHigh(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.high());

void MacroAssembler::VmovHigh(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.high(), src);

void MacroAssembler::VmovLow(Register dst, DwVfpRegister src) {
  if (src.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(src.code());
    vmov(dst, loc.low());

void MacroAssembler::VmovLow(DwVfpRegister dst, Register src) {
  if (dst.code() < 16) {
    const LowDwVfpRegister loc = LowDwVfpRegister::from_code(dst.code());
    vmov(loc.low(), src);
void MacroAssembler::LoadConstantPoolPointerRegister() {
  if (FLAG_enable_ool_constant_pool) {
    int constant_pool_offset = Code::kConstantPoolOffset - Code::kHeaderSize -
                               pc_offset() - Instruction::kPCReadOffset;
    DCHECK(ImmediateFitsAddrMode2Instruction(constant_pool_offset));

void MacroAssembler::StubPrologue() {
  add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  if (FLAG_enable_ool_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);

void MacroAssembler::Prologue(bool code_pre_aging) {
  { PredictableCodeSizeScope predictible_code_size_scope(
    if (code_pre_aging) {
      Code* stub = Code::GetPreAgedCodeAgeStub(isolate());
      add(r0, pc, Operand(-8));
      emit_code_stub_address(stub);
      add(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  if (FLAG_enable_ool_constant_pool) {
    LoadConstantPoolPointerRegister();
    set_constant_pool_available(true);
                                bool load_constant_pool) {
  if (FLAG_enable_ool_constant_pool && load_constant_pool) {
    LoadConstantPoolPointerRegister();
  mov(ip, Operand(Smi::FromInt(type)));
  mov(ip, Operand(CodeObject()));
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
  if (FLAG_enable_ool_constant_pool) {
    add(sp, fp, Operand(StandardFrameConstants::kConstantPoolOffset));
    frame_ends = pc_offset();
  frame_ends = pc_offset();

void MacroAssembler::EnterExitFrame(bool save_doubles, int stack_space) {
  mov(fp, Operand(sp));
  sub(sp, sp, Operand(ExitFrameConstants::kFrameSize));
  if (emit_debug_code()) {
    mov(ip, Operand::Zero());
  if (FLAG_enable_ool_constant_pool) {
    str(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  mov(ip, Operand(CodeObject()));
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  const int frame_alignment = MacroAssembler::ActivationFrameAlignment();
  if (frame_alignment > 0) {
    and_(sp, sp, Operand(-frame_alignment));
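// Note: before calling out to C++, EnterExitFrame re-aligns sp to
// ActivationFrameAlignment() (the and_ with -frame_alignment above),
// matching the exit-frame layout described by ExitFrameConstants.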
void MacroAssembler::InitializeNewString(Register string,
                                         Heap::RootListIndex map_index,
                                         Register scratch2) {
  SmiTag(scratch1, length);
  LoadRoot(scratch2, map_index);
  mov(scratch1, Operand(String::kEmptyHashField));

int MacroAssembler::ActivationFrameAlignment() {
#if V8_HOST_ARCH_ARM
  return base::OS::ActivationFrameAlignment();
  return FLAG_sim_stack_alignment;

void MacroAssembler::LeaveExitFrame(bool save_doubles,
                                    Register argument_count,
                                    bool restore_context) {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);
    const int offset = ExitFrameConstants::kFrameSize;
        Operand(offset + DwVfpRegister::kMaxNumRegisters * kDoubleSize));
    RestoreFPRegs(r3, ip);
  mov(r3, Operand::Zero());
  mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
  if (restore_context) {
    mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  mov(ip, Operand(ExternalReference(Isolate::kContextAddress, isolate())));
  if (FLAG_enable_ool_constant_pool) {
    ldr(pp, MemOperand(fp, ExitFrameConstants::kConstantPoolOffset));
  mov(sp, Operand(fp));
  if (argument_count.is_valid()) {

void MacroAssembler::MovFromFloatResult(const DwVfpRegister dst) {
  if (use_eabi_hardfloat()) {

void MacroAssembler::MovFromFloatParameter(DwVfpRegister dst) {
  MovFromFloatResult(dst);
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    bool* definitely_mismatches,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label regular_invoke;
  DCHECK(actual.is_immediate() || actual.reg().is(r0));
  DCHECK(expected.is_immediate() || expected.reg().is(r2));
  DCHECK((!code_constant.is_null() && code_reg.is(no_reg)) || code_reg.is(r3));
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
      mov(r0, Operand(actual.immediate()));
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        definitely_matches = true;
        *definitely_mismatches = true;
        mov(r2, Operand(expected.immediate()));
    if (actual.is_immediate()) {
      cmp(expected.reg(), Operand(actual.immediate()));
      b(eq, &regular_invoke);
      mov(r0, Operand(actual.immediate()));
      cmp(expected.reg(), Operand(actual.reg()));
      b(eq, &regular_invoke);
  if (!definitely_matches) {
    if (!code_constant.is_null()) {
      mov(r3, Operand(code_constant));
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    call_wrapper.BeforeCall(CallSize(adaptor));
    call_wrapper.AfterCall();
    if (!*definitely_mismatches) {
    Jump(adaptor, RelocInfo::CODE_TARGET);
    bind(&regular_invoke);
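// When the declared and actual argument counts cannot be proven equal,
// InvokePrologue above routes control through the ArgumentsAdaptorTrampoline
// builtin, either calling it or tail-jumping to it depending on the invoke
// flag.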
void MacroAssembler::InvokeCode(Register code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                const CallWrapper& call_wrapper) {
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag,
  if (!definitely_mismatches) {
    call_wrapper.BeforeCall(CallSize(code));
    call_wrapper.AfterCall();

void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper) {
  Register expected_reg = r2;
  Register code_reg = r3;
      SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(expected_reg);
  ParameterCount expected(expected_reg);
  InvokeCode(code_reg, expected, actual, flag, call_wrapper);

void MacroAssembler::InvokeFunction(Register function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper) {
  InvokeCode(r3, expected, actual, flag, call_wrapper);

void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    const CallWrapper& call_wrapper) {
  InvokeFunction(r1, expected, actual, flag, call_wrapper);
void MacroAssembler::IsObjectJSObjectType(Register heap_object,
  IsInstanceJSObjectType(map, scratch, fail);

void MacroAssembler::IsInstanceJSObjectType(Register map,

void MacroAssembler::IsObjectJSStringType(Register object,

void MacroAssembler::IsObjectNameType(Register object,

void MacroAssembler::DebugBreak() {
  mov(r0, Operand::Zero());
  mov(r1, Operand(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(isolate(), 1);
  DCHECK(AllowThisStubCall(&ces));

void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  mov(r5, Operand(CodeObject()));
  mov(r6, Operand(state));
  if (kind == StackHandler::JS_ENTRY) {
    mov(cp, Operand(Smi::FromInt(0)));
    mov(ip, Operand::Zero());
  mov(r6, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));

void MacroAssembler::PopTryHandler() {
  mov(ip, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
void MacroAssembler::JumpToHandlerEntry() {
  ConstantPoolUnavailableScope constant_pool_unavailable(this);
  if (FLAG_enable_ool_constant_pool) {
  mov(r2, Operand(r2, LSR, StackHandler::kKindWidth));
  add(pc, r1, Operand::SmiUntag(r2));

void MacroAssembler::Throw(Register value) {
  if (!value.is(r0)) {
  mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  JumpToHandlerEntry();

void MacroAssembler::ThrowUncatchable(Register value) {
  if (!value.is(r0)) {
  mov(r3, Operand(ExternalReference(Isolate::kHandlerAddress, isolate())));
  Label fetch_next, check_kind;
  tst(r2, Operand(StackHandler::KindField::kMask));
  JumpToHandlerEntry();
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
  Label same_contexts;
  DCHECK(!holder_reg.is(scratch));
  ldr(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
  cmp(scratch, Operand::Zero());
  Check(ne, kWeShouldNotHaveAnEmptyLexicalContext);
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
  if (emit_debug_code()) {
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
  cmp(scratch, Operand(ip));
  b(eq, &same_contexts);
  if (emit_debug_code()) {
    mov(holder_reg, ip);
    LoadRoot(ip, Heap::kNullValueRootIndex);
    cmp(holder_reg, ip);
    Check(ne, kJSGlobalProxyContextShouldNotBeNull);
    LoadRoot(ip, Heap::kNativeContextMapRootIndex);
    cmp(holder_reg, ip);
    Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext);
  int token_offset = Context::kHeaderSize +
  cmp(scratch, Operand(ip));
  bind(&same_contexts);
void MacroAssembler::GetNumberHash(Register t0, Register scratch) {
  LoadRoot(scratch, Heap::kHashSeedRootIndex);
  eor(t0, t0, Operand(scratch));
  mvn(scratch, Operand(t0));
  add(t0, scratch, Operand(t0, LSL, 15));
  eor(t0, t0, Operand(t0, LSR, 12));
  add(t0, t0, Operand(t0, LSL, 2));
  eor(t0, t0, Operand(t0, LSR, 4));
  mov(scratch, Operand(t0, LSL, 11));
  add(t0, t0, Operand(t0, LSL, 3));
  add(t0, t0, scratch);
  eor(t0, t0, Operand(t0, LSR, 16));
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
  GetNumberHash(t0, t1);
  ldr(t1, FieldMemOperand(elements, SeededNumberDictionary::kCapacityOffset));
  sub(t1, t1, Operand(1));
    add(t2, t2, Operand(SeededNumberDictionary::GetProbeOffset(i)));
    and_(t2, t2, Operand(t1));
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    add(t2, t2, Operand(t2, LSL, 1));
    cmp(key, Operand(ip));
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask)));
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
void MacroAssembler::Allocate(int object_size,
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!scratch1.is(scratch2));
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
      reinterpret_cast<intptr_t>(allocation_top.address());
      reinterpret_cast<intptr_t>(allocation_limit.address());
  Register topaddr = scratch1;
  mov(topaddr, Operand(allocation_top));
    ldm(ia, topaddr, result.bit() | ip.bit());
    if (emit_debug_code()) {
      Check(eq, kUnexpectedAllocationTop);
  cmp(result, Operand(ip));
    mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
  Register source = result;
  while (object_size != 0) {
    if (((object_size >> shift) & 0x03) == 0) {
      int bits = object_size & (0xff << shift);
      object_size -= bits;
      Operand bits_operand(bits);
      DCHECK(bits_operand.instructions_required(this) == 1);
      add(scratch2, source, bits_operand, SetCC, cond);
  cmp(scratch2, Operand(ip));
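// Both Allocate() variants implement the inline allocation fast path: load
// the current allocation top, bump it by the requested size, compare against
// the allocation limit, and branch to gc_required when the space is
// exhausted.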
void MacroAssembler::Allocate(Register object_size,
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      mov(result, Operand(0x7091));
      mov(scratch1, Operand(0x7191));
      mov(scratch2, Operand(0x7291));
  DCHECK(!result.is(scratch1));
  DCHECK(!result.is(scratch2));
  DCHECK(!scratch1.is(scratch2));
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
      reinterpret_cast<intptr_t>(allocation_top.address());
      reinterpret_cast<intptr_t>(allocation_limit.address());
  Register topaddr = scratch1;
  mov(topaddr, Operand(allocation_top));
    ldm(ia, topaddr, result.bit() | ip.bit());
    if (emit_debug_code()) {
      Check(eq, kUnexpectedAllocationTop);
  cmp(result, Operand(ip));
    mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
    add(scratch2, result, Operand(object_size), SetCC);
  cmp(scratch2, Operand(ip));
  if (emit_debug_code()) {
    Check(eq, kUnalignedAllocationInNewSpace);

void MacroAssembler::UndoAllocationInNewSpace(Register object,
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  mov(scratch, Operand(new_space_allocation_top));
  cmp(object, scratch);
  Check(lt, kUndoAllocationOfNonAllocatedMemory);
  mov(scratch, Operand(new_space_allocation_top));
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Label* gc_required) {
  mov(scratch1, Operand(length, LSL, 1));
  add(scratch1, scratch1,
  InitializeNewString(result,
                      Heap::kStringMapRootIndex,

void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  add(scratch1, length,
  InitializeNewString(result, length, Heap::kOneByteStringMapRootIndex,
                      scratch1, scratch2);

void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Label* gc_required) {
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
  InitializeNewString(result,
                      Heap::kConsStringMapRootIndex,

void MacroAssembler::AllocateOneByteConsString(Register result, Register length,
                                               Label* gc_required) {
  Allocate(ConsString::kSize,
  InitializeNewString(result, length, Heap::kConsOneByteStringMapRootIndex,
                      scratch1, scratch2);

void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
  InitializeNewString(result,
                      Heap::kSlicedStringMapRootIndex,

void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Label* gc_required) {
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
  InitializeNewString(result, length, Heap::kSlicedOneByteStringMapRootIndex,
                      scratch1, scratch2);
void MacroAssembler::CompareObjectType(Register object,
  const Register temp = type_reg.is(no_reg) ? ip : type_reg;
  CompareInstanceType(map, temp, type);

void MacroAssembler::CheckObjectTypeRange(Register object,
                                          Label* false_label) {
  sub(ip, ip, Operand(min_type));
  cmp(ip, Operand(max_type - min_type));

void MacroAssembler::CompareInstanceType(Register map,
  cmp(type_reg, Operand(type));

void MacroAssembler::CompareRoot(Register obj,
                                 Heap::RootListIndex index) {
  LoadRoot(ip, index);

void MacroAssembler::CheckFastElements(Register map,
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));

void MacroAssembler::CheckFastObjectElements(Register map,
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleyElementValue));

void MacroAssembler::CheckFastSmiElements(Register map,
  cmp(scratch, Operand(Map::kMaximumBitField2FastHoleySmiElementValue));

void MacroAssembler::StoreNumberToDoubleElements(
    Register elements_reg,
    LowDwVfpRegister double_scratch,
    int elements_offset) {
  Label smi_value, store;
  JumpIfSmi(value_reg, &smi_value);
           isolate()->factory()->heap_number_map(),
  vldr(double_scratch, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
  if (emit_debug_code()) {
    Assert(ne, kDefaultNaNModeNotSet);
  VFPCanonicalizeNaN(double_scratch);
  SmiToDouble(double_scratch, value_reg);
  add(scratch1, elements_reg, Operand::DoubleOffsetFromSmiKey(key_reg));
  vstr(double_scratch,
       FixedDoubleArray::kHeaderSize - elements_offset));
void MacroAssembler::CompareMap(Register obj,
                                Label* early_success) {
  CompareMap(scratch, map, early_success);

void MacroAssembler::CompareMap(Register obj_map,
                                Label* early_success) {
  cmp(obj_map, Operand(map));

void MacroAssembler::CheckMap(Register obj,
    JumpIfSmi(obj, fail);
  CompareMap(obj, scratch, map, &success);

void MacroAssembler::CheckMap(Register obj,
                              Heap::RootListIndex index,
    JumpIfSmi(obj, fail);
  LoadRoot(ip, index);

void MacroAssembler::DispatchMap(Register obj,
                                 Handle<Code> success,
    JumpIfSmi(obj, &fail);
  mov(ip, Operand(map));
  Jump(success, RelocInfo::CODE_TARGET, eq);

void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             bool miss_on_bound_function) {
  if (miss_on_bound_function) {
    JumpIfSmi(function, miss);
        Operand(Smi::FromInt(1 << SharedFunctionInfo::kBoundFunction)));
  tst(scratch, Operand(1 << Map::kHasNonInstancePrototype));
  b(ne, &non_instance);
  LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  CompareObjectType(result, scratch, scratch, MAP_TYPE);
  if (miss_on_bound_function) {
    bind(&non_instance);
void MacroAssembler::CallStub(CodeStub* stub,
                              TypeFeedbackId ast_id,
  DCHECK(AllowThisStubCall(stub));
  Call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id, cond);

void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
  Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);

static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
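// CallApiFunctionAndReturn below calls a C++ API callback (optionally routed
// through the profiler thunk), maintains the HandleScope bookkeeping
// (next/limit/level), leaves the exit frame, and promotes any scheduled
// exception before returning.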
void MacroAssembler::CallApiFunctionAndReturn(
    Register function_address,
    ExternalReference thunk_ref,
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate());
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate()),
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate()),
  DCHECK(function_address.is(r1) || function_address.is(r2));
  Label profiler_disabled;
  Label end_profiler_check;
  mov(r9, Operand(ExternalReference::is_profiling_address(isolate())));
  cmp(r9, Operand(0));
  b(eq, &profiler_disabled);
  mov(r3, Operand(thunk_ref));
  jmp(&end_profiler_check);
  bind(&profiler_disabled);
  Move(r3, function_address);
  bind(&end_profiler_check);
  mov(r9, Operand(next_address));
  add(r6, r6, Operand(1));
  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, r0);
    mov(r0, Operand(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
    PopSafepointRegisters();
  DirectCEntryStub stub(isolate());
  stub.GenerateCall(this, r3);
  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, r0);
    mov(r0, Operand(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;
  ldr(r0, return_value_operand);
  bind(&return_value_loaded);
  if (emit_debug_code()) {
    Check(eq, kUnexpectedLevelAfterReturnFromApiCall);
  sub(r6, r6, Operand(1));
  b(ne, &delete_allocated_handles);
  bind(&leave_exit_frame);
  LoadRoot(r4, Heap::kTheHoleValueRootIndex);
  mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate())));
  b(ne, &promote_scheduled_exception);
  bind(&exception_handled);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    ldr(cp, *context_restore_operand);
  mov(r4, Operand(stack_space));
  LeaveExitFrame(false, r4, !restore_context);
  bind(&promote_scheduled_exception);
  CallExternalReference(
      ExternalReference(Runtime::kPromoteScheduledException, isolate()),
  jmp(&exception_handled);
  bind(&delete_allocated_handles);
  PrepareCallCFunction(1, r5);
  mov(r0, Operand(ExternalReference::isolate_address(isolate())));
      ExternalReference::delete_handle_scope_extensions(isolate()), 1);
  jmp(&leave_exit_frame);
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  return has_frame_ || !stub->SometimesSetsUpAFrame();

void MacroAssembler::IndexFromHash(Register hash, Register index) {
         (1 << String::kArrayIndexValueBits));
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index, hash);
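// The conversion helpers below move values between Smi/double and int32
// representations using VFP; exact conversions are verified by converting
// back and comparing with VFPCompareAndSetFlags.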
void MacroAssembler::SmiToDouble(LowDwVfpRegister value, Register smi) {
  if (CpuFeatures::IsSupported(VFP3)) {
    vmov(value.low(), smi);
    vcvt_f64_s32(value, 1);
    vmov(value.low(), ip);
    vcvt_f64_s32(value, value.low());

void MacroAssembler::TestDoubleIsInt32(DwVfpRegister double_input,
                                       LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);

void MacroAssembler::TryDoubleToInt32Exact(Register result,
                                           DwVfpRegister double_input,
                                           LowDwVfpRegister double_scratch) {
  DCHECK(!double_input.is(double_scratch));
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  vcvt_f64_s32(double_scratch, double_scratch.low());
  VFPCompareAndSetFlags(double_input, double_scratch);
void MacroAssembler::TryInt32Floor(Register result,
                                   DwVfpRegister double_input,
                                   Register input_high,
                                   LowDwVfpRegister double_scratch,
  DCHECK(!result.is(input_high));
  DCHECK(!double_input.is(double_scratch));
  VmovHigh(input_high, double_input);
  Sbfx(result, input_high,
       HeapNumber::kExponentShift, HeapNumber::kExponentBits);
  cmp(result, Operand(-1));
  TryDoubleToInt32Exact(result, double_input, double_scratch);
  cmp(input_high, Operand::Zero());
  cmn(result, Operand(1));
  sub(result, result, Operand(1), SetCC);

void MacroAssembler::TryInlineTruncateDoubleToI(Register result,
                                                DwVfpRegister double_input,
  vcvt_s32_f64(double_scratch.low(), double_input);
  vmov(result, double_scratch.low());
  sub(ip, result, Operand(1));
  cmp(ip, Operand(0x7ffffffe));

void MacroAssembler::TruncateDoubleToI(Register result,
                                       DwVfpRegister double_input) {
  TryInlineTruncateDoubleToI(result, double_input, &done);
  DoubleToIStub stub(isolate(), sp, result, 0, true, true);
void MacroAssembler::TruncateHeapNumberToI(Register result,
  DCHECK(!result.is(object));
  vldr(double_scratch,
  TryInlineTruncateDoubleToI(result, double_scratch, &done);
  DoubleToIStub stub(isolate(),

void MacroAssembler::TruncateNumberToI(Register object,
                                       Register heap_number_map,
                                       Label* not_number) {
  DCHECK(!result.is(object));
  UntagAndJumpIfSmi(result, object, &done);
  JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number);
  TruncateHeapNumberToI(result, object);

void MacroAssembler::GetLeastBitsFromSmi(Register dst,
                                         int num_least_bits) {
  if (CpuFeatures::IsSupported(ARMv7) && !predictable_code_size()) {
    and_(dst, dst, Operand((1 << num_least_bits) - 1));

void MacroAssembler::GetLeastBitsFromInt32(Register dst,
                                           int num_least_bits) {
  and_(dst, src, Operand((1 << num_least_bits) - 1));
void MacroAssembler::CallRuntime(const Runtime::Function* f,
  CHECK(f->nargs < 0 || f->nargs == num_arguments);
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ExternalReference(f, isolate())));
  CEntryStub stub(isolate(), 1, save_doubles);

void MacroAssembler::CallExternalReference(const ExternalReference& ext,
                                           int num_arguments) {
  mov(r0, Operand(num_arguments));
  mov(r1, Operand(ext));
  CEntryStub stub(isolate(), 1);

void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
  mov(r0, Operand(num_arguments));
  JumpToExternalReference(ext);

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
  TailCallExternalReference(ExternalReference(fid, isolate()),

void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin) {
#if defined(__thumb__)
  DCHECK((reinterpret_cast<intptr_t>(builtin.address()) & 1) == 1);
  mov(r1, Operand(builtin));
  CEntryStub stub(isolate(), 1);
  Jump(stub.GetCode(), RelocInfo::CODE_TARGET);

void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   const CallWrapper& call_wrapper) {
  GetBuiltinEntry(r2, id);
    call_wrapper.BeforeCall(CallSize(r2));
    call_wrapper.AfterCall();

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
          JSBuiltinsObject::OffsetOfFunctionWithId(id)));

void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  GetBuiltinFunction(r1, id);
void MacroAssembler::SetCounter(StatsCounter* counter, int value,
                                Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch1, Operand(value));
    mov(scratch2, Operand(ExternalReference(counter)));

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    add(scratch1, scratch1, Operand(value));

void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
                                      Register scratch1, Register scratch2) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(scratch2, Operand(ExternalReference(counter)));
    sub(scratch1, scratch1, Operand(value));

void MacroAssembler::Assert(Condition cond, BailoutReason reason) {
  if (emit_debug_code())
    Check(cond, reason);
}

void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    LoadRoot(ip, Heap::kFixedDoubleArrayMapRootIndex);
    LoadRoot(ip, Heap::kFixedCOWArrayMapRootIndex);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);

void MacroAssembler::Abort(BailoutReason reason) {
  RecordComment("Abort message: ");
  if (FLAG_trap_on_abort) {
  mov(r0, Operand(Smi::FromInt(reason)));
    CallRuntime(Runtime::kAbort, 1);
    CallRuntime(Runtime::kAbort, 1);
  if (is_const_pool_blocked()) {
    static const int kExpectedAbortInstructions = 7;
    int abort_instructions = InstructionsGeneratedSince(&abort_start);
    DCHECK(abort_instructions <= kExpectedAbortInstructions);
    while (abort_instructions++ < kExpectedAbortInstructions) {
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    ldr(dst, MemOperand(cp, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      ldr(dst, MemOperand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));

void MacroAssembler::LoadTransitionedArrayMapConditional(
    Register map_in_out,
    Label* no_map_match) {
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  ldr(scratch, FieldMemOperand(scratch, GlobalObject::kNativeContextOffset));
                      Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, ip);
  b(ne, no_map_match);
      FixedArrayBase::kHeaderSize;

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
      MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
                       GlobalObject::kNativeContextOffset));
  ldr(function, MemOperand(function, Context::SlotOffset(index)));

void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
  if (emit_debug_code()) {
    Abort(kGlobalFunctionsMustHaveInitialMap);
void MacroAssembler::JumpIfNotPowerOfTwoOrZero(
    Label* not_power_of_two_or_zero) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, not_power_of_two_or_zero);
  b(ne, not_power_of_two_or_zero);

void MacroAssembler::JumpIfNotPowerOfTwoOrZeroAndNeg(
    Label* zero_and_neg,
    Label* not_power_of_two) {
  sub(scratch, reg, Operand(1), SetCC);
  b(mi, zero_and_neg);
  b(ne, not_power_of_two);

void MacroAssembler::JumpIfNotBothSmi(Register reg1,
                                      Label* on_not_both_smi) {
  b(ne, on_not_both_smi);

void MacroAssembler::UntagAndJumpIfSmi(
    Register dst, Register src, Label* smi_case) {
  SmiUntag(dst, src, SetCC);

void MacroAssembler::UntagAndJumpIfNotSmi(
    Register dst, Register src, Label* non_smi_case) {
  SmiUntag(dst, src, SetCC);
  b(cs, non_smi_case);

void MacroAssembler::JumpIfEitherSmi(Register reg1,
                                     Label* on_either_smi) {
  b(eq, on_either_smi);
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    Check(ne, kOperandIsASmi);

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    Check(eq, kOperandIsNotSmi);

void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    Check(ne, kOperandIsASmiAndNotAString);
    Check(lo, kOperandIsNotAString);

void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    Check(ne, kOperandIsASmiAndNotAName);
    Check(le, kOperandIsNotAName);

void MacroAssembler::AssertUndefinedOrAllocationSite(Register object,
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    CompareRoot(object, Heap::kUndefinedValueRootIndex);
    b(eq, &done_checking);
    CompareRoot(scratch, Heap::kAllocationSiteMapRootIndex);
    Assert(eq, kExpectedUndefinedOrCell);
    bind(&done_checking);

void MacroAssembler::AssertIsRoot(Register reg, Heap::RootListIndex index) {
  if (emit_debug_code()) {
    CompareRoot(reg, index);
    Check(eq, kHeapNumberMapRegisterClobbered);

void MacroAssembler::JumpIfNotHeapNumber(Register object,
                                         Register heap_number_map,
                                         Label* on_not_heap_number) {
  AssertIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
  cmp(scratch, heap_number_map);
  b(ne, on_not_heap_number);
void MacroAssembler::LookupNumberStringCache(Register object,
  Register number_string_cache = result;
  Register mask = scratch3;
  LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
  ldr(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset));
  sub(mask, mask, Operand(1));
  Label load_result_from_cache;
  JumpIfSmi(object, &is_smi);
              Heap::kHeapNumberMapRootIndex,
  ldm(ia, scratch1, scratch1.bit() | scratch2.bit());
  eor(scratch1, scratch1, Operand(scratch2));
  and_(scratch1, scratch1, Operand(mask));
      number_string_cache,
  Register probe = mask;
  JumpIfSmi(probe, not_found);
  vldr(d0, scratch2, HeapNumber::kValueOffset);
  vldr(d1, probe, HeapNumber::kValueOffset);
  VFPCompareAndSetFlags(d0, d1);
  b(&load_result_from_cache);
  Register scratch = scratch1;
  and_(scratch, mask, Operand(object, ASR, 1));
      number_string_cache,
  bind(&load_result_from_cache);
  IncrementCounter(isolate()->counters()->number_to_string_native(),

void MacroAssembler::JumpIfNonSmisNotBothSequentialOneByteStrings(
    Register first, Register second, Register scratch1, Register scratch2,
  JumpIfBothInstanceTypesAreNotSequentialOneByte(scratch1, scratch2, scratch1,

void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register first,
  and_(scratch1, first, Operand(second));
  JumpIfSmi(scratch1, failure);
  JumpIfNonSmisNotBothSequentialOneByteStrings(first, second, scratch1,

void MacroAssembler::JumpIfNotUniqueNameInstanceType(Register reg,
                                                     Label* not_unique_name) {
  b(ne, not_unique_name);
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register heap_number_map,
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
      ? Heap::kMutableHeapNumberMapRootIndex
      : Heap::kHeapNumberMapRootIndex;
  AssertIsRoot(heap_number_map, map_index);
    str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset));
    str(heap_number_map, MemOperand(result, HeapObject::kMapOffset));

void MacroAssembler::AllocateHeapNumberWithValue(Register result,
                                                 DwVfpRegister value,
                                                 Register heap_number_map,
                                                 Label* gc_required) {
  AllocateHeapNumber(result, scratch1, scratch2, heap_number_map, gc_required);
  vstr(value, scratch1, HeapNumber::kValueOffset);

void MacroAssembler::CopyFields(Register dst,
                                LowDwVfpRegister double_scratch,
  int double_count = field_count / (DwVfpRegister::kSizeInBytes / kPointerSize);
  for (int i = 0; i < double_count; i++) {
    vldr(double_scratch, FieldMemOperand(src, i * DwVfpRegister::kSizeInBytes));
    vstr(double_scratch, FieldMemOperand(dst, i * DwVfpRegister::kSizeInBytes));
  STATIC_ASSERT(2 * SwVfpRegister::kSizeInBytes == DwVfpRegister::kSizeInBytes);
  int remain = field_count % (DwVfpRegister::kSizeInBytes / kPointerSize);
    vldr(double_scratch.low(),
    vstr(double_scratch.low(),
void MacroAssembler::CopyBytes(Register src, Register dst, Register length,
                               Register scratch) {
  Label align_loop_1, word_loop, byte_loop, byte_loop_1, done;
  bind(&align_loop_1);
  sub(length, length, Operand(1), SetCC);
  if (emit_debug_code()) {
    Assert(eq, kExpectingAlignmentForCopyBytes);
  mov(scratch, Operand(scratch, LSR, 8));
  mov(scratch, Operand(scratch, LSR, 8));
  mov(scratch, Operand(scratch, LSR, 8));
  cmp(length, Operand::Zero());
  sub(length, length, Operand(1), SetCC);
  b(ne, &byte_loop_1);

void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
  cmp(start_offset, end_offset);

void MacroAssembler::CheckFor32DRegs(Register scratch) {
  mov(scratch, Operand(ExternalReference::cpu_features()));

void MacroAssembler::SaveFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);

void MacroAssembler::RestoreFPRegs(Register location, Register scratch) {
  CheckFor32DRegs(scratch);

void MacroAssembler::JumpIfBothInstanceTypesAreNotSequentialOneByte(
    Register first, Register second, Register scratch1, Register scratch2,
  const int kFlatOneByteStringMask =
  const int kFlatOneByteStringTag =
  and_(scratch1, first, Operand(kFlatOneByteStringMask));
  and_(scratch2, second, Operand(kFlatOneByteStringMask));
  cmp(scratch1, Operand(kFlatOneByteStringTag));
  cmp(scratch2, Operand(kFlatOneByteStringTag), eq);

void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(Register type,
  const int kFlatOneByteStringMask =
  const int kFlatOneByteStringTag =
  and_(scratch, type, Operand(kFlatOneByteStringMask));
  cmp(scratch, Operand(kFlatOneByteStringTag));
static const int kRegisterPassedArguments = 4;

int MacroAssembler::CalculateStackPassedWords(int num_reg_arguments,
                                              int num_double_arguments) {
  int stack_passed_words = 0;
  if (use_eabi_hardfloat()) {
    if (num_double_arguments > DoubleRegister::NumRegisters()) {
      stack_passed_words +=
          2 * (num_double_arguments - DoubleRegister::NumRegisters());
    num_reg_arguments += 2 * num_double_arguments;
  if (num_reg_arguments > kRegisterPassedArguments) {
    stack_passed_words += num_reg_arguments - kRegisterPassedArguments;
  return stack_passed_words;
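// Per the ARM AAPCS, the first four word-sized arguments travel in core
// registers; with EABI hardfloat, doubles go in VFP registers, otherwise
// each double occupies two core-register slots. Anything beyond that is
// counted above as stack-passed words.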
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
  Check(ne, kNonObject);
  cmp(ip, Operand(encoding_mask));
  Check(eq, kUnexpectedStringType);
  Label index_tag_ok, index_tag_bad;
  TrySmiTag(index, index, &index_tag_bad);
  bind(&index_tag_bad);
  Abort(kIndexIsTooLarge);
  bind(&index_tag_ok);
  Check(lt, kIndexIsTooLarge);
  cmp(index, Operand(Smi::FromInt(0)));
  Check(ge, kIndexIsNegative);
  SmiUntag(index, index);

void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
                                          int num_double_arguments,
  int frame_alignment = ActivationFrameAlignment();
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
    and_(sp, sp, Operand(-frame_alignment));

void MacroAssembler::PrepareCallCFunction(int num_reg_arguments,
  PrepareCallCFunction(num_reg_arguments, 0, scratch);

void MacroAssembler::MovToFloatParameter(DwVfpRegister src) {
  if (!use_eabi_hardfloat()) {

void MacroAssembler::MovToFloatResult(DwVfpRegister src) {
  MovToFloatParameter(src);

void MacroAssembler::MovToFloatParameters(DwVfpRegister src1,
                                          DwVfpRegister src2) {
  if (!use_eabi_hardfloat()) {
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  mov(ip, Operand(function));
  CallCFunctionHelper(ip, num_reg_arguments, num_double_arguments);

void MacroAssembler::CallCFunction(Register function,
                                   int num_reg_arguments,
                                   int num_double_arguments) {
  CallCFunctionHelper(function, num_reg_arguments, num_double_arguments);

void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);

void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  CallCFunction(function, num_arguments, 0);

void MacroAssembler::CallCFunctionHelper(Register function,
                                         int num_reg_arguments,
                                         int num_double_arguments) {
#if V8_HOST_ARCH_ARM
  if (emit_debug_code()) {
    int frame_alignment = base::OS::ActivationFrameAlignment();
    int frame_alignment_mask = frame_alignment - 1;
    Label alignment_as_expected;
    tst(sp, Operand(frame_alignment_mask));
    b(eq, &alignment_as_expected);
    stop("Unexpected alignment");
    bind(&alignment_as_expected);
  int stack_passed_arguments = CalculateStackPassedWords(
      num_reg_arguments, num_double_arguments);
void MacroAssembler::GetRelocatedValueLocation(Register ldr_location,
  Label small_constant_pool_load, load_result;
  if (FLAG_enable_ool_constant_pool) {
    and_(scratch, result, Operand(GetConsantPoolLoadMask()));
    teq(scratch, Operand(GetConsantPoolLoadPattern()));
    b(eq, &small_constant_pool_load);
    if (emit_debug_code()) {
      Instr patterns[] = {GetMovWPattern(), GetMovTPattern(),
                          GetLdrPpRegOffsetPattern()};
      for (int i = 0; i < 3; i++) {
        ldr(result, MemOperand(ldr_location, i * kInstrSize));
        and_(result, result, Operand(patterns[i]));
        cmp(result, Operand(patterns[i]));
        Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
    and_(scratch, result, Operand(0xfff));
    mov(ip, Operand(result, LSR, 4));
    and_(ip, ip, Operand(0xf000));
    orr(result, scratch, Operand(ip));
    ldr(scratch, MemOperand(ldr_location, kInstrSize));
    and_(ip, scratch, Operand(0xf0000));
    orr(result, result, Operand(ip, LSL, 12));
    and_(scratch, scratch, Operand(0xfff));
    orr(result, result, Operand(scratch, LSL, 16));
  bind(&small_constant_pool_load);
  if (emit_debug_code()) {
    and_(result, result, Operand(GetConsantPoolLoadPattern()));
    cmp(result, Operand(GetConsantPoolLoadPattern()));
    Check(eq, kTheInstructionToPatchShouldBeALoadFromConstantPool);
  const uint32_t kLdrOffsetMask = (1 << 12) - 1;
  and_(result, result, Operand(kLdrOffsetMask));
  if (FLAG_enable_ool_constant_pool) {
    add(result, pp, Operand(result));
    add(result, ldr_location, Operand(result));
    add(result, result, Operand(Instruction::kPCReadOffset));

void MacroAssembler::CheckPageFlag(
    Label* condition_met) {
  ldr(scratch, MemOperand(scratch, MemoryChunk::kFlagsOffset));
  tst(scratch, Operand(mask));
  b(cc, condition_met);

void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, Operand(map));
    tst(scratch, Operand(Map::Deprecated::kMask));
    b(ne, if_deprecated);
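// The helpers below (JumpIfBlack, HasColor, GetMarkBits, EnsureNotWhite)
// read and update the per-page mark bitmap used by the incremental marker.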
void MacroAssembler::JumpIfBlack(Register object,
  HasColor(object, scratch0, scratch1, on_black, 1, 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);

void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
  GetMarkBits(object, bitmap_scratch, mask_scratch);
  Label other_color, word_boundary;
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(ip, Operand(mask_scratch));
  b(first_bit == 1 ? eq : ne, &other_color);
  add(mask_scratch, mask_scratch, Operand(mask_scratch), SetCC);
  b(eq, &word_boundary);
  tst(ip, Operand(mask_scratch));
  b(second_bit == 1 ? ne : eq, has_color);
  bind(&word_boundary);
  tst(ip, Operand(1));
  b(second_bit == 1 ? ne : eq, has_color);

void MacroAssembler::JumpIfDataObject(Register value,
                                      Label* not_data_object) {
  Label is_data_object;
  CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
  b(eq, &is_data_object);
  b(ne, not_data_object);
  bind(&is_data_object);

void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  and_(bitmap_reg, addr_reg, Operand(~Page::kPageAlignmentMask));
  mov(ip, Operand(1));
  mov(mask_reg, Operand(ip, LSL, mask_reg));
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Register load_scratch,
    Label* value_is_white_and_not_data) {
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
  DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
  DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  ldr(load_scratch, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  tst(mask_scratch, load_scratch);
  b(ne, &done);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    tst(load_scratch, Operand(mask_scratch, LSL, 1));
    b(eq, &ok);
    stop("Impossible marking bit pattern");
    bind(&ok);
  }

  // Value is white.  We check whether it is data that doesn't need scanning;
  // currently only heap numbers and strings qualify.
  Register map = load_scratch;     // Holds map while checking type.
  Register length = load_scratch;  // Holds length of object after testing type.
  Label is_data_object;

  // Check for heap-number.
  ldr(map, FieldMemOperand(value, HeapObject::kMapOffset));
  CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  mov(length, Operand(HeapNumber::kSize), LeaveCC, eq);
  b(eq, &is_data_object);

  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = load_scratch;
  ldrb(instance_type, FieldMemOperand(map, Map::kInstanceTypeOffset));
  tst(instance_type, Operand(kIsIndirectStringMask | kIsNotStringMask));
  b(ne, value_is_white_and_not_data);

  // External strings have a fixed size.
  tst(instance_type, Operand(kExternalStringTag));
  mov(length, Operand(ExternalString::kSize), LeaveCC, ne);
  b(ne, &is_data_object);
  // Sequential strings: the length is derived from String::kLengthOffset
  // before falling through to is_data_object.

  bind(&is_data_object);
  // Value is a data object, and it is white.  Mark it black.  Since we know
  // that the object is white we can make it black by flipping one bit.
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));
  orr(ip, ip, Operand(mask_scratch));
  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kHeaderSize));

  and_(bitmap_scratch, bitmap_scratch, Operand(~Page::kPageAlignmentMask));
  ldr(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
  add(ip, ip, Operand(length));
  str(ip, MemOperand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));

  bind(&done);
}
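// Illustrative note: because white is "00" and black is "10", a white data
// object can be blackened by setting only its first mark bit, which is what
// the ldr/orr/str on the bitmap cell does, roughly:
//   cell |= mask;                                    // white 00 -> black 10
//   *(page + MemoryChunk::kLiveBytesOffset) += length;
// Objects that may contain pointers are instead reported through
// value_is_white_and_not_data so the caller can scan them.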
void MacroAssembler::ClampUint8(Register output_reg, Register input_reg) {
  Usat(output_reg, 8, Operand(input_reg));
}
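// Illustrative note: usat #8 saturates a signed 32-bit value into [0, 255],
// e.g. -5 -> 0, 17 -> 17, 300 -> 255, which is the clamping required when
// storing into clamped 8-bit (pixel) array elements.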
void MacroAssembler::ClampDoubleToUint8(Register result_reg,
                                        DwVfpRegister input_reg,
                                        LowDwVfpRegister double_scratch) {
  Label done;
  // Handle inputs >= 255 (including +infinity).
  Vmov(double_scratch, 255.0, result_reg);
  mov(result_reg, Operand(255));
  VFPCompareAndSetFlags(input_reg, double_scratch);
  b(ge, &done);
  // For inputs < 255 (including negative) the rounding conversion is correct.
  vcvt_u32_f64(double_scratch.low(), input_reg, kFPSCRRounding);
  vmov(result_reg, double_scratch.low());
  bind(&done);
}
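// Illustrative examples for the clamp above: inputs >= 255 (including +inf)
// keep the preloaded 255; everything else goes through the unsigned convert,
// which maps negative values and NaN to 0 and rounds to nearest (ties to even)
// under the default FPSCR mode, so e.g.
//   -3.7 -> 0,  0.5 -> 0,  1.5 -> 2,  254.6 -> 255,  1e9 -> 255 (first case).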
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  ldr(descriptors, FieldMemOperand(map, Map::kDescriptorsOffset));
}
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  ldr(dst, FieldMemOperand(map, Map::kBitField3Offset));
  and_(dst, dst, Operand(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
void MacroAssembler::CheckEnumCache(Register null_value, Label* call_runtime) {
  Register empty_fixed_array_value = r6;
  LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
  Label next, start;
  mov(r2, r0);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(kInvalidEnumCacheSentinel)));
  b(eq, call_runtime);
  b(&start);

  bind(&next);
  ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(r3, r1);
  cmp(r3, Operand(Smi::FromInt(0)));
  b(ne, call_runtime);

  bind(&start);
  // Check that there are no elements.  r2 holds the current JS object we have
  // reached through the prototype chain.
  Label no_elements;
  ldr(r2, FieldMemOperand(r2, JSObject::kElementsOffset));
  cmp(r2, empty_fixed_array_value);
  b(eq, &no_elements);

  // Second chance, the object may be using the empty slow element dictionary.
  CompareRoot(r2, Heap::kEmptySlowElementDictionaryRootIndex);
  b(ne, call_runtime);

  bind(&no_elements);
  ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
  cmp(r2, null_value);
  b(ne, &next);
}
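// Illustrative note: this is the fast-path test used by for-in. For the
// receiver and every object up its prototype chain it checks, roughly, that
// the map's EnumLength is valid (receiver) or zero (prototypes) and that the
// elements are the empty fixed array or the empty slow dictionary, so that
//   for (var key in obj) { ... }
// can use the cached enum keys instead of calling into the runtime.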
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());
  add(scratch_reg, receiver_reg,
      Operand(JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Operand(new_space_start));
  b(lt, no_memento_found);
  mov(ip, Operand(new_space_allocation_top));
  ldr(ip, MemOperand(ip));
  cmp(scratch_reg, ip);
  b(gt, no_memento_found);
  ldr(scratch_reg, MemOperand(scratch_reg, -AllocationMemento::kSize));
  cmp(scratch_reg,
      Operand(isolate()->factory()->allocation_memento_map()));
}
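// Illustrative note: an AllocationMemento, when present, is allocated directly
// after the JSArray in new space. scratch_reg is set to the address just past
// where such a memento would end; the two bounds checks ensure that address
// lies inside [new_space_start, new_space_allocation_top] before the word at
// scratch_reg - AllocationMemento::kSize is compared with the
// allocation_memento_map. Callers then branch on the resulting eq/ne flags.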
Register GetRegisterThatIsNotOneOf(Register reg1,
                                   Register reg2,
                                   Register reg3,
                                   Register reg4,
                                   Register reg5,
                                   Register reg6) {
  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();

  for (int i = 0; i < Register::NumAllocatableRegisters(); i++) {
    Register candidate = Register::FromAllocationIndex(i);
    if (regs & candidate.bit()) continue;
    return candidate;
  }
  UNREACHABLE();
  return no_reg;
}
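// Illustrative usage sketch (hypothetical register names): callers typically
// use this to obtain a scratch register guaranteed not to alias their inputs,
//   Register scratch = GetRegisterThatIsNotOneOf(object, key, value);
// which returns the first allocatable register whose bit is not in the set.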
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object, Register scratch0, Register scratch1, Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again;
  mov(current, object);
  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  ldr(current, FieldMemOperand(current, HeapObject::kMapOffset));
  ldr(scratch1, FieldMemOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Operand(DICTIONARY_ELEMENTS));
  b(eq, found);
  ldr(current, FieldMemOperand(current, Map::kPrototypeOffset));
  cmp(current, Operand(factory->null_value()));
  b(ne, &loop_again);
}
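// Illustrative note: the loop above is the assembly analogue of, roughly,
//   for (o = object; o != null; o = o.map.prototype)
//     if (o.map has DICTIONARY_ELEMENTS) goto found;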
bool AreAliased(Register reg1,
                Register reg2,
                Register reg3,
                Register reg4,
                Register reg5,
                Register reg6,
                Register reg7,
                Register reg8) {
  int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
      reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
      reg7.is_valid() + reg8.is_valid();

  RegList regs = 0;
  if (reg1.is_valid()) regs |= reg1.bit();
  if (reg2.is_valid()) regs |= reg2.bit();
  if (reg3.is_valid()) regs |= reg3.bit();
  if (reg4.is_valid()) regs |= reg4.bit();
  if (reg5.is_valid()) regs |= reg5.bit();
  if (reg6.is_valid()) regs |= reg6.bit();
  if (reg7.is_valid()) regs |= reg7.bit();
  if (reg8.is_valid()) regs |= reg8.bit();
  int n_of_non_aliasing_regs = NumRegs(regs);

  return n_of_valid_regs != n_of_non_aliasing_regs;
}
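// Illustrative note: the counting trick above works because aliased registers
// contribute a single bit but several "valid" counts, e.g.
//   AreAliased(r0, r1, r1)  ->  3 valid, 2 distinct bits  ->  true
//   AreAliased(r0, r1, r2)  ->  3 valid, 3 distinct bits  ->  false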
CodePatcher::CodePatcher(byte* address,
                         int instructions,
                         FlushICache flush_cache)
    : address_(address),
      size_(instructions * Assembler::kInstrSize),
      masm_(NULL, address, size_ + Assembler::kGap),
      flush_cache_(flush_cache) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap so the assembler can generate size bytes of
  // instructions without failing with buffer size constraints.
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  if (flush_cache_ == FLUSH) {
    CpuFeatures::FlushICache(address_, size_);
  }
  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
void CodePatcher::Emit(Instr instr) {
  masm()->emit(instr);
}

void CodePatcher::Emit(Address addr) {
  masm()->emit(reinterpret_cast<Instr>(addr));
}

void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}
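// Illustrative usage sketch (hypothetical values): patching two instructions
// in already generated code might look like
//   CodePatcher patcher(pc_of_first_instruction, 2);
//   patcher.masm()->mov(r0, Operand(0));
//   patcher.masm()->nop();
// and the destructor then flushes the instruction cache for the patched range
// when constructed with the default FLUSH mode.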
void MacroAssembler::TruncatingDiv(Register result,
                                   Register dividend,
                                   int32_t divisor) {
  DCHECK(!dividend.is(result));
  DCHECK(!dividend.is(ip));
  DCHECK(!result.is(ip));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  mov(ip, Operand(mag.multiplier));
  smull(ip, result, dividend, ip);
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) {
    add(result, result, Operand(dividend));
  }
  if (divisor < 0 && !neg && mag.multiplier > 0) {
    sub(result, result, Operand(dividend));
  }
  if (mag.shift > 0) mov(result, Operand(result, ASR, mag.shift));
  add(result, result, Operand(dividend, LSR, 31));
}
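// Worked example for the division above (the usual magic-number construction
// returned by base::SignedDivisionByConstant): for divisor == 3 the pair is
// multiplier == 0x55555556, shift == 0. Then for dividend == 7:
//   high word of 7 * 0x55555556  ->  2, no correction, result == 2 (7 / 3)
// and for dividend == -7:
//   high word  ->  -3, plus (dividend logically shifted right 31) == 1,
//   result == -2,
// i.e. signed division truncating toward zero.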