26 : Assembler(arg_isolate, buffer,
size),
27 generating_stub_(
false),
29 if (isolate() !=
NULL) {
31 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
37 void MacroAssembler::Load(Register dst,
const Operand& src, Representation r) {
41 }
else if (r.IsUInteger8()) {
43 }
else if (r.IsInteger16()) {
45 }
else if (r.IsUInteger16()) {
53 void MacroAssembler::Store(Register src,
const Operand& dst, Representation r) {
55 if (r.IsInteger8() || r.IsUInteger8()) {
57 }
else if (r.IsInteger16() || r.IsUInteger16()) {
60 if (r.IsHeapObject()) {
62 }
else if (r.IsSmi()) {
70 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
71 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
72 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
73 mov(destination, value);
76 ExternalReference roots_array_start =
77 ExternalReference::roots_array_start(isolate());
78 mov(destination, Immediate(index));
79 mov(destination, Operand::StaticArray(destination,
85 void MacroAssembler::StoreRoot(Register source,
87 Heap::RootListIndex index) {
88 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
89 ExternalReference roots_array_start =
90 ExternalReference::roots_array_start(isolate());
91 mov(scratch, Immediate(index));
97 void MacroAssembler::CompareRoot(Register with,
99 Heap::RootListIndex index) {
100 ExternalReference roots_array_start =
101 ExternalReference::roots_array_start(isolate());
102 mov(scratch, Immediate(index));
103 cmp(with, Operand::StaticArray(scratch,
109 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
110 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
111 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
116 void MacroAssembler::CompareRoot(
const Operand& with,
117 Heap::RootListIndex index) {
118 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
119 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
124 void MacroAssembler::InNewSpace(
128 Label* condition_met,
129 Label::Distance condition_met_distance) {
131 if (scratch.is(
object)) {
132 and_(scratch, Immediate(~Page::kPageAlignmentMask));
134 mov(scratch, Immediate(~Page::kPageAlignmentMask));
135 and_(scratch,
object);
138 DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
139 DCHECK(MemoryChunk::IN_TO_SPACE < 8);
140 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
141 | (1 << MemoryChunk::IN_TO_SPACE);
143 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
144 static_cast<uint8_t
>(mask));
145 j(
cc, condition_met, condition_met_distance);
149 void MacroAssembler::RememberedSetHelper(
152 MacroAssembler::RememberedSetFinalAction and_then) {
154 if (emit_debug_code()) {
156 JumpIfNotInNewSpace(
object, scratch, &ok, Label::kNear);
161 ExternalReference store_buffer =
162 ExternalReference::store_buffer_top(isolate());
163 mov(scratch, Operand::StaticVariable(store_buffer));
165 mov(Operand(scratch, 0), addr);
169 mov(Operand::StaticVariable(store_buffer), scratch);
172 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
173 if (and_then == kReturnAtEnd) {
174 Label buffer_overflowed;
175 j(
not_equal, &buffer_overflowed, Label::kNear);
177 bind(&buffer_overflowed);
179 DCHECK(and_then == kFallThroughAtEnd);
180 j(
equal, &done, Label::kNear);
182 StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
183 CallStub(&store_buffer_overflow);
184 if (and_then == kReturnAtEnd) {
187 DCHECK(and_then == kFallThroughAtEnd);
193 void MacroAssembler::ClampTOSToUint8(Register result_reg) {
194 Label done, conv_failure;
197 fist_s(Operand(
esp, 0));
200 j(
equal, &conv_failure, Label::kNear);
201 test(result_reg, Immediate(0xFFFFFF00));
202 j(
zero, &done, Label::kNear);
203 setcc(
sign, result_reg);
204 sub(result_reg, Immediate(1));
205 and_(result_reg, Immediate(255));
206 jmp(&done, Label::kNear);
212 setcc(
below, result_reg);
218 void MacroAssembler::ClampUint8(Register reg) {
220 test(reg, Immediate(0xFFFFFF00));
221 j(
zero, &done, Label::kNear);
228 void MacroAssembler::SlowTruncateToI(Register result_reg,
231 DoubleToIStub stub(isolate(), input_reg, result_reg, offset,
true);
232 call(stub.GetCode(), RelocInfo::CODE_TARGET);
236 void MacroAssembler::TruncateX87TOSToI(Register result_reg) {
239 SlowTruncateToI(result_reg,
esp, 0);
244 void MacroAssembler::X87TOSToI(Register result_reg,
246 Label* lost_precision, Label* is_nan,
247 Label* minus_zero, Label::Distance dst) {
258 test(result_reg, Operand(result_reg));
265 test(result_reg, Operand(result_reg));
272 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
273 Register input_reg) {
274 Label done, slow_case;
276 SlowTruncateToI(result_reg, input_reg);
281 void MacroAssembler::LoadUint32NoSSE2(Register src) {
284 fild_s(Operand(
esp, 0));
285 cmp(src, Immediate(0));
287 ExternalReference uint32_bias =
288 ExternalReference::address_of_uint32_bias();
289 fld_d(Operand::StaticVariable(uint32_bias));
296 void MacroAssembler::RecordWriteArray(
297 Register
object, Register value, Register index,
SaveFPRegsMode save_fp,
314 Register dst = index;
318 RecordWrite(
object, dst, value, save_fp, remembered_set_action,
325 if (emit_debug_code()) {
326 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
327 mov(index, Immediate(bit_cast<int32_t>(
kZapValue)));
332 void MacroAssembler::RecordWriteField(
333 Register
object,
int offset, Register value, Register dst,
342 JumpIfSmi(value, &done, Label::kNear);
350 if (emit_debug_code()) {
353 j(
zero, &ok, Label::kNear);
358 RecordWrite(
object, dst, value, save_fp, remembered_set_action,
365 if (emit_debug_code()) {
366 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
367 mov(dst, Immediate(bit_cast<int32_t>(
kZapValue)));
372 void MacroAssembler::RecordWriteForMap(Register
object, Handle<Map>
map,
373 Register scratch1, Register scratch2,
377 Register address = scratch1;
378 Register value = scratch2;
379 if (emit_debug_code()) {
381 lea(address,
FieldOperand(
object, HeapObject::kMapOffset));
383 j(
zero, &ok, Label::kNear);
388 DCHECK(!
object.is(value));
389 DCHECK(!
object.is(address));
390 DCHECK(!value.is(address));
391 AssertNotSmi(
object);
393 if (!FLAG_incremental_marking) {
398 lea(address,
FieldOperand(
object, HeapObject::kMapOffset));
404 DCHECK(!isolate()->heap()->InNewSpace(*
map));
405 CheckPageFlagForMap(
map,
406 MemoryChunk::kPointersToHereAreInterestingMask,
418 isolate()->counters()->write_barriers_static()->Increment();
419 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
423 if (emit_debug_code()) {
424 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
425 mov(scratch1, Immediate(bit_cast<int32_t>(
kZapValue)));
426 mov(scratch2, Immediate(bit_cast<int32_t>(
kZapValue)));
431 void MacroAssembler::RecordWrite(
432 Register
object, Register address, Register value,
SaveFPRegsMode fp_mode,
435 DCHECK(!
object.is(value));
436 DCHECK(!
object.is(address));
437 DCHECK(!value.is(address));
438 AssertNotSmi(
object);
441 !FLAG_incremental_marking) {
445 if (emit_debug_code()) {
447 cmp(value, Operand(address, 0));
448 j(
equal, &ok, Label::kNear);
459 JumpIfSmi(value, &done, Label::kNear);
465 MemoryChunk::kPointersToHereAreInterestingMask,
470 CheckPageFlag(
object,
472 MemoryChunk::kPointersFromHereAreInterestingMask,
477 RecordWriteStub stub(isolate(),
object, value, address, remembered_set_action,
484 isolate()->counters()->write_barriers_static()->Increment();
485 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
489 if (emit_debug_code()) {
490 mov(address, Immediate(bit_cast<int32_t>(
kZapValue)));
491 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
496 void MacroAssembler::DebugBreak() {
497 Move(
eax, Immediate(0));
498 mov(
ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
499 CEntryStub ces(isolate(), 1);
504 bool MacroAssembler::IsUnsafeImmediate(
const Immediate& x) {
505 static const int kMaxImmediateBits = 17;
506 if (!RelocInfo::IsNone(x.rmode_))
return false;
507 return !
is_intn(x.x_, kMaxImmediateBits);
511 void MacroAssembler::SafeMove(Register dst,
const Immediate& x) {
512 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
513 Move(dst, Immediate(x.x_ ^ jit_cookie()));
514 xor_(dst, jit_cookie());
521 void MacroAssembler::SafePush(
const Immediate& x) {
522 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
523 push(Immediate(x.x_ ^ jit_cookie()));
524 xor_(Operand(
esp, 0), Immediate(jit_cookie()));
531 void MacroAssembler::CmpObjectType(Register heap_object,
535 CmpInstanceType(
map, type);
539 void MacroAssembler::CmpInstanceType(Register
map,
InstanceType type) {
541 static_cast<int8_t
>(type));
545 void MacroAssembler::CheckFastElements(Register
map,
547 Label::Distance distance) {
553 Map::kMaximumBitField2FastHoleyElementValue);
554 j(
above, fail, distance);
558 void MacroAssembler::CheckFastObjectElements(Register
map,
560 Label::Distance distance) {
566 Map::kMaximumBitField2FastHoleySmiElementValue);
569 Map::kMaximumBitField2FastHoleyElementValue);
570 j(
above, fail, distance);
574 void MacroAssembler::CheckFastSmiElements(Register
map,
576 Label::Distance distance) {
580 Map::kMaximumBitField2FastHoleySmiElementValue);
581 j(
above, fail, distance);
585 void MacroAssembler::StoreNumberToDoubleElements(
586 Register maybe_number,
591 int elements_offset) {
592 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
593 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
595 CheckMap(maybe_number,
596 isolate()->factory()->heap_number_map(),
607 ExternalReference canonical_nan_reference =
608 ExternalReference::address_of_canonical_non_hole_nan();
609 fld_d(
FieldOperand(maybe_number, HeapNumber::kValueOffset));
610 bind(&have_double_value);
612 FixedDoubleArray::kHeaderSize - elements_offset));
618 j(
greater, &is_nan, Label::kNear);
619 cmp(
FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
622 fld_d(Operand::StaticVariable(canonical_nan_reference));
623 jmp(&have_double_value, Label::kNear);
628 mov(scratch, maybe_number);
631 fild_s(Operand(
esp, 0));
634 FixedDoubleArray::kHeaderSize - elements_offset));
639 void MacroAssembler::CompareMap(Register obj, Handle<Map>
map) {
644 void MacroAssembler::CheckMap(Register obj,
649 JumpIfSmi(obj, fail);
652 CompareMap(obj,
map);
657 void MacroAssembler::DispatchMap(Register obj,
660 Handle<Code> success,
664 JumpIfSmi(obj, &fail);
673 Condition MacroAssembler::IsObjectStringType(Register heap_object,
675 Register instance_type) {
684 Condition MacroAssembler::IsObjectNameType(Register heap_object,
686 Register instance_type) {
694 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
699 IsInstanceJSObjectType(
map, scratch, fail);
703 void MacroAssembler::IsInstanceJSObjectType(Register
map,
714 void MacroAssembler::FCmp() {
723 void MacroAssembler::FXamMinusZero() {
727 and_(
eax, Immediate(0x4700));
729 cmp(
eax, Immediate(0x4200));
735 void MacroAssembler::FXamSign() {
740 and_(
eax, Immediate(0x0200));
746 void MacroAssembler::X87CheckIA() {
750 and_(
eax, Immediate(0x0041));
751 cmp(
eax, Immediate(0x0001));
760 void MacroAssembler::X87SetRC(
int rc) {
770 void MacroAssembler::AssertNumber(Register
object) {
771 if (emit_debug_code()) {
773 JumpIfSmi(
object, &ok);
775 isolate()->factory()->heap_number_map());
776 Check(
equal, kOperandNotANumber);
782 void MacroAssembler::AssertSmi(Register
object) {
783 if (emit_debug_code()) {
785 Check(
equal, kOperandIsNotASmi);
790 void MacroAssembler::AssertString(Register
object) {
791 if (emit_debug_code()) {
793 Check(
not_equal, kOperandIsASmiAndNotAString);
795 mov(
object,
FieldOperand(
object, HeapObject::kMapOffset));
798 Check(
below, kOperandIsNotAString);
803 void MacroAssembler::AssertName(Register
object) {
804 if (emit_debug_code()) {
806 Check(
not_equal, kOperandIsASmiAndNotAName);
808 mov(
object,
FieldOperand(
object, HeapObject::kMapOffset));
816 void MacroAssembler::AssertUndefinedOrAllocationSite(Register
object) {
817 if (emit_debug_code()) {
819 AssertNotSmi(
object);
820 cmp(
object, isolate()->factory()->undefined_value());
821 j(
equal, &done_checking);
823 Immediate(isolate()->factory()->allocation_site_map()));
824 Assert(
equal, kExpectedUndefinedOrCell);
825 bind(&done_checking);
830 void MacroAssembler::AssertNotSmi(Register
object) {
831 if (emit_debug_code()) {
838 void MacroAssembler::StubPrologue() {
846 void MacroAssembler::Prologue(
bool code_pre_aging) {
847 PredictableCodeSizeScope predictible_code_size_scope(
this,
849 if (code_pre_aging) {
851 call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
852 RelocInfo::CODE_AGE_SEQUENCE);
867 push(Immediate(Smi::FromInt(type)));
868 push(Immediate(CodeObject()));
869 if (emit_debug_code()) {
870 cmp(Operand(
esp, 0), Immediate(isolate()->factory()->undefined_value()));
871 Check(
not_equal, kCodeObjectNotProperlyPatched);
877 if (emit_debug_code()) {
878 cmp(Operand(
ebp, StandardFrameConstants::kMarkerOffset),
879 Immediate(Smi::FromInt(type)));
880 Check(
equal, kStackFrameTypesMustMatch);
886 void MacroAssembler::EnterExitFramePrologue() {
897 push(Immediate(CodeObject()));
900 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
901 ExternalReference context_address(Isolate::kContextAddress, isolate());
902 mov(Operand::StaticVariable(c_entry_fp_address),
ebp);
903 mov(Operand::StaticVariable(context_address),
esi);
907 void MacroAssembler::EnterExitFrameEpilogue(
int argc,
bool save_doubles) {
920 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
921 if (kFrameAlignment > 0) {
923 and_(
esp, -kFrameAlignment);
927 mov(Operand(
ebp, ExitFrameConstants::kSPOffset),
esp);
931 void MacroAssembler::EnterExitFrame(
bool save_doubles) {
932 EnterExitFramePrologue();
935 int offset = StandardFrameConstants::kCallerSPOffset -
kPointerSize;
940 EnterExitFrameEpilogue(3, save_doubles);
944 void MacroAssembler::EnterApiExitFrame(
int argc) {
945 EnterExitFramePrologue();
946 EnterExitFrameEpilogue(argc,
false);
950 void MacroAssembler::LeaveExitFrame(
bool save_doubles) {
967 LeaveExitFrameEpilogue(
true);
971 void MacroAssembler::LeaveExitFrameEpilogue(
bool restore_context) {
973 ExternalReference context_address(Isolate::kContextAddress, isolate());
974 if (restore_context) {
975 mov(
esi, Operand::StaticVariable(context_address));
978 mov(Operand::StaticVariable(context_address), Immediate(0));
982 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
984 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
988 void MacroAssembler::LeaveApiExitFrame(
bool restore_context) {
992 LeaveExitFrameEpilogue(restore_context);
996 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
1008 if (kind == StackHandler::JS_ENTRY) {
1013 push(Immediate(Smi::FromInt(0)));
1020 StackHandler::IndexField::encode(handler_index) |
1021 StackHandler::KindField::encode(kind);
1022 push(Immediate(state));
1026 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1027 push(Operand::StaticVariable(handler_address));
1029 mov(Operand::StaticVariable(handler_address),
esp);
1033 void MacroAssembler::PopTryHandler() {
1035 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1036 pop(Operand::StaticVariable(handler_address));
1041 void MacroAssembler::JumpToHandlerEntry() {
1046 shr(
edx, StackHandler::kKindWidth);
1064 if (!value.is(
eax)) {
1068 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1069 mov(
esp, Operand::StaticVariable(handler_address));
1071 pop(Operand::StaticVariable(handler_address));
1086 j(
zero, &skip, Label::kNear);
1087 mov(Operand(
ebp, StandardFrameConstants::kContextOffset),
esi);
1090 JumpToHandlerEntry();
1094 void MacroAssembler::ThrowUncatchable(Register value) {
1104 if (!value.is(
eax)) {
1108 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1109 mov(
esp, Operand::StaticVariable(handler_address));
1112 Label fetch_next, check_kind;
1113 jmp(&check_kind, Label::kNear);
1115 mov(
esp, Operand(
esp, StackHandlerConstants::kNextOffset));
1119 test(Operand(
esp, StackHandlerConstants::kStateOffset),
1120 Immediate(StackHandler::KindField::kMask));
1124 pop(Operand::StaticVariable(handler_address));
1134 JumpToHandlerEntry();
1138 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1142 Label same_contexts;
1144 DCHECK(!holder_reg.is(scratch1));
1145 DCHECK(!holder_reg.is(scratch2));
1146 DCHECK(!scratch1.is(scratch2));
1149 mov(scratch1, Operand(
ebp, StandardFrameConstants::kContextOffset));
1152 if (emit_debug_code()) {
1153 cmp(scratch1, Immediate(0));
1154 Check(
not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
1158 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX *
kPointerSize;
1160 mov(scratch1,
FieldOperand(scratch1, GlobalObject::kNativeContextOffset));
1163 if (emit_debug_code()) {
1166 isolate()->factory()->native_context_map());
1167 Check(
equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1171 cmp(scratch1,
FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1172 j(
equal, &same_contexts);
1181 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1184 if (emit_debug_code()) {
1185 cmp(scratch2, isolate()->factory()->null_value());
1186 Check(
not_equal, kJSGlobalProxyContextShouldNotBeNull);
1190 isolate()->factory()->native_context_map());
1191 Check(
equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1194 int token_offset = Context::kHeaderSize +
1200 bind(&same_contexts);
1209 void MacroAssembler::GetNumberHash(Register
r0, Register scratch) {
1211 if (serializer_enabled()) {
1212 ExternalReference roots_array_start =
1213 ExternalReference::roots_array_start(isolate());
1214 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1220 int32_t seed = isolate()->heap()->HashSeed();
1221 xor_(
r0, Immediate(seed));
1249 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1274 GetNumberHash(
r0,
r1);
1277 mov(
r1,
FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1287 add(
r2, Immediate(SeededNumberDictionary::GetProbeOffset(
i)));
1292 DCHECK(SeededNumberDictionary::kEntrySize == 3);
1299 SeededNumberDictionary::kElementsStartOffset));
1309 const int kDetailsOffset =
1310 SeededNumberDictionary::kElementsStartOffset + 2 *
kPointerSize;
1313 Immediate(PropertyDetails::TypeField::kMask <<
kSmiTagSize));
1317 const int kValueOffset =
1318 SeededNumberDictionary::kElementsStartOffset +
kPointerSize;
1323 void MacroAssembler::LoadAllocationTopHelper(Register result,
1326 ExternalReference allocation_top =
1327 AllocationUtils::GetAllocationTopReference(isolate(),
flags);
1335 cmp(result, Operand::StaticVariable(allocation_top));
1336 Check(
equal, kUnexpectedAllocationTop);
1342 if (scratch.is(
no_reg)) {
1343 mov(result, Operand::StaticVariable(allocation_top));
1345 mov(scratch, Immediate(allocation_top));
1346 mov(result, Operand(scratch, 0));
1351 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1354 if (emit_debug_code()) {
1356 Check(
zero, kUnalignedAllocationInNewSpace);
1359 ExternalReference allocation_top =
1360 AllocationUtils::GetAllocationTopReference(isolate(),
flags);
1363 if (scratch.is(
no_reg)) {
1364 mov(Operand::StaticVariable(allocation_top), result_end);
1366 mov(Operand(scratch, 0), result_end);
1371 void MacroAssembler::Allocate(
int object_size,
1373 Register result_end,
1378 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
1379 if (!FLAG_inline_new) {
1380 if (emit_debug_code()) {
1382 mov(result, Immediate(0x7091));
1383 if (result_end.is_valid()) {
1384 mov(result_end, Immediate(0x7191));
1386 if (scratch.is_valid()) {
1387 mov(scratch, Immediate(0x7291));
1393 DCHECK(!result.is(result_end));
1396 LoadAllocationTopHelper(result, scratch,
flags);
1398 ExternalReference allocation_limit =
1399 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
1410 cmp(result, Operand::StaticVariable(allocation_limit));
1413 mov(Operand(result, 0),
1414 Immediate(isolate()->factory()->one_pointer_filler_map()));
1420 Register top_reg = result_end.is_valid() ? result_end : result;
1421 if (!top_reg.is(result)) {
1422 mov(top_reg, result);
1424 add(top_reg, Immediate(object_size));
1425 j(
carry, gc_required);
1426 cmp(top_reg, Operand::StaticVariable(allocation_limit));
1427 j(
above, gc_required);
1430 UpdateAllocationTopHelper(top_reg, scratch,
flags);
1434 if (top_reg.is(result)) {
1438 sub(result, Immediate(object_size));
1440 }
else if (tag_result) {
1447 void MacroAssembler::Allocate(
int header_size,
1449 Register element_count,
1452 Register result_end,
1457 if (!FLAG_inline_new) {
1458 if (emit_debug_code()) {
1460 mov(result, Immediate(0x7091));
1461 mov(result_end, Immediate(0x7191));
1462 if (scratch.is_valid()) {
1463 mov(scratch, Immediate(0x7291));
1470 DCHECK(!result.is(result_end));
1473 LoadAllocationTopHelper(result, scratch,
flags);
1475 ExternalReference allocation_limit =
1476 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
1487 cmp(result, Operand::StaticVariable(allocation_limit));
1490 mov(Operand(result, 0),
1491 Immediate(isolate()->factory()->one_pointer_filler_map()));
1505 element_size =
static_cast<ScaleFactor>(element_size - 1);
1509 lea(result_end, Operand(element_count, element_size, header_size));
1510 add(result_end, result);
1511 j(
carry, gc_required);
1512 cmp(result_end, Operand::StaticVariable(allocation_limit));
1513 j(
above, gc_required);
1521 UpdateAllocationTopHelper(result_end, scratch,
flags);
1525 void MacroAssembler::Allocate(Register object_size,
1527 Register result_end,
1532 if (!FLAG_inline_new) {
1533 if (emit_debug_code()) {
1535 mov(result, Immediate(0x7091));
1536 mov(result_end, Immediate(0x7191));
1537 if (scratch.is_valid()) {
1538 mov(scratch, Immediate(0x7291));
1545 DCHECK(!result.is(result_end));
1548 LoadAllocationTopHelper(result, scratch,
flags);
1550 ExternalReference allocation_limit =
1551 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
1562 cmp(result, Operand::StaticVariable(allocation_limit));
1565 mov(Operand(result, 0),
1566 Immediate(isolate()->factory()->one_pointer_filler_map()));
1572 if (!object_size.is(result_end)) {
1573 mov(result_end, object_size);
1575 add(result_end, result);
1576 j(
carry, gc_required);
1577 cmp(result_end, Operand::StaticVariable(allocation_limit));
1578 j(
above, gc_required);
1587 UpdateAllocationTopHelper(result_end, scratch,
flags);
1591 void MacroAssembler::UndoAllocationInNewSpace(Register
object) {
1592 ExternalReference new_space_allocation_top =
1593 ExternalReference::new_space_allocation_top_address(isolate());
1598 cmp(
object, Operand::StaticVariable(new_space_allocation_top));
1599 Check(
below, kUndoAllocationOfNonAllocatedMemory);
1601 mov(Operand::StaticVariable(new_space_allocation_top),
object);
1605 void MacroAssembler::AllocateHeapNumber(Register result,
1611 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1615 ? isolate()->factory()->mutable_heap_number_map()
1616 : isolate()->factory()->heap_number_map();
1623 void MacroAssembler::AllocateTwoByteString(Register result,
1628 Label* gc_required) {
1638 Allocate(SeqTwoByteString::kHeaderSize,
1650 Immediate(isolate()->factory()->string_map()));
1651 mov(scratch1, length);
1653 mov(
FieldOperand(result, String::kLengthOffset), scratch1);
1655 Immediate(String::kEmptyHashField));
1659 void MacroAssembler::AllocateOneByteString(Register result, Register length,
1660 Register scratch1, Register scratch2,
1662 Label* gc_required) {
1666 mov(scratch1, length);
1672 Allocate(SeqOneByteString::kHeaderSize,
1684 Immediate(isolate()->factory()->one_byte_string_map()));
1685 mov(scratch1, length);
1687 mov(
FieldOperand(result, String::kLengthOffset), scratch1);
1689 Immediate(String::kEmptyHashField));
1693 void MacroAssembler::AllocateOneByteString(Register result,
int length,
1694 Register scratch1, Register scratch2,
1695 Label* gc_required) {
1699 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1704 Immediate(isolate()->factory()->one_byte_string_map()));
1706 Immediate(Smi::FromInt(length)));
1708 Immediate(String::kEmptyHashField));
1712 void MacroAssembler::AllocateTwoByteConsString(Register result,
1715 Label* gc_required) {
1717 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
1722 Immediate(isolate()->factory()->cons_string_map()));
1726 void MacroAssembler::AllocateOneByteConsString(Register result,
1729 Label* gc_required) {
1730 Allocate(ConsString::kSize,
1739 Immediate(isolate()->factory()->cons_one_byte_string_map()));
1743 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1746 Label* gc_required) {
1748 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1753 Immediate(isolate()->factory()->sliced_string_map()));
1757 void MacroAssembler::AllocateOneByteSlicedString(Register result,
1760 Label* gc_required) {
1762 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1767 Immediate(isolate()->factory()->sliced_one_byte_string_map()));
1780 Register destination,
1783 Label short_loop, len4, len8, len12, done, short_string;
1787 cmp(length, Immediate(4));
1788 j(
below, &short_string, Label::kNear);
1793 mov(scratch, Operand(source, length,
times_1, -4));
1794 mov(Operand(destination, length,
times_1, -4), scratch);
1796 cmp(length, Immediate(8));
1798 cmp(length, Immediate(12));
1800 cmp(length, Immediate(16));
1806 and_(scratch, Immediate(0x3));
1807 add(destination, scratch);
1808 jmp(&done, Label::kNear);
1811 mov(scratch, Operand(source, 8));
1812 mov(Operand(destination, 8), scratch);
1814 mov(scratch, Operand(source, 4));
1815 mov(Operand(destination, 4), scratch);
1817 mov(scratch, Operand(source, 0));
1818 mov(Operand(destination, 0), scratch);
1819 add(destination, length);
1820 jmp(&done, Label::kNear);
1822 bind(&short_string);
1823 test(length, length);
1824 j(
zero, &done, Label::kNear);
1827 mov_b(scratch, Operand(source, 0));
1828 mov_b(Operand(destination, 0), scratch);
1838 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1839 Register end_offset,
1844 mov(Operand(start_offset, 0), filler);
1847 cmp(start_offset, end_offset);
1852 void MacroAssembler::BooleanBitTest(Register
object,
1859 test_b(
FieldOperand(
object, field_offset + byte_index),
1860 static_cast<byte>(1 << byte_bit_index));
1865 void MacroAssembler::NegativeZeroTest(Register result,
1867 Label* then_label) {
1869 test(result, result);
1872 j(
sign, then_label);
1877 void MacroAssembler::NegativeZeroTest(Register result,
1881 Label* then_label) {
1883 test(result, result);
1887 j(
sign, then_label);
1892 void MacroAssembler::TryGetFunctionPrototype(Register
function,
1896 bool miss_on_bound_function) {
1898 if (miss_on_bound_function) {
1900 JumpIfSmi(
function, miss);
1908 FieldOperand(
function, JSFunction::kSharedFunctionInfoOffset));
1909 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1910 SharedFunctionInfo::kBoundFunction);
1914 movzx_b(scratch,
FieldOperand(result, Map::kBitFieldOffset));
1915 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1921 FieldOperand(
function, JSFunction::kPrototypeOrInitialMapOffset));
1926 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1931 CmpObjectType(result,
MAP_TYPE, scratch);
1935 mov(result,
FieldOperand(result, Map::kPrototypeOffset));
1937 if (miss_on_bound_function) {
1942 bind(&non_instance);
1943 mov(result,
FieldOperand(result, Map::kConstructorOffset));
1951 void MacroAssembler::CallStub(
CodeStub* stub, TypeFeedbackId ast_id) {
1952 DCHECK(AllowThisStubCall(stub));
1953 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
1957 void MacroAssembler::TailCallStub(
CodeStub* stub) {
1958 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1962 void MacroAssembler::StubReturn(
int argc) {
1963 DCHECK(argc >= 1 && generating_stub());
1968 bool MacroAssembler::AllowThisStubCall(
CodeStub* stub) {
1969 return has_frame_ || !stub->SometimesSetsUpAFrame();
1973 void MacroAssembler::IndexFromHash(Register hash, Register index) {
1978 (1 << String::kArrayIndexValueBits));
1979 if (!index.is(hash)) {
1982 DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
1986 void MacroAssembler::CallRuntime(
const Runtime::Function* f,
int num_arguments,
1991 CHECK(f->nargs < 0 || f->nargs == num_arguments);
1997 Move(
eax, Immediate(num_arguments));
1998 mov(
ebx, Immediate(ExternalReference(f, isolate())));
1999 CEntryStub ces(isolate(), 1, save_doubles);
2004 void MacroAssembler::CallExternalReference(ExternalReference ref,
2005 int num_arguments) {
2006 mov(
eax, Immediate(num_arguments));
2007 mov(
ebx, Immediate(ref));
2009 CEntryStub stub(isolate(), 1);
2014 void MacroAssembler::TailCallExternalReference(
const ExternalReference& ext,
2021 Move(
eax, Immediate(num_arguments));
2022 JumpToExternalReference(ext);
2026 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
2029 TailCallExternalReference(ExternalReference(fid, isolate()),
2040 void MacroAssembler::PrepareCallApiFunction(
int argc) {
2041 EnterApiExitFrame(argc);
2042 if (emit_debug_code()) {
2048 void MacroAssembler::CallApiFunctionAndReturn(
2049 Register function_address,
2050 ExternalReference thunk_ref,
2051 Operand thunk_last_arg,
2053 Operand return_value_operand,
2054 Operand* context_restore_operand) {
2055 ExternalReference next_address =
2056 ExternalReference::handle_scope_next_address(isolate());
2057 ExternalReference limit_address =
2058 ExternalReference::handle_scope_limit_address(isolate());
2059 ExternalReference level_address =
2060 ExternalReference::handle_scope_level_address(isolate());
2064 mov(
ebx, Operand::StaticVariable(next_address));
2065 mov(
edi, Operand::StaticVariable(limit_address));
2066 add(Operand::StaticVariable(level_address), Immediate(1));
2068 if (FLAG_log_timer_events) {
2069 FrameScope frame(
this, StackFrame::MANUAL);
2070 PushSafepointRegisters();
2071 PrepareCallCFunction(1,
eax);
2072 mov(Operand(
esp, 0),
2073 Immediate(ExternalReference::isolate_address(isolate())));
2074 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2075 PopSafepointRegisters();
2079 Label profiler_disabled;
2080 Label end_profiler_check;
2081 mov(
eax, Immediate(ExternalReference::is_profiling_address(isolate())));
2082 cmpb(Operand(
eax, 0), 0);
2083 j(
zero, &profiler_disabled);
2086 mov(thunk_last_arg, function_address);
2088 mov(
eax, Immediate(thunk_ref));
2090 jmp(&end_profiler_check);
2092 bind(&profiler_disabled);
2094 call(function_address);
2095 bind(&end_profiler_check);
2097 if (FLAG_log_timer_events) {
2098 FrameScope frame(
this, StackFrame::MANUAL);
2099 PushSafepointRegisters();
2100 PrepareCallCFunction(1,
eax);
2101 mov(Operand(
esp, 0),
2102 Immediate(ExternalReference::isolate_address(isolate())));
2103 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2104 PopSafepointRegisters();
2109 mov(
eax, return_value_operand);
2111 Label promote_scheduled_exception;
2112 Label exception_handled;
2113 Label delete_allocated_handles;
2114 Label leave_exit_frame;
2119 mov(Operand::StaticVariable(next_address),
ebx);
2120 sub(Operand::StaticVariable(level_address), Immediate(1));
2122 cmp(
edi, Operand::StaticVariable(limit_address));
2123 j(
not_equal, &delete_allocated_handles);
2124 bind(&leave_exit_frame);
2127 ExternalReference scheduled_exception_address =
2128 ExternalReference::scheduled_exception_address(isolate());
2129 cmp(Operand::StaticVariable(scheduled_exception_address),
2130 Immediate(isolate()->factory()->the_hole_value()));
2131 j(
not_equal, &promote_scheduled_exception);
2132 bind(&exception_handled);
2134 #if ENABLE_EXTRA_CHECKS
2137 Register return_value =
eax;
2140 JumpIfSmi(return_value, &ok, Label::kNear);
2144 j(
below, &ok, Label::kNear);
2149 cmp(
map, isolate()->factory()->heap_number_map());
2150 j(
equal, &ok, Label::kNear);
2152 cmp(return_value, isolate()->factory()->undefined_value());
2153 j(
equal, &ok, Label::kNear);
2155 cmp(return_value, isolate()->factory()->true_value());
2156 j(
equal, &ok, Label::kNear);
2158 cmp(return_value, isolate()->factory()->false_value());
2159 j(
equal, &ok, Label::kNear);
2161 cmp(return_value, isolate()->factory()->null_value());
2162 j(
equal, &ok, Label::kNear);
2164 Abort(kAPICallReturnedInvalidObject);
2169 bool restore_context = context_restore_operand !=
NULL;
2170 if (restore_context) {
2171 mov(
esi, *context_restore_operand);
2173 LeaveApiExitFrame(!restore_context);
2176 bind(&promote_scheduled_exception);
2179 CallRuntime(Runtime::kPromoteScheduledException, 0);
2181 jmp(&exception_handled);
2184 ExternalReference delete_extensions =
2185 ExternalReference::delete_handle_scope_extensions(isolate());
2186 bind(&delete_allocated_handles);
2187 mov(Operand::StaticVariable(limit_address),
edi);
2189 mov(Operand(
esp, 0),
2190 Immediate(ExternalReference::isolate_address(isolate())));
2191 mov(
eax, Immediate(delete_extensions));
2194 jmp(&leave_exit_frame);
2198 void MacroAssembler::JumpToExternalReference(
const ExternalReference& ext) {
2200 mov(
ebx, Immediate(ext));
2201 CEntryStub ces(isolate(), 1);
2202 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
// Shared prologue for JS function invocation: compares the expected and
// actual argument counts and, on mismatch, goes through the arguments
// adaptor trampoline.  Sets *definitely_mismatches when the mismatch is
// known at assembly time.
// NOTE(review): extraction artifact — the fused source-line numbers jump
// (e.g. 2225 -> 2230), so else-branches and closing braces are missing from
// this listing; code left byte-identical.
2206 void MacroAssembler::InvokePrologue(
const ParameterCount& expected,
2207 const ParameterCount& actual,
2208 Handle<Code> code_constant,
2209 const Operand& code_operand,
2211 bool* definitely_mismatches,
2213 Label::Distance done_near,
2214 const CallWrapper& call_wrapper) {
2215 bool definitely_matches =
false;
2216 *definitely_mismatches =
false;
2218 if (expected.is_immediate()) {
2219 DCHECK(actual.is_immediate());
2220 if (expected.immediate() == actual.immediate()) {
2221 definitely_matches =
true;
// eax carries the actual argument count per the ia32 calling convention.
2223 mov(
eax, actual.immediate());
2224 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
// Sentinel means the callee accepts any argument count — no adaption needed.
2225 if (expected.immediate() == sentinel) {
2230 definitely_matches =
true;
2232 *definitely_mismatches =
true;
// ebx carries the expected count into the adaptor.
2233 mov(
ebx, expected.immediate());
2237 if (actual.is_immediate()) {
2241 cmp(expected.reg(), actual.immediate());
2244 mov(
eax, actual.immediate());
2245 }
else if (!expected.reg().is(actual.reg())) {
2248 cmp(expected.reg(), actual.reg());
2255 if (!definitely_matches) {
2256 Handle<Code> adaptor =
2257 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2258 if (!code_constant.is_null()) {
// The adaptor expects the code object (or its entry) in edx.
2259 mov(
edx, Immediate(code_constant));
2261 }
else if (!code_operand.is_reg(
edx)) {
2262 mov(
edx, code_operand);
2266 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2267 call(adaptor, RelocInfo::CODE_TARGET);
2268 call_wrapper.AfterCall();
2269 if (!*definitely_mismatches) {
2270 jmp(done, done_near);
// Known mismatch: tail-jump into the adaptor instead of calling it.
2273 jmp(adaptor, RelocInfo::CODE_TARGET);
// Invokes |code| with the given expected/actual argument counts, going
// through InvokePrologue for argument-count adaption.
// NOTE(review): extraction artifact — interior lines (flag parameter, the
// call/jmp dispatch on |flag|, closing braces) are missing from this
// listing; code left byte-identical.
2280 void MacroAssembler::InvokeCode(
const Operand& code,
2281 const ParameterCount& expected,
2282 const ParameterCount& actual,
2284 const CallWrapper& call_wrapper) {
2289 bool definitely_mismatches =
false;
2290 InvokePrologue(expected, actual, Handle<Code>::null(), code,
2291 &done, &definitely_mismatches,
flag, Label::kNear,
2293 if (!definitely_mismatches) {
2295 call_wrapper.BeforeCall(CallSize(code));
2297 call_wrapper.AfterCall();
// Three InvokeFunction overloads: by function register (expected count read
// from the SharedFunctionInfo into ebx), by register with explicit expected
// count, and by JSFunction handle (loaded into edi first — edi is the
// callee register on ia32).
// NOTE(review): extraction artifact — loads of the shared-info/context and
// the InvokeCode call sites are partially missing; code left byte-identical.
2307 void MacroAssembler::InvokeFunction(Register fun,
2308 const ParameterCount& actual,
2310 const CallWrapper& call_wrapper) {
2320 ParameterCount expected(
ebx);
2322 expected, actual,
flag, call_wrapper);
2326 void MacroAssembler::InvokeFunction(Register fun,
2327 const ParameterCount& expected,
2328 const ParameterCount& actual,
2330 const CallWrapper& call_wrapper) {
2338 expected, actual,
flag, call_wrapper);
2342 void MacroAssembler::InvokeFunction(Handle<JSFunction>
function,
2343 const ParameterCount& expected,
2344 const ParameterCount& actual,
2346 const CallWrapper& call_wrapper) {
2347 LoadHeapObject(
edi,
function);
2348 InvokeFunction(
edi, expected, actual,
flag, call_wrapper);
// InvokeBuiltin: fetch the builtin JSFunction into edi and invoke it with
// zero expected arguments.  GetBuiltinFunction walks
// global object -> builtins object -> function slot for |id|;
// GetBuiltinEntry additionally resolves the code entry (tail missing here).
// NOTE(review): extraction artifact — interior lines missing; code left
// byte-identical.
2352 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript
id,
2354 const CallWrapper& call_wrapper) {
2361 ParameterCount expected(0);
2362 GetBuiltinFunction(
edi,
id);
2364 expected, expected,
flag, call_wrapper);
2368 void MacroAssembler::GetBuiltinFunction(Register target,
2369 Builtins::JavaScript
id) {
// esi holds the current context; from it reach the global object.
2371 mov(target, Operand(
esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2372 mov(target,
FieldOperand(target, GlobalObject::kBuiltinsOffset));
2374 JSBuiltinsObject::OffsetOfFunctionWithId(
id)));
2378 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript
id) {
2381 GetBuiltinFunction(
edi,
id);
// Walks |context_chain_length| PREVIOUS links up the context chain starting
// from esi, leaving the result in |dst|.  In debug builds, checks the
// result is not a with-context (variables must not resolve through one).
// NOTE(review): extraction artifact — the length==0 branch and the debug
// cmp line are missing; code left byte-identical.
2387 void MacroAssembler::LoadContext(Register dst,
int context_chain_length) {
2388 if (context_chain_length > 0) {
2390 mov(dst, Operand(
esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2391 for (
int i = 1;
i < context_chain_length;
i++) {
2392 mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2405 if (emit_debug_code()) {
2407 isolate()->factory()->with_context_map());
2408 Check(
not_equal, kVariableResolvedToWithContext);
// Loads the transitioned array map for a target ElementsKind from the
// native context's JS_ARRAY_MAPS list, jumping to |no_map_match| when the
// input map is not the expected kind's map.
// NOTE(review): extraction artifact — the kind parameters, the cmp against
// the expected map, and the offset computations are largely missing; code
// left byte-identical.
2413 void MacroAssembler::LoadTransitionedArrayMapConditional(
2416 Register map_in_out,
2418 Label* no_map_match) {
2420 mov(scratch, Operand(
esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2421 mov(scratch,
FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2424 mov(scratch, Operand(scratch,
2425 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2428 FixedArrayBase::kHeaderSize;
2434 FixedArrayBase::kHeaderSize;
// LoadGlobalFunction: global object -> native context -> function at
// context slot |index|.  LoadGlobalFunctionInitialMap reads the function's
// prototype-or-initial-map field; in debug builds it aborts if the value is
// not a map (globals must have an initial map).
// NOTE(review): extraction artifact — interior lines missing; code left
// byte-identical.
2439 void MacroAssembler::LoadGlobalFunction(
int index, Register
function) {
2442 Operand(
esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2445 FieldOperand(
function, GlobalObject::kNativeContextOffset));
2447 mov(
function, Operand(
function, Context::SlotOffset(index)));
2451 void MacroAssembler::LoadGlobalFunctionInitialMap(Register
function,
2454 mov(
map,
FieldOperand(
function, JSFunction::kPrototypeOrInitialMapOffset));
2455 if (emit_debug_code()) {
2460 Abort(kGlobalFunctionsMustHaveInitialMap);
2468 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2469 mov(SafepointRegisterSlot(dst), src);
2473 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2474 mov(SafepointRegisterSlot(dst), src);
2478 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2479 mov(dst, SafepointRegisterSlot(src));
2483 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2484 return Operand(
esp, SafepointRegisterStackIndex(reg.code()) *
kPointerSize);
// Maps an ia32 register code to its index within a pushed safepoint
// register frame.
// NOTE(review): extraction artifact — the body is missing from this
// listing; signature only.
2488 int MacroAssembler::SafepointRegisterStackIndex(
int reg_code) {
// Load/Cmp/PushHeapObject: heap objects in new space may move, so they are
// referenced indirectly through a Cell; old-space objects are embedded
// directly as immediates.
// NOTE(review): extraction artifact — the else branches for CmpHeapObject
// and PushHeapObject and all closing braces are missing; code left
// byte-identical.
2497 void MacroAssembler::LoadHeapObject(Register result,
2498 Handle<HeapObject>
object) {
2500 if (isolate()->heap()->InNewSpace(*
object)) {
2501 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2502 mov(result, Operand::ForCell(cell));
2504 mov(result,
object);
2509 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject>
object) {
2511 if (isolate()->heap()->InNewSpace(*
object)) {
2512 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2513 cmp(reg, Operand::ForCell(cell));
2520 void MacroAssembler::PushHeapObject(Handle<HeapObject>
object) {
2522 if (isolate()->heap()->InNewSpace(*
object)) {
2523 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2524 push(Operand::ForCell(cell));
// Ret(): plain return (body missing in this listing).  Ret(bytes_dropped,
// scratch): return and pop |bytes_dropped| bytes of arguments; small drops
// use ret imm16, larger ones adjust esp explicitly via the add shown.
// NOTE(review): extraction artifact — interior lines missing; code left
// byte-identical.
2531 void MacroAssembler::Ret() {
2536 void MacroAssembler::Ret(
int bytes_dropped, Register scratch) {
2537 if (is_uint16(bytes_dropped)) {
2541 add(
esp, Immediate(bytes_dropped));
// Debug helper: checks that the x87 FPU register stack holds exactly
// |depth| values by reading the TOP field (bits 11-13, mask 0x3800) of the
// FPU status word and comparing it to the expected top-of-stack index.
// Skipped when the serializer is enabled.
// NOTE(review): extraction artifact — the fnstsw/push/pop sequence around
// the and_/cmp is missing; code left byte-identical.
2548 void MacroAssembler::VerifyX87StackDepth(
uint32_t depth) {
2551 if (serializer_enabled())
return;
2558 int tos = (8 - depth) % 8;
2559 const int kTopMask = 0x3800;
2563 and_(
eax, kTopMask);
2565 cmp(
eax, Immediate(tos));
2566 Check(
equal, kUnexpectedFPUStackDepthAfterInstruction);
// Drop: pops |stack_elements| pointer-sized values off the stack (the
// esp adjustment itself is missing from this listing).  The three Move
// overloads below also have their bodies stripped — signatures only.
// NOTE(review): extraction artifact; code left byte-identical.
2572 void MacroAssembler::Drop(
int stack_elements) {
2573 if (stack_elements > 0) {
2579 void MacroAssembler::Move(Register dst, Register src) {
2586 void MacroAssembler::Move(Register dst,
const Immediate& x) {
2595 void MacroAssembler::Move(
const Operand& dst,
const Immediate& x) {
2600 void MacroAssembler::SetCounter(StatsCounter* counter,
int value) {
2601 if (FLAG_native_code_counters && counter->Enabled()) {
2602 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
// Increment/DecrementCounter adjust a StatsCounter cell in memory when
// native code counters are enabled.  The Condition-taking variants only
// apply the update when |cc| holds.  The trailing line belongs to
// MacroAssembler::Assert, whose signature was stripped by the extraction.
// NOTE(review): extraction artifact — the value==1 inc/dec fast paths,
// the conditional-skip label logic, and closing braces are missing; code
// left byte-identical.
2607 void MacroAssembler::IncrementCounter(StatsCounter* counter,
int value) {
2609 if (FLAG_native_code_counters && counter->Enabled()) {
2610 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2614 add(operand, Immediate(value));
2620 void MacroAssembler::DecrementCounter(StatsCounter* counter,
int value) {
2622 if (FLAG_native_code_counters && counter->Enabled()) {
2623 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2627 sub(operand, Immediate(value));
2633 void MacroAssembler::IncrementCounter(
Condition cc,
2634 StatsCounter* counter,
2637 if (FLAG_native_code_counters && counter->Enabled()) {
2641 IncrementCounter(counter, value);
2648 void MacroAssembler::DecrementCounter(
Condition cc,
2649 StatsCounter* counter,
2652 if (FLAG_native_code_counters && counter->Enabled()) {
2656 DecrementCounter(counter, value);
// Fragment of MacroAssembler::Assert — debug-only conditional check.
2664 if (emit_debug_code()) Check(
cc, reason);
// Debug check that |elements| has one of the fast elements maps
// (fixed array, fixed double array, or COW array); otherwise aborts.
// NOTE(review): extraction artifact — the cmp instructions and the ok-label
// jumps between the map immediates are missing; code left byte-identical.
2668 void MacroAssembler::AssertFastElements(Register elements) {
2669 if (emit_debug_code()) {
2670 Factory* factory = isolate()->factory();
2673 Immediate(factory->fixed_array_map()));
2676 Immediate(factory->fixed_double_array_map()));
2679 Immediate(factory->fixed_cow_array_map()));
2681 Abort(kJSObjectWithFastElementsMapHasSlowElements);
// Verifies esp is aligned to the platform's activation frame alignment;
// tests the low bits and falls through when aligned.
// NOTE(review): extraction artifact — the alignment>kPointerSize guard and
// the int3 breakpoint on misalignment are missing; code left byte-identical.
2696 void MacroAssembler::CheckStackAlignment() {
2697 int frame_alignment = base::OS::ActivationFrameAlignment();
2698 int frame_alignment_mask = frame_alignment - 1;
2701 Label alignment_as_expected;
2702 test(
esp, Immediate(frame_alignment_mask));
2703 j(
zero, &alignment_as_expected);
2706 bind(&alignment_as_expected);
// Fragment of MacroAssembler::Abort(BailoutReason) — the signature was
// stripped by the extraction.  Records the abort message, optionally traps
// via FLAG_trap_on_abort, pushes the reason as a Smi and calls the Abort
// runtime function (once for the normal path, once under a frame scope —
// the surrounding context is missing here).
// NOTE(review): extraction artifact; code left byte-identical.
2715 RecordComment(
"Abort message: ");
2719 if (FLAG_trap_on_abort) {
2725 push(Immediate(
reinterpret_cast<intptr_t
>(Smi::FromInt(reason))));
2731 CallRuntime(Runtime::kAbort, 1);
2733 CallRuntime(Runtime::kAbort, 1);
// LoadInstanceDescriptors: body missing in this listing (presumably loads
// the descriptor array from the map — TODO confirm against full source).
// NumberOfOwnDescriptors decodes the own-descriptor count from the map's
// bit field 3 (the preceding load of that field is missing here).
// NOTE(review): extraction artifact; code left byte-identical.
2740 void MacroAssembler::LoadInstanceDescriptors(Register
map,
2741 Register descriptors) {
2746 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register
map) {
2748 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
// Looks up |object| (a Smi or HeapNumber) in the number-string cache.
// Smis hash by value; heap numbers hash by xoring the two 32-bit halves of
// the double.  On hit the cached string lands in |result|; on miss control
// goes to |not_found|.
// NOTE(review): extraction artifact — the SmiUntag/shr hash steps, the
// probe cmp instructions, and several labels are missing; also note the
// "&not"/"¬" entity garbling in the not_smi label references.  Code left
// byte-identical.
2752 void MacroAssembler::LookupNumberStringCache(Register
object,
2758 Register number_string_cache = result;
2759 Register mask = scratch1;
2760 Register scratch = scratch2;
2763 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
// Cache capacity is length/2 entries; mask = capacity - 1 (power of two).
2766 mov(mask,
FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2768 sub(mask, Immediate(1));
2774 Label smi_hash_calculated;
2775 Label load_result_from_cache;
2778 JumpIfNotSmi(
object, &not_smi, Label::kNear);
2779 mov(scratch,
object);
2781 jmp(&smi_hash_calculated, Label::kNear);
2784 isolate()->factory()->heap_number_map());
2787 mov(scratch,
FieldOperand(
object, HeapNumber::kValueOffset));
2788 xor_(scratch,
FieldOperand(
object, HeapNumber::kValueOffset + 4));
2790 and_(scratch, mask);
2791 Register index = scratch;
2792 Register probe = mask;
2797 FixedArray::kHeaderSize));
2798 JumpIfSmi(probe, not_found);
2804 jmp(&load_result_from_cache, Label::kNear);
2806 bind(&smi_hash_calculated);
2808 and_(scratch, mask);
2814 FixedArray::kHeaderSize));
2818 bind(&load_result_from_cache);
2824 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
// Three string-type checks.  The first branches to |failure| unless an
// instance type is a sequential one-byte string (mask/cmp lines missing
// here).  The second checks two objects at once: a combined Smi test, then
// both instance types packed into one register via lea(.., times_8) so a
// single cmp validates both.  The third jumps unless an instance type is a
// unique name (internalized string or symbol; the test lines are missing).
// NOTE(review): extraction artifact — interior lines missing; code left
// byte-identical.
2828 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
2829 Register instance_type, Register scratch, Label* failure) {
2830 if (!scratch.is(instance_type)) {
2831 mov(scratch, instance_type);
2840 void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
// and_-ing the objects lets one JumpIfSmi catch either being a Smi.
2847 mov(scratch1, object1);
2848 and_(scratch1, object2);
2849 JumpIfSmi(scratch1, failure);
2852 mov(scratch1,
FieldOperand(object1, HeapObject::kMapOffset));
2853 mov(scratch2,
FieldOperand(object2, HeapObject::kMapOffset));
2854 movzx_b(scratch1,
FieldOperand(scratch1, Map::kInstanceTypeOffset));
2855 movzx_b(scratch2,
FieldOperand(scratch2, Map::kInstanceTypeOffset));
2858 const int kFlatOneByteStringMask =
2860 const int kFlatOneByteStringTag =
// Mask fits in 3 bits, so shifting by 3 cannot overlap — both types can be
// packed into one register and compared at once.
2863 DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
2864 and_(scratch1, kFlatOneByteStringMask);
2865 and_(scratch2, kFlatOneByteStringMask);
2866 lea(scratch1, Operand(scratch1, scratch2,
times_8, 0));
2867 cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
2872 void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
2873 Label* not_unique_name,
2874 Label::Distance distance) {
2880 j(
not_equal, not_unique_name, distance);
// Debug checks before writing a character into a sequential string:
// verifies the string's instance type matches |encoding_mask|, the index is
// a Smi below the string length, and the index is non-negative.
// NOTE(review): extraction artifact — the is_object label plumbing, the
// index Smi check, and the final Check are partially missing; code left
// byte-identical.
2886 void MacroAssembler::EmitSeqStringSetCharCheck(Register
string,
2891 JumpIfNotSmi(
string, &is_object, Label::kNear);
2896 mov(value,
FieldOperand(
string, HeapObject::kMapOffset));
2897 movzx_b(value,
FieldOperand(value, Map::kInstanceTypeOffset));
2900 cmp(value, Immediate(encoding_mask));
2902 Check(
equal, kUnexpectedStringType);
// The index is a Smi, the length is a Smi — comparable directly.
2910 cmp(index,
FieldOperand(
string, String::kLengthOffset));
2911 Check(
less, kIndexIsTooLarge);
2913 cmp(index, Immediate(Smi::FromInt(0)));
// Reserves stack space for |num_arguments| C arguments and, when the OS
// requires activation frame alignment, aligns esp downward with and_.
// NOTE(review): extraction artifact — the esp adjustment before the align
// and the unaligned fallback are missing; code left byte-identical.
2921 void MacroAssembler::PrepareCallCFunction(
int num_arguments, Register scratch) {
2922 int frame_alignment = base::OS::ActivationFrameAlignment();
2923 if (frame_alignment != 0) {
// and_ with a negative power of two rounds esp down to the alignment.
2929 and_(
esp, -frame_alignment);
2937 void MacroAssembler::CallCFunction(ExternalReference
function,
2938 int num_arguments) {
2940 mov(
eax, Immediate(
function));
2941 CallCFunction(
eax, num_arguments);
// Register-target overload: checks stack alignment in debug code, calls
// through |function|, then restores esp.
// NOTE(review): extraction artifact — the call instruction and the esp
// restore (add vs Drop depending on alignment) are missing; code left
// byte-identical.
2945 void MacroAssembler::CallCFunction(Register
function,
2946 int num_arguments) {
2949 if (emit_debug_code()) {
2950 CheckStackAlignment();
2954 if (base::OS::ActivationFrameAlignment() != 0) {
// Fragment of AreAliased(Register...) — the signature and the
// "RegList regs = 0;" initializer were stripped by the extraction.
// Counts valid registers two ways (arithmetically and via the bitset's
// population count); any alias makes the bitset count smaller.
// NOTE(review): extraction artifact; code left byte-identical.
2971 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
2972 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
2973 reg7.is_valid() + reg8.is_valid();
2976 if (reg1.is_valid()) regs |= reg1.bit();
2977 if (reg2.is_valid()) regs |= reg2.bit();
2978 if (reg3.is_valid()) regs |= reg3.bit();
2979 if (reg4.is_valid()) regs |= reg4.bit();
2980 if (reg5.is_valid()) regs |= reg5.bit();
2981 if (reg6.is_valid()) regs |= reg6.bit();
2982 if (reg7.is_valid()) regs |= reg7.bit();
2983 if (reg8.is_valid()) regs |= reg8.bit();
2984 int n_of_non_aliasing_regs =
NumRegs(regs);
2986 return n_of_valid_regs != n_of_non_aliasing_regs;
// Creates a masm over an existing code region of |size| bytes so it can be
// patched in place; kGap extra bytes let the assembler's buffer checks pass.
// NOTE(review): extraction artifact — the size_(size) initializer and the
// body's cache-flush/DCHECK lines are partially missing; code left
// byte-identical.
2991 CodePatcher::CodePatcher(
byte* address,
int size)
2992 : address_(address),
2994 masm_(
NULL, address,
size + Assembler::kGap) {
2998 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3002 CodePatcher::~CodePatcher() {
3004 CpuFeatures::FlushICache(address_, size_);
3007 DCHECK(masm_.pc_ == address_ + size_);
3008 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
// CheckPageFlag masks an object's address down to its page, tests |mask|
// against the page's flags word (byte-wide test when the mask fits a byte),
// and branches on |cc|.  CheckPageFlagForMap does the same for a statically
// known map via the page-flags external reference, avoiding a scratch
// register; not usable with the serializer.
// NOTE(review): extraction artifact — parameter lines (object, mask, cc)
// and the uint8 branch condition are missing; code left byte-identical.
3012 void MacroAssembler::CheckPageFlag(
3017 Label* condition_met,
3018 Label::Distance condition_met_distance) {
3020 if (scratch.is(
object)) {
3021 and_(scratch, Immediate(~Page::kPageAlignmentMask));
3023 mov(scratch, Immediate(~Page::kPageAlignmentMask));
3024 and_(scratch,
object);
3027 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3028 static_cast<uint8_t
>(mask));
3030 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3032 j(
cc, condition_met, condition_met_distance);
3036 void MacroAssembler::CheckPageFlagForMap(
3040 Label* condition_met,
3041 Label::Distance condition_met_distance) {
3043 Page* page = Page::FromAddress(
map->address());
3044 DCHECK(!serializer_enabled());
3045 ExternalReference reference(ExternalReference::page_flags(page));
// Maps on evacuation candidates would move, invalidating the baked-in page.
3048 DCHECK(!isolate()->heap()->mark_compact_collector()->
3049 IsOnEvacuationCandidate(*
map));
3051 test_b(Operand::StaticVariable(reference),
static_cast<uint8_t
>(mask));
3053 test(Operand::StaticVariable(reference), Immediate(mask));
3055 j(
cc, condition_met, condition_met_distance);
// When |map| can be deprecated, tests its Deprecated bit in bit field 3 and
// (in lines missing here) jumps to |if_deprecated| when set.
// NOTE(review): extraction artifact — the initial map load into scratch and
// the branch are missing; code left byte-identical.
3059 void MacroAssembler::CheckMapDeprecated(Handle<Map>
map,
3061 Label* if_deprecated) {
3062 if (
map->CanBeDeprecated()) {
3064 mov(scratch,
FieldOperand(scratch, Map::kBitField3Offset));
3065 and_(scratch, Immediate(Map::Deprecated::kMask));
// JumpIfBlack delegates to HasColor with the black bit pattern "10".
// HasColor reads the object's two mark bits from the page's mark bitmap:
// it tests the first bit, shifts the mask, and tests the second; when the
// mask shift overflows a cell it falls to the word-boundary path and tests
// the first byte of the next cell instead.  Clobbers no flags assumptions
// beyond what the visible test/j sequence shows.
// NOTE(review): extraction artifact — the on_black label parameter, the
// first/second bit arguments at the JumpIfBlack call site, and closing
// braces are missing; code left byte-identical.
3071 void MacroAssembler::JumpIfBlack(Register
object,
3075 Label::Distance on_black_near) {
3076 HasColor(
object, scratch0, scratch1,
3077 on_black, on_black_near,
3079 DCHECK(strcmp(Marking::kBlackBitPattern,
"10") == 0);
3083 void MacroAssembler::HasColor(Register
object,
3084 Register bitmap_scratch,
3085 Register mask_scratch,
3087 Label::Distance has_color_distance,
3092 GetMarkBits(
object, bitmap_scratch, mask_scratch);
3094 Label other_color, word_boundary;
3095 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3096 j(first_bit == 1 ?
zero :
not_zero, &other_color, Label::kNear);
// Shift the mask to the second mark bit; zero means it crossed the cell.
3097 add(mask_scratch, mask_scratch);
3098 j(
zero, &word_boundary, Label::kNear);
3099 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3100 j(second_bit == 1 ?
not_zero :
zero, has_color, has_color_distance);
3101 jmp(&other_color, Label::kNear);
3103 bind(&word_boundary);
3104 test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize +
kPointerSize), 1);
3106 j(second_bit == 1 ?
not_zero :
zero, has_color, has_color_distance);
// Computes, for |addr_reg|, the mark-bitmap cell address in |bitmap_reg|
// and a single-bit mask in |mask_reg|.  Masks the address down to its page,
// derives the cell index from the page offset, and builds the in-cell bit
// mask from the low bits (uses ecx for the shift amount, per the visible
// add/and_ lines; the shl that applies it is missing from this listing).
// NOTE(review): extraction artifact; code left byte-identical.
3111 void MacroAssembler::GetMarkBits(Register addr_reg,
3112 Register bitmap_reg,
3113 Register mask_reg) {
3115 mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
3116 and_(bitmap_reg, addr_reg);
3122 (Page::kPageAlignmentMask >>
shift) & ~(Bitmap::kBytesPerCell - 1));
3124 add(bitmap_reg,
ecx);
3127 and_(
ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
3128 mov(mask_reg, Immediate(1));
// Incremental-marking helper: if |value| is white, mark it black when it is
// a data object (heap number, external string, or sequential string —
// computing its size into ecx to bump the page's live-bytes counter);
// otherwise jump to |value_is_white_and_not_data|.  Bit patterns: white
// "00", black "10", grey "11".
// NOTE(review): extraction artifact — the value register parameter, the
// early-out when already marked, the string-type dispatch, and the
// live-bytes add operand are partially missing; "&not" labels appear
// entity-garbled as "¬_...".  Code left byte-identical.
3133 void MacroAssembler::EnsureNotWhite(
3135 Register bitmap_scratch,
3136 Register mask_scratch,
3137 Label* value_is_white_and_not_data,
3138 Label::Distance distance) {
3140 GetMarkBits(value, bitmap_scratch, mask_scratch);
3143 DCHECK(strcmp(Marking::kWhiteBitPattern,
"00") == 0);
3144 DCHECK(strcmp(Marking::kBlackBitPattern,
"10") == 0);
3145 DCHECK(strcmp(Marking::kGreyBitPattern,
"11") == 0);
3146 DCHECK(strcmp(Marking::kImpossibleBitPattern,
"01") == 0);
3152 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3155 if (emit_debug_code()) {
// Debug: the second mark bit of a white object must be clear.
3160 add(mask_scratch, mask_scratch);
3161 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3162 j(
zero, &ok, Label::kNear);
3171 Register length =
ecx;
3172 Label not_heap_number;
3173 Label is_data_object;
3177 cmp(
map, isolate()->factory()->heap_number_map());
3178 j(
not_equal, ¬_heap_number, Label::kNear);
3179 mov(length, Immediate(HeapNumber::kSize));
3180 jmp(&is_data_object, Label::kNear);
3182 bind(¬_heap_number);
3188 Register instance_type =
ecx;
3191 j(
not_zero, value_is_white_and_not_data);
3201 j(
zero, ¬_external, Label::kNear);
3202 mov(length, Immediate(ExternalString::kSize));
3203 jmp(&is_data_object, Label::kNear);
3205 bind(¬_external);
// Sequential string: size = length * char_size + header, rounded up.
3210 add(length, Immediate(0x04));
3214 DCHECK_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
3215 DCHECK(SeqOneByteString::kMaxSize <=
3216 static_cast<int>(0xffffffffu >> (2 +
kSmiTagSize)));
3217 imul(length,
FieldOperand(value, String::kLengthOffset));
3222 bind(&is_data_object);
// Set the first mark bit (white "00" -> black "10") and bump live bytes.
3225 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
3227 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
3228 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
3230 if (emit_debug_code()) {
3231 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
3232 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
3233 Check(
less_equal, kLiveBytesCountOverflowChunkSize);
// EnumLength extracts the enum-cache length from the map's bit field 3
// (the preceding load is missing in this listing).  CheckEnumCache walks
// the prototype chain verifying every object has a valid enum cache and no
// elements (empty fixed array or empty slow-element dictionary), bailing to
// |call_runtime| otherwise; the loop runs until a null prototype.
// NOTE(review): extraction artifact — loop labels, the enum-length sentinel
// compare, and most branches are missing; code left byte-identical.
3240 void MacroAssembler::EnumLength(Register dst, Register
map) {
3243 and_(dst, Immediate(Map::EnumLengthBits::kMask));
3248 void MacroAssembler::CheckEnumCache(Label* call_runtime) {
3258 j(
equal, call_runtime);
3267 cmp(
edx, Immediate(Smi::FromInt(0)));
3276 cmp(
ecx, isolate()->factory()->empty_fixed_array());
3277 j(
equal, &no_elements);
3280 cmp(
ecx, isolate()->factory()->empty_slow_element_dictionary());
3285 cmp(
ecx, isolate()->factory()->null_value());
// Checks whether an AllocationMemento trails the JS array at
// |receiver_reg|: only possible for new-space objects below the current
// allocation top; compares the word after the array against the allocation
// memento map.  Jumps to |no_memento_found| when absent.
// NOTE(review): extraction artifact — the lea displacement and the branch
// after the allocation-top compare are missing; code left byte-identical.
3290 void MacroAssembler::TestJSArrayForAllocationMemento(
3291 Register receiver_reg,
3292 Register scratch_reg,
3293 Label* no_memento_found) {
3294 ExternalReference new_space_start =
3295 ExternalReference::new_space_start(isolate());
3296 ExternalReference new_space_allocation_top =
3297 ExternalReference::new_space_allocation_top_address(isolate());
3299 lea(scratch_reg, Operand(receiver_reg,
3301 cmp(scratch_reg, Immediate(new_space_start));
3302 j(
less, no_memento_found);
3303 cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
3305 cmp(
MemOperand(scratch_reg, -AllocationMemento::kSize),
3306 Immediate(isolate()->factory()->allocation_memento_map()));
// Walks the prototype chain of |object| (in a loop whose label and
// terminating branches are missing here), decoding each map's elements
// kind; intended to branch when a dictionary-elements object is found.
// The loop ends when the prototype is null.
// NOTE(review): extraction artifact — the object/scratch parameters, the
// DICTIONARY_ELEMENTS compare, and the loop branches are missing; code left
// byte-identical.
3310 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
3315 DCHECK(!scratch1.is(scratch0));
3316 Factory* factory = isolate()->factory();
3317 Register current = scratch0;
3321 mov(current,
object);
3325 mov(current,
FieldOperand(current, HeapObject::kMapOffset));
3326 mov(scratch1,
FieldOperand(current, Map::kBitField2Offset));
3327 DecodeField<Map::ElementsKindBits>(scratch1);
3330 mov(current,
FieldOperand(current, Map::kPrototypeOffset));
3331 cmp(current, Immediate(factory->null_value()));
// Truncating signed division by a compile-time constant using the
// magic-number multiplication technique (SignedDivisionByConstant):
// multiply by the magic multiplier (result's high half in edx), apply the
// sign-correction adds/subs, then arithmetic-shift by mag.shift.  The final
// sign fix-up (adding the dividend's sign bit) is missing from this
// listing.
// NOTE(review): extraction artifact — the imul and the DCHECKs on register
// choices are missing; code left byte-identical.
3336 void MacroAssembler::TruncatingDiv(Register dividend,
int32_t divisor) {
3339 base::MagicNumbersForDivision<uint32_t> mag =
3341 mov(
eax, Immediate(mag.multiplier));
// The multiplier is interpreted as signed; |neg| is its sign bit.
3343 bool neg = (mag.multiplier & (
static_cast<uint32_t>(1) << 31)) != 0;
3344 if (divisor > 0 && neg) add(
edx, dividend);
3345 if (divisor < 0 && !neg && mag.multiplier > 0) sub(
edx, dividend);
3346 if (mag.shift > 0) sar(
edx, mag.shift);
MacroAssembler(Isolate *isolate, void *buffer, int size)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi space(in MBytes)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
enable harmony numeric enable harmony object literal extensions Optimize object Array shift
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be aligned(ARM64 only)") DEFINE_STRING(expose_gc_as
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
@ PRETENURE_OLD_POINTER_SPACE
@ PRETENURE_OLD_DATA_SPACE
#define STATIC_ASSERT(test)
bool IsPowerOfTwo32(uint32_t value)
MagicNumbersForDivision< T > SignedDivisionByConstant(T d)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
const intptr_t kHeapObjectTagMask
const uint32_t kStringEncodingMask
bool AreAliased(const CPURegister ®1, const CPURegister ®2, const CPURegister ®3=NoReg, const CPURegister ®4=NoReg, const CPURegister ®5=NoReg, const CPURegister ®6=NoReg, const CPURegister ®7=NoReg, const CPURegister ®8=NoReg)
TypeImpl< ZoneTypeConfig > Type
const int kNumSafepointRegisters
const uint32_t kNotStringTag
Operand FieldOperand(Register object, int offset)
const int kPointerSizeLog2
const uint32_t kStringTag
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
@ FAST_HOLEY_SMI_ELEMENTS
const uint32_t kOneByteStringTag
const intptr_t kObjectAlignmentMask
const bool FLAG_enable_slow_asserts
int NumRegs(RegList reglist)
static const int kInvalidEnumCacheSentinel
const char * GetBailoutReason(BailoutReason reason)
Condition NegateCondition(Condition cond)
@ times_half_pointer_size
@ times_twice_pointer_size
const uint32_t kStringRepresentationMask
OStream & dec(OStream &os)
const uint32_t kIsIndirectStringTag
int TenToThe(int exponent)
kFeedbackVectorOffset flag
const uint32_t kInternalizedTag
static const int kNumberDictionaryProbes
const intptr_t kSmiTagMask
@ REGISTER_VALUE_IS_INT32
const uint32_t kIsNotInternalizedMask
Operand ApiParameterOperand(int index)
const uint32_t kNaNOrInfinityLowerBoundUpper32
bool is_intn(int64_t x, unsigned n)
static const int kNoCodeAgeSequenceLength
const uint32_t kHoleNanLower32
const uint32_t kIsNotStringMask
bool IsAligned(T value, U alignment)
const intptr_t kDoubleAlignment
@ kPointersToHereAreAlwaysInteresting
const intptr_t kPointerAlignment
void CopyBytes(uint8_t *target, uint8_t *source)
const intptr_t kDoubleAlignmentMask
const uint32_t kIsIndirectStringMask
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Debugger support for the V8 JavaScript engine.
static Handle< Value > Throw(Isolate *isolate, const char *message)
bool is(Register reg) const