7 #if V8_TARGET_ARCH_IA32
26 : Assembler(arg_isolate, buffer,
size),
27 generating_stub_(
false),
29 if (isolate() !=
NULL) {
31 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
37 void MacroAssembler::Load(Register dst,
const Operand& src, Representation r) {
41 }
else if (r.IsUInteger8()) {
43 }
else if (r.IsInteger16()) {
45 }
else if (r.IsUInteger16()) {
53 void MacroAssembler::Store(Register src,
const Operand& dst, Representation r) {
55 if (r.IsInteger8() || r.IsUInteger8()) {
57 }
else if (r.IsInteger16() || r.IsUInteger16()) {
60 if (r.IsHeapObject()) {
62 }
else if (r.IsSmi()) {
70 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
71 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
72 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
73 mov(destination, value);
76 ExternalReference roots_array_start =
77 ExternalReference::roots_array_start(isolate());
78 mov(destination, Immediate(index));
79 mov(destination, Operand::StaticArray(destination,
85 void MacroAssembler::StoreRoot(Register source,
87 Heap::RootListIndex index) {
88 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
89 ExternalReference roots_array_start =
90 ExternalReference::roots_array_start(isolate());
91 mov(scratch, Immediate(index));
97 void MacroAssembler::CompareRoot(Register with,
99 Heap::RootListIndex index) {
100 ExternalReference roots_array_start =
101 ExternalReference::roots_array_start(isolate());
102 mov(scratch, Immediate(index));
103 cmp(with, Operand::StaticArray(scratch,
109 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
110 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
111 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
116 void MacroAssembler::CompareRoot(
const Operand& with,
117 Heap::RootListIndex index) {
118 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
119 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
124 void MacroAssembler::InNewSpace(
128 Label* condition_met,
129 Label::Distance condition_met_distance) {
131 if (scratch.is(
object)) {
132 and_(scratch, Immediate(~Page::kPageAlignmentMask));
134 mov(scratch, Immediate(~Page::kPageAlignmentMask));
135 and_(scratch,
object);
138 DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
139 DCHECK(MemoryChunk::IN_TO_SPACE < 8);
140 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
141 | (1 << MemoryChunk::IN_TO_SPACE);
143 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
144 static_cast<uint8_t
>(mask));
145 j(
cc, condition_met, condition_met_distance);
149 void MacroAssembler::RememberedSetHelper(
154 MacroAssembler::RememberedSetFinalAction and_then) {
156 if (emit_debug_code()) {
158 JumpIfNotInNewSpace(
object, scratch, &ok, Label::kNear);
163 ExternalReference store_buffer =
164 ExternalReference::store_buffer_top(isolate());
165 mov(scratch, Operand::StaticVariable(store_buffer));
167 mov(Operand(scratch, 0), addr);
171 mov(Operand::StaticVariable(store_buffer), scratch);
174 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
175 if (and_then == kReturnAtEnd) {
176 Label buffer_overflowed;
177 j(
not_equal, &buffer_overflowed, Label::kNear);
179 bind(&buffer_overflowed);
181 DCHECK(and_then == kFallThroughAtEnd);
182 j(
equal, &done, Label::kNear);
184 StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
185 CallStub(&store_buffer_overflow);
186 if (and_then == kReturnAtEnd) {
189 DCHECK(and_then == kFallThroughAtEnd);
195 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
196 XMMRegister scratch_reg,
197 Register result_reg) {
200 xorps(scratch_reg, scratch_reg);
201 cvtsd2si(result_reg, input_reg);
202 test(result_reg, Immediate(0xFFFFFF00));
203 j(
zero, &done, Label::kNear);
204 cmp(result_reg, Immediate(0x1));
205 j(
overflow, &conv_failure, Label::kNear);
206 mov(result_reg, Immediate(0));
207 setcc(
sign, result_reg);
208 sub(result_reg, Immediate(1));
209 and_(result_reg, Immediate(255));
210 jmp(&done, Label::kNear);
212 Move(result_reg, Immediate(0));
213 ucomisd(input_reg, scratch_reg);
214 j(
below, &done, Label::kNear);
215 Move(result_reg, Immediate(255));
220 void MacroAssembler::ClampUint8(Register reg) {
222 test(reg, Immediate(0xFFFFFF00));
223 j(
zero, &done, Label::kNear);
230 void MacroAssembler::SlowTruncateToI(Register result_reg,
233 DoubleToIStub stub(isolate(), input_reg, result_reg, offset,
true);
234 call(stub.GetCode(), RelocInfo::CODE_TARGET);
238 void MacroAssembler::TruncateDoubleToI(Register result_reg,
239 XMMRegister input_reg) {
241 cvttsd2si(result_reg, Operand(input_reg));
242 cmp(result_reg, 0x1);
247 SlowTruncateToI(result_reg,
esp, 0);
253 void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
256 Label* lost_precision, Label* is_nan,
257 Label* minus_zero, Label::Distance dst) {
258 DCHECK(!input_reg.is(scratch));
259 cvttsd2si(result_reg, Operand(input_reg));
260 Cvtsi2sd(scratch, Operand(result_reg));
261 ucomisd(scratch, input_reg);
268 test(result_reg, Operand(result_reg));
270 movmskpd(result_reg, input_reg);
281 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
282 Register input_reg) {
283 Label done, slow_case;
285 if (CpuFeatures::IsSupported(
SSE3)) {
286 CpuFeatureScope scope(
this,
SSE3);
290 fld_d(
FieldOperand(input_reg, HeapNumber::kValueOffset));
292 mov(result_reg,
FieldOperand(input_reg, HeapNumber::kExponentOffset));
293 and_(result_reg, HeapNumber::kExponentMask);
295 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
296 cmp(Operand(result_reg), Immediate(kTooBigExponent));
302 fisttp_d(Operand(
esp, 0));
303 mov(result_reg, Operand(
esp, 0));
305 jmp(&done, Label::kNear);
309 if (input_reg.is(result_reg)) {
312 fstp_d(Operand(
esp, 0));
313 SlowTruncateToI(result_reg,
esp, 0);
317 SlowTruncateToI(result_reg, input_reg);
321 cvttsd2si(result_reg, Operand(
xmm0));
322 cmp(result_reg, 0x1);
326 ExternalReference min_int = ExternalReference::address_of_min_int();
327 ucomisd(
xmm0, Operand::StaticVariable(min_int));
330 jmp(&done, Label::kNear);
334 if (input_reg.is(result_reg)) {
338 SlowTruncateToI(result_reg,
esp, 0);
341 SlowTruncateToI(result_reg, input_reg);
348 void MacroAssembler::LoadUint32(XMMRegister dst,
351 cmp(src, Immediate(0));
352 ExternalReference uint32_bias =
353 ExternalReference::address_of_uint32_bias();
356 addsd(dst, Operand::StaticVariable(uint32_bias));
361 void MacroAssembler::RecordWriteArray(
383 Register dst = index;
387 RecordWrite(
object, dst, value, save_fp, remembered_set_action,
394 if (emit_debug_code()) {
395 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
396 mov(index, Immediate(bit_cast<int32_t>(
kZapValue)));
401 void MacroAssembler::RecordWriteField(
416 JumpIfSmi(value, &done, Label::kNear);
424 if (emit_debug_code()) {
427 j(
zero, &ok, Label::kNear);
432 RecordWrite(
object, dst, value, save_fp, remembered_set_action,
439 if (emit_debug_code()) {
440 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
441 mov(dst, Immediate(bit_cast<int32_t>(
kZapValue)));
446 void MacroAssembler::RecordWriteForMap(
454 Register address = scratch1;
455 Register value = scratch2;
456 if (emit_debug_code()) {
458 lea(address,
FieldOperand(
object, HeapObject::kMapOffset));
460 j(
zero, &ok, Label::kNear);
465 DCHECK(!
object.is(value));
466 DCHECK(!
object.is(address));
467 DCHECK(!value.is(address));
468 AssertNotSmi(
object);
470 if (!FLAG_incremental_marking) {
475 lea(address,
FieldOperand(
object, HeapObject::kMapOffset));
481 DCHECK(!isolate()->heap()->InNewSpace(*
map));
482 CheckPageFlagForMap(
map,
483 MemoryChunk::kPointersToHereAreInterestingMask,
495 isolate()->counters()->write_barriers_static()->Increment();
496 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
500 if (emit_debug_code()) {
501 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
502 mov(scratch1, Immediate(bit_cast<int32_t>(
kZapValue)));
503 mov(scratch2, Immediate(bit_cast<int32_t>(
kZapValue)));
508 void MacroAssembler::RecordWrite(
516 DCHECK(!
object.is(value));
517 DCHECK(!
object.is(address));
518 DCHECK(!value.is(address));
519 AssertNotSmi(
object);
522 !FLAG_incremental_marking) {
526 if (emit_debug_code()) {
528 cmp(value, Operand(address, 0));
529 j(
equal, &ok, Label::kNear);
540 JumpIfSmi(value, &done, Label::kNear);
546 MemoryChunk::kPointersToHereAreInterestingMask,
551 CheckPageFlag(
object,
553 MemoryChunk::kPointersFromHereAreInterestingMask,
558 RecordWriteStub stub(isolate(),
object, value, address, remembered_set_action,
565 isolate()->counters()->write_barriers_static()->Increment();
566 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
570 if (emit_debug_code()) {
571 mov(address, Immediate(bit_cast<int32_t>(
kZapValue)));
572 mov(value, Immediate(bit_cast<int32_t>(
kZapValue)));
577 void MacroAssembler::DebugBreak() {
578 Move(
eax, Immediate(0));
579 mov(
ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
580 CEntryStub ces(isolate(), 1);
585 void MacroAssembler::Cvtsi2sd(XMMRegister dst,
const Operand& src) {
591 bool MacroAssembler::IsUnsafeImmediate(
const Immediate& x) {
592 static const int kMaxImmediateBits = 17;
593 if (!RelocInfo::IsNone(x.rmode_))
return false;
594 return !
is_intn(x.x_, kMaxImmediateBits);
598 void MacroAssembler::SafeMove(Register dst,
const Immediate& x) {
599 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
600 Move(dst, Immediate(x.x_ ^ jit_cookie()));
601 xor_(dst, jit_cookie());
608 void MacroAssembler::SafePush(
const Immediate& x) {
609 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
610 push(Immediate(x.x_ ^ jit_cookie()));
611 xor_(Operand(
esp, 0), Immediate(jit_cookie()));
618 void MacroAssembler::CmpObjectType(Register heap_object,
622 CmpInstanceType(
map, type);
626 void MacroAssembler::CmpInstanceType(Register
map,
InstanceType type) {
628 static_cast<int8_t
>(type));
632 void MacroAssembler::CheckFastElements(Register
map,
634 Label::Distance distance) {
640 Map::kMaximumBitField2FastHoleyElementValue);
641 j(
above, fail, distance);
645 void MacroAssembler::CheckFastObjectElements(Register
map,
647 Label::Distance distance) {
653 Map::kMaximumBitField2FastHoleySmiElementValue);
656 Map::kMaximumBitField2FastHoleyElementValue);
657 j(
above, fail, distance);
661 void MacroAssembler::CheckFastSmiElements(Register
map,
663 Label::Distance distance) {
667 Map::kMaximumBitField2FastHoleySmiElementValue);
668 j(
above, fail, distance);
672 void MacroAssembler::StoreNumberToDoubleElements(
673 Register maybe_number,
677 XMMRegister scratch2,
679 int elements_offset) {
680 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
681 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
683 CheckMap(maybe_number,
684 isolate()->factory()->heap_number_map(),
695 ExternalReference canonical_nan_reference =
696 ExternalReference::address_of_canonical_non_hole_nan();
697 movsd(scratch2,
FieldOperand(maybe_number, HeapNumber::kValueOffset));
698 bind(&have_double_value);
700 FixedDoubleArray::kHeaderSize - elements_offset),
707 j(
greater, &is_nan, Label::kNear);
708 cmp(
FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
711 movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
712 jmp(&have_double_value, Label::kNear);
717 mov(scratch1, maybe_number);
719 Cvtsi2sd(scratch2, scratch1);
721 FixedDoubleArray::kHeaderSize - elements_offset),
727 void MacroAssembler::CompareMap(Register obj, Handle<Map>
map) {
732 void MacroAssembler::CheckMap(Register obj,
737 JumpIfSmi(obj, fail);
740 CompareMap(obj,
map);
745 void MacroAssembler::DispatchMap(Register obj,
748 Handle<Code> success,
752 JumpIfSmi(obj, &fail);
761 Condition MacroAssembler::IsObjectStringType(Register heap_object,
763 Register instance_type) {
772 Condition MacroAssembler::IsObjectNameType(Register heap_object,
774 Register instance_type) {
782 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
787 IsInstanceJSObjectType(
map, scratch, fail);
791 void MacroAssembler::IsInstanceJSObjectType(Register
map,
802 void MacroAssembler::FCmp() {
808 void MacroAssembler::AssertNumber(Register
object) {
809 if (emit_debug_code()) {
811 JumpIfSmi(
object, &ok);
813 isolate()->factory()->heap_number_map());
814 Check(
equal, kOperandNotANumber);
820 void MacroAssembler::AssertSmi(Register
object) {
821 if (emit_debug_code()) {
823 Check(
equal, kOperandIsNotASmi);
828 void MacroAssembler::AssertString(Register
object) {
829 if (emit_debug_code()) {
831 Check(
not_equal, kOperandIsASmiAndNotAString);
833 mov(
object,
FieldOperand(
object, HeapObject::kMapOffset));
836 Check(
below, kOperandIsNotAString);
841 void MacroAssembler::AssertName(Register
object) {
842 if (emit_debug_code()) {
844 Check(
not_equal, kOperandIsASmiAndNotAName);
846 mov(
object,
FieldOperand(
object, HeapObject::kMapOffset));
854 void MacroAssembler::AssertUndefinedOrAllocationSite(Register
object) {
855 if (emit_debug_code()) {
857 AssertNotSmi(
object);
858 cmp(
object, isolate()->factory()->undefined_value());
859 j(
equal, &done_checking);
861 Immediate(isolate()->factory()->allocation_site_map()));
862 Assert(
equal, kExpectedUndefinedOrCell);
863 bind(&done_checking);
868 void MacroAssembler::AssertNotSmi(Register
object) {
869 if (emit_debug_code()) {
876 void MacroAssembler::StubPrologue() {
884 void MacroAssembler::Prologue(
bool code_pre_aging) {
885 PredictableCodeSizeScope predictible_code_size_scope(
this,
887 if (code_pre_aging) {
889 call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
890 RelocInfo::CODE_AGE_SEQUENCE);
905 push(Immediate(Smi::FromInt(type)));
906 push(Immediate(CodeObject()));
907 if (emit_debug_code()) {
908 cmp(Operand(
esp, 0), Immediate(isolate()->factory()->undefined_value()));
909 Check(
not_equal, kCodeObjectNotProperlyPatched);
915 if (emit_debug_code()) {
916 cmp(Operand(
ebp, StandardFrameConstants::kMarkerOffset),
917 Immediate(Smi::FromInt(type)));
918 Check(
equal, kStackFrameTypesMustMatch);
924 void MacroAssembler::EnterExitFramePrologue() {
935 push(Immediate(CodeObject()));
938 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
939 ExternalReference context_address(Isolate::kContextAddress, isolate());
940 mov(Operand::StaticVariable(c_entry_fp_address),
ebp);
941 mov(Operand::StaticVariable(context_address),
esi);
945 void MacroAssembler::EnterExitFrameEpilogue(
int argc,
bool save_doubles) {
952 for (
int i = 0;
i < XMMRegister::kMaxNumRegisters;
i++) {
953 XMMRegister reg = XMMRegister::from_code(
i);
961 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
962 if (kFrameAlignment > 0) {
964 and_(
esp, -kFrameAlignment);
968 mov(Operand(
ebp, ExitFrameConstants::kSPOffset),
esp);
972 void MacroAssembler::EnterExitFrame(
bool save_doubles) {
973 EnterExitFramePrologue();
976 int offset = StandardFrameConstants::kCallerSPOffset -
kPointerSize;
981 EnterExitFrameEpilogue(3, save_doubles);
985 void MacroAssembler::EnterApiExitFrame(
int argc) {
986 EnterExitFramePrologue();
987 EnterExitFrameEpilogue(argc,
false);
991 void MacroAssembler::LeaveExitFrame(
bool save_doubles) {
995 for (
int i = 0;
i < XMMRegister::kMaxNumRegisters;
i++) {
996 XMMRegister reg = XMMRegister::from_code(
i);
1011 LeaveExitFrameEpilogue(
true);
1015 void MacroAssembler::LeaveExitFrameEpilogue(
bool restore_context) {
1017 ExternalReference context_address(Isolate::kContextAddress, isolate());
1018 if (restore_context) {
1019 mov(
esi, Operand::StaticVariable(context_address));
1022 mov(Operand::StaticVariable(context_address), Immediate(0));
1026 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
1028 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
1032 void MacroAssembler::LeaveApiExitFrame(
bool restore_context) {
1036 LeaveExitFrameEpilogue(restore_context);
1040 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
1041 int handler_index) {
1052 if (kind == StackHandler::JS_ENTRY) {
1057 push(Immediate(Smi::FromInt(0)));
1064 StackHandler::IndexField::encode(handler_index) |
1065 StackHandler::KindField::encode(kind);
1066 push(Immediate(state));
1070 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1071 push(Operand::StaticVariable(handler_address));
1073 mov(Operand::StaticVariable(handler_address),
esp);
1077 void MacroAssembler::PopTryHandler() {
1079 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1080 pop(Operand::StaticVariable(handler_address));
1085 void MacroAssembler::JumpToHandlerEntry() {
1090 shr(
edx, StackHandler::kKindWidth);
1108 if (!value.is(
eax)) {
1112 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1113 mov(
esp, Operand::StaticVariable(handler_address));
1115 pop(Operand::StaticVariable(handler_address));
1130 j(
zero, &skip, Label::kNear);
1131 mov(Operand(
ebp, StandardFrameConstants::kContextOffset),
esi);
1134 JumpToHandlerEntry();
1138 void MacroAssembler::ThrowUncatchable(Register value) {
1148 if (!value.is(
eax)) {
1152 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1153 mov(
esp, Operand::StaticVariable(handler_address));
1156 Label fetch_next, check_kind;
1157 jmp(&check_kind, Label::kNear);
1159 mov(
esp, Operand(
esp, StackHandlerConstants::kNextOffset));
1163 test(Operand(
esp, StackHandlerConstants::kStateOffset),
1164 Immediate(StackHandler::KindField::kMask));
1168 pop(Operand::StaticVariable(handler_address));
1178 JumpToHandlerEntry();
1182 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1186 Label same_contexts;
1188 DCHECK(!holder_reg.is(scratch1));
1189 DCHECK(!holder_reg.is(scratch2));
1190 DCHECK(!scratch1.is(scratch2));
1193 mov(scratch1, Operand(
ebp, StandardFrameConstants::kContextOffset));
1196 if (emit_debug_code()) {
1197 cmp(scratch1, Immediate(0));
1198 Check(
not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
1202 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX *
kPointerSize;
1204 mov(scratch1,
FieldOperand(scratch1, GlobalObject::kNativeContextOffset));
1207 if (emit_debug_code()) {
1210 isolate()->factory()->native_context_map());
1211 Check(
equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1215 cmp(scratch1,
FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1216 j(
equal, &same_contexts);
1225 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1228 if (emit_debug_code()) {
1229 cmp(scratch2, isolate()->factory()->null_value());
1230 Check(
not_equal, kJSGlobalProxyContextShouldNotBeNull);
1234 isolate()->factory()->native_context_map());
1235 Check(
equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1238 int token_offset = Context::kHeaderSize +
1244 bind(&same_contexts);
1253 void MacroAssembler::GetNumberHash(Register
r0, Register scratch) {
1255 if (serializer_enabled()) {
1256 ExternalReference roots_array_start =
1257 ExternalReference::roots_array_start(isolate());
1258 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1264 int32_t seed = isolate()->heap()->HashSeed();
1265 xor_(
r0, Immediate(seed));
1293 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1318 GetNumberHash(
r0,
r1);
1321 mov(
r1,
FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1331 add(
r2, Immediate(SeededNumberDictionary::GetProbeOffset(
i)));
1336 DCHECK(SeededNumberDictionary::kEntrySize == 3);
1343 SeededNumberDictionary::kElementsStartOffset));
1353 const int kDetailsOffset =
1354 SeededNumberDictionary::kElementsStartOffset + 2 *
kPointerSize;
1357 Immediate(PropertyDetails::TypeField::kMask <<
kSmiTagSize));
1361 const int kValueOffset =
1362 SeededNumberDictionary::kElementsStartOffset +
kPointerSize;
1367 void MacroAssembler::LoadAllocationTopHelper(Register result,
1370 ExternalReference allocation_top =
1371 AllocationUtils::GetAllocationTopReference(isolate(),
flags);
1379 cmp(result, Operand::StaticVariable(allocation_top));
1380 Check(
equal, kUnexpectedAllocationTop);
1386 if (scratch.is(
no_reg)) {
1387 mov(result, Operand::StaticVariable(allocation_top));
1389 mov(scratch, Immediate(allocation_top));
1390 mov(result, Operand(scratch, 0));
1395 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1398 if (emit_debug_code()) {
1400 Check(
zero, kUnalignedAllocationInNewSpace);
1403 ExternalReference allocation_top =
1404 AllocationUtils::GetAllocationTopReference(isolate(),
flags);
1407 if (scratch.is(
no_reg)) {
1408 mov(Operand::StaticVariable(allocation_top), result_end);
1410 mov(Operand(scratch, 0), result_end);
1415 void MacroAssembler::Allocate(
int object_size,
1417 Register result_end,
1422 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
1423 if (!FLAG_inline_new) {
1424 if (emit_debug_code()) {
1426 mov(result, Immediate(0x7091));
1427 if (result_end.is_valid()) {
1428 mov(result_end, Immediate(0x7191));
1430 if (scratch.is_valid()) {
1431 mov(scratch, Immediate(0x7291));
1437 DCHECK(!result.is(result_end));
1440 LoadAllocationTopHelper(result, scratch,
flags);
1442 ExternalReference allocation_limit =
1443 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
1454 cmp(result, Operand::StaticVariable(allocation_limit));
1457 mov(Operand(result, 0),
1458 Immediate(isolate()->factory()->one_pointer_filler_map()));
1464 Register top_reg = result_end.is_valid() ? result_end : result;
1465 if (!top_reg.is(result)) {
1466 mov(top_reg, result);
1468 add(top_reg, Immediate(object_size));
1469 j(
carry, gc_required);
1470 cmp(top_reg, Operand::StaticVariable(allocation_limit));
1471 j(
above, gc_required);
1474 UpdateAllocationTopHelper(top_reg, scratch,
flags);
1478 if (top_reg.is(result)) {
1482 sub(result, Immediate(object_size));
1484 }
else if (tag_result) {
1491 void MacroAssembler::Allocate(
int header_size,
1493 Register element_count,
1496 Register result_end,
1501 if (!FLAG_inline_new) {
1502 if (emit_debug_code()) {
1504 mov(result, Immediate(0x7091));
1505 mov(result_end, Immediate(0x7191));
1506 if (scratch.is_valid()) {
1507 mov(scratch, Immediate(0x7291));
1514 DCHECK(!result.is(result_end));
1517 LoadAllocationTopHelper(result, scratch,
flags);
1519 ExternalReference allocation_limit =
1520 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
1531 cmp(result, Operand::StaticVariable(allocation_limit));
1534 mov(Operand(result, 0),
1535 Immediate(isolate()->factory()->one_pointer_filler_map()));
1549 element_size =
static_cast<ScaleFactor>(element_size - 1);
1553 lea(result_end, Operand(element_count, element_size, header_size));
1554 add(result_end, result);
1555 j(
carry, gc_required);
1556 cmp(result_end, Operand::StaticVariable(allocation_limit));
1557 j(
above, gc_required);
1565 UpdateAllocationTopHelper(result_end, scratch,
flags);
1569 void MacroAssembler::Allocate(Register object_size,
1571 Register result_end,
1576 if (!FLAG_inline_new) {
1577 if (emit_debug_code()) {
1579 mov(result, Immediate(0x7091));
1580 mov(result_end, Immediate(0x7191));
1581 if (scratch.is_valid()) {
1582 mov(scratch, Immediate(0x7291));
1589 DCHECK(!result.is(result_end));
1592 LoadAllocationTopHelper(result, scratch,
flags);
1594 ExternalReference allocation_limit =
1595 AllocationUtils::GetAllocationLimitReference(isolate(),
flags);
1606 cmp(result, Operand::StaticVariable(allocation_limit));
1609 mov(Operand(result, 0),
1610 Immediate(isolate()->factory()->one_pointer_filler_map()));
1616 if (!object_size.is(result_end)) {
1617 mov(result_end, object_size);
1619 add(result_end, result);
1620 j(
carry, gc_required);
1621 cmp(result_end, Operand::StaticVariable(allocation_limit));
1622 j(
above, gc_required);
1631 UpdateAllocationTopHelper(result_end, scratch,
flags);
1635 void MacroAssembler::UndoAllocationInNewSpace(Register
object) {
1636 ExternalReference new_space_allocation_top =
1637 ExternalReference::new_space_allocation_top_address(isolate());
1642 cmp(
object, Operand::StaticVariable(new_space_allocation_top));
1643 Check(
below, kUndoAllocationOfNonAllocatedMemory);
1645 mov(Operand::StaticVariable(new_space_allocation_top),
object);
1649 void MacroAssembler::AllocateHeapNumber(Register result,
1655 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1659 ? isolate()->factory()->mutable_heap_number_map()
1660 : isolate()->factory()->heap_number_map();
1667 void MacroAssembler::AllocateTwoByteString(Register result,
1672 Label* gc_required) {
1682 Allocate(SeqTwoByteString::kHeaderSize,
1694 Immediate(isolate()->factory()->string_map()));
1695 mov(scratch1, length);
1697 mov(
FieldOperand(result, String::kLengthOffset), scratch1);
1699 Immediate(String::kEmptyHashField));
1703 void MacroAssembler::AllocateOneByteString(Register result, Register length,
1704 Register scratch1, Register scratch2,
1706 Label* gc_required) {
1710 mov(scratch1, length);
1716 Allocate(SeqOneByteString::kHeaderSize,
1728 Immediate(isolate()->factory()->one_byte_string_map()));
1729 mov(scratch1, length);
1731 mov(
FieldOperand(result, String::kLengthOffset), scratch1);
1733 Immediate(String::kEmptyHashField));
1737 void MacroAssembler::AllocateOneByteString(Register result,
int length,
1738 Register scratch1, Register scratch2,
1739 Label* gc_required) {
1743 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1748 Immediate(isolate()->factory()->one_byte_string_map()));
1750 Immediate(Smi::FromInt(length)));
1752 Immediate(String::kEmptyHashField));
1756 void MacroAssembler::AllocateTwoByteConsString(Register result,
1759 Label* gc_required) {
1761 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
1766 Immediate(isolate()->factory()->cons_string_map()));
1770 void MacroAssembler::AllocateOneByteConsString(Register result,
1773 Label* gc_required) {
1774 Allocate(ConsString::kSize,
1783 Immediate(isolate()->factory()->cons_one_byte_string_map()));
1787 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1790 Label* gc_required) {
1792 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1797 Immediate(isolate()->factory()->sliced_string_map()));
1801 void MacroAssembler::AllocateOneByteSlicedString(Register result,
1804 Label* gc_required) {
1806 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1811 Immediate(isolate()->factory()->sliced_one_byte_string_map()));
1824 Register destination,
1827 Label short_loop, len4, len8, len12, done, short_string;
1831 cmp(length, Immediate(4));
1832 j(
below, &short_string, Label::kNear);
1837 mov(scratch, Operand(source, length,
times_1, -4));
1838 mov(Operand(destination, length,
times_1, -4), scratch);
1840 cmp(length, Immediate(8));
1842 cmp(length, Immediate(12));
1844 cmp(length, Immediate(16));
1850 and_(scratch, Immediate(0x3));
1851 add(destination, scratch);
1852 jmp(&done, Label::kNear);
1855 mov(scratch, Operand(source, 8));
1856 mov(Operand(destination, 8), scratch);
1858 mov(scratch, Operand(source, 4));
1859 mov(Operand(destination, 4), scratch);
1861 mov(scratch, Operand(source, 0));
1862 mov(Operand(destination, 0), scratch);
1863 add(destination, length);
1864 jmp(&done, Label::kNear);
1866 bind(&short_string);
1867 test(length, length);
1868 j(
zero, &done, Label::kNear);
1871 mov_b(scratch, Operand(source, 0));
1872 mov_b(Operand(destination, 0), scratch);
1882 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1883 Register end_offset,
1888 mov(Operand(start_offset, 0), filler);
1891 cmp(start_offset, end_offset);
1896 void MacroAssembler::BooleanBitTest(Register
object,
1903 test_b(
FieldOperand(
object, field_offset + byte_index),
1904 static_cast<byte>(1 << byte_bit_index));
1909 void MacroAssembler::NegativeZeroTest(Register result,
1911 Label* then_label) {
1913 test(result, result);
1916 j(
sign, then_label);
1921 void MacroAssembler::NegativeZeroTest(Register result,
1925 Label* then_label) {
1927 test(result, result);
1931 j(
sign, then_label);
1936 void MacroAssembler::TryGetFunctionPrototype(Register
function,
1940 bool miss_on_bound_function) {
1942 if (miss_on_bound_function) {
1944 JumpIfSmi(
function, miss);
1952 FieldOperand(
function, JSFunction::kSharedFunctionInfoOffset));
1953 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1954 SharedFunctionInfo::kBoundFunction);
1958 movzx_b(scratch,
FieldOperand(result, Map::kBitFieldOffset));
1959 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1965 FieldOperand(
function, JSFunction::kPrototypeOrInitialMapOffset));
1970 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1975 CmpObjectType(result,
MAP_TYPE, scratch);
1979 mov(result,
FieldOperand(result, Map::kPrototypeOffset));
1981 if (miss_on_bound_function) {
1986 bind(&non_instance);
1987 mov(result,
FieldOperand(result, Map::kConstructorOffset));
1995 void MacroAssembler::CallStub(
CodeStub* stub, TypeFeedbackId ast_id) {
1996 DCHECK(AllowThisStubCall(stub));
1997 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
2001 void MacroAssembler::TailCallStub(
CodeStub* stub) {
2002 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
2006 void MacroAssembler::StubReturn(
int argc) {
2007 DCHECK(argc >= 1 && generating_stub());
2012 bool MacroAssembler::AllowThisStubCall(
CodeStub* stub) {
2013 return has_frame_ || !stub->SometimesSetsUpAFrame();
2017 void MacroAssembler::IndexFromHash(Register hash, Register index) {
2022 (1 << String::kArrayIndexValueBits));
2023 if (!index.is(hash)) {
2026 DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
2030 void MacroAssembler::CallRuntime(
const Runtime::Function* f,
2036 CHECK(f->nargs < 0 || f->nargs == num_arguments);
2042 Move(
eax, Immediate(num_arguments));
2043 mov(
ebx, Immediate(ExternalReference(f, isolate())));
2044 CEntryStub ces(isolate(), 1, save_doubles);
2049 void MacroAssembler::CallExternalReference(ExternalReference ref,
2050 int num_arguments) {
2051 mov(
eax, Immediate(num_arguments));
2052 mov(
ebx, Immediate(ref));
2054 CEntryStub stub(isolate(), 1);
2059 void MacroAssembler::TailCallExternalReference(
const ExternalReference& ext,
2066 Move(
eax, Immediate(num_arguments));
2067 JumpToExternalReference(ext);
2071 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
2074 TailCallExternalReference(ExternalReference(fid, isolate()),
2085 void MacroAssembler::PrepareCallApiFunction(
int argc) {
2086 EnterApiExitFrame(argc);
2087 if (emit_debug_code()) {
2093 void MacroAssembler::CallApiFunctionAndReturn(
2094 Register function_address,
2095 ExternalReference thunk_ref,
2096 Operand thunk_last_arg,
2098 Operand return_value_operand,
2099 Operand* context_restore_operand) {
2100 ExternalReference next_address =
2101 ExternalReference::handle_scope_next_address(isolate());
2102 ExternalReference limit_address =
2103 ExternalReference::handle_scope_limit_address(isolate());
2104 ExternalReference level_address =
2105 ExternalReference::handle_scope_level_address(isolate());
2109 mov(
ebx, Operand::StaticVariable(next_address));
2110 mov(
edi, Operand::StaticVariable(limit_address));
2111 add(Operand::StaticVariable(level_address), Immediate(1));
2113 if (FLAG_log_timer_events) {
2114 FrameScope frame(
this, StackFrame::MANUAL);
2115 PushSafepointRegisters();
2116 PrepareCallCFunction(1,
eax);
2117 mov(Operand(
esp, 0),
2118 Immediate(ExternalReference::isolate_address(isolate())));
2119 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2120 PopSafepointRegisters();
2124 Label profiler_disabled;
2125 Label end_profiler_check;
2126 mov(
eax, Immediate(ExternalReference::is_profiling_address(isolate())));
2127 cmpb(Operand(
eax, 0), 0);
2128 j(
zero, &profiler_disabled);
2131 mov(thunk_last_arg, function_address);
2133 mov(
eax, Immediate(thunk_ref));
2135 jmp(&end_profiler_check);
2137 bind(&profiler_disabled);
2139 call(function_address);
2140 bind(&end_profiler_check);
2142 if (FLAG_log_timer_events) {
2143 FrameScope frame(
this, StackFrame::MANUAL);
2144 PushSafepointRegisters();
2145 PrepareCallCFunction(1,
eax);
2146 mov(Operand(
esp, 0),
2147 Immediate(ExternalReference::isolate_address(isolate())));
2148 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2149 PopSafepointRegisters();
2154 mov(
eax, return_value_operand);
2156 Label promote_scheduled_exception;
2157 Label exception_handled;
2158 Label delete_allocated_handles;
2159 Label leave_exit_frame;
2164 mov(Operand::StaticVariable(next_address),
ebx);
2165 sub(Operand::StaticVariable(level_address), Immediate(1));
2167 cmp(
edi, Operand::StaticVariable(limit_address));
2168 j(
not_equal, &delete_allocated_handles);
2169 bind(&leave_exit_frame);
2172 ExternalReference scheduled_exception_address =
2173 ExternalReference::scheduled_exception_address(isolate());
2174 cmp(Operand::StaticVariable(scheduled_exception_address),
2175 Immediate(isolate()->factory()->the_hole_value()));
2176 j(
not_equal, &promote_scheduled_exception);
2177 bind(&exception_handled);
2179 #if ENABLE_EXTRA_CHECKS
2182 Register return_value =
eax;
2185 JumpIfSmi(return_value, &ok, Label::kNear);
2189 j(
below, &ok, Label::kNear);
2194 cmp(
map, isolate()->factory()->heap_number_map());
2195 j(
equal, &ok, Label::kNear);
2197 cmp(return_value, isolate()->factory()->undefined_value());
2198 j(
equal, &ok, Label::kNear);
2200 cmp(return_value, isolate()->factory()->true_value());
2201 j(
equal, &ok, Label::kNear);
2203 cmp(return_value, isolate()->factory()->false_value());
2204 j(
equal, &ok, Label::kNear);
2206 cmp(return_value, isolate()->factory()->null_value());
2207 j(
equal, &ok, Label::kNear);
2209 Abort(kAPICallReturnedInvalidObject);
2214 bool restore_context = context_restore_operand !=
NULL;
2215 if (restore_context) {
2216 mov(
esi, *context_restore_operand);
2218 LeaveApiExitFrame(!restore_context);
2221 bind(&promote_scheduled_exception);
2224 CallRuntime(Runtime::kPromoteScheduledException, 0);
2226 jmp(&exception_handled);
2229 ExternalReference delete_extensions =
2230 ExternalReference::delete_handle_scope_extensions(isolate());
2231 bind(&delete_allocated_handles);
2232 mov(Operand::StaticVariable(limit_address),
edi);
2234 mov(Operand(
esp, 0),
2235 Immediate(ExternalReference::isolate_address(isolate())));
2236 mov(
eax, Immediate(delete_extensions));
2239 jmp(&leave_exit_frame);
// Tail-calls into C++ code through the CEntry runtime stub. The external
// entry point is passed in ebx, as expected by CEntryStub.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
2251 void MacroAssembler::InvokePrologue(
const ParameterCount& expected,
2252 const ParameterCount& actual,
2253 Handle<Code> code_constant,
2254 const Operand& code_operand,
2256 bool* definitely_mismatches,
2258 Label::Distance done_near,
2259 const CallWrapper& call_wrapper) {
2260 bool definitely_matches =
false;
2261 *definitely_mismatches =
false;
2263 if (expected.is_immediate()) {
2264 DCHECK(actual.is_immediate());
2265 if (expected.immediate() == actual.immediate()) {
2266 definitely_matches =
true;
2268 mov(
eax, actual.immediate());
2269 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
2270 if (expected.immediate() == sentinel) {
2275 definitely_matches =
true;
2277 *definitely_mismatches =
true;
2278 mov(
ebx, expected.immediate());
2282 if (actual.is_immediate()) {
2286 cmp(expected.reg(), actual.immediate());
2289 mov(
eax, actual.immediate());
2290 }
else if (!expected.reg().is(actual.reg())) {
2293 cmp(expected.reg(), actual.reg());
2300 if (!definitely_matches) {
2301 Handle<Code> adaptor =
2302 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2303 if (!code_constant.is_null()) {
2304 mov(
edx, Immediate(code_constant));
2306 }
else if (!code_operand.is_reg(
edx)) {
2307 mov(
edx, code_operand);
2311 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2312 call(adaptor, RelocInfo::CODE_TARGET);
2313 call_wrapper.AfterCall();
2314 if (!*definitely_mismatches) {
2315 jmp(done, done_near);
2318 jmp(adaptor, RelocInfo::CODE_TARGET);
2325 void MacroAssembler::InvokeCode(
const Operand& code,
2326 const ParameterCount& expected,
2327 const ParameterCount& actual,
2329 const CallWrapper& call_wrapper) {
2334 bool definitely_mismatches =
false;
2335 InvokePrologue(expected, actual, Handle<Code>::null(), code,
2336 &done, &definitely_mismatches,
flag, Label::kNear,
2338 if (!definitely_mismatches) {
2340 call_wrapper.BeforeCall(CallSize(code));
2342 call_wrapper.AfterCall();
2352 void MacroAssembler::InvokeFunction(Register fun,
2353 const ParameterCount& actual,
2355 const CallWrapper& call_wrapper) {
2365 ParameterCount expected(
ebx);
2367 expected, actual,
flag, call_wrapper);
2371 void MacroAssembler::InvokeFunction(Register fun,
2372 const ParameterCount& expected,
2373 const ParameterCount& actual,
2375 const CallWrapper& call_wrapper) {
2383 expected, actual,
flag, call_wrapper);
2387 void MacroAssembler::InvokeFunction(Handle<JSFunction>
function,
2388 const ParameterCount& expected,
2389 const ParameterCount& actual,
2391 const CallWrapper& call_wrapper) {
2392 LoadHeapObject(
edi,
function);
2393 InvokeFunction(
edi, expected, actual,
flag, call_wrapper);
2397 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript
id,
2399 const CallWrapper& call_wrapper) {
2406 ParameterCount expected(0);
2407 GetBuiltinFunction(
edi,
id);
2409 expected, expected,
flag, call_wrapper);
2413 void MacroAssembler::GetBuiltinFunction(Register target,
2414 Builtins::JavaScript
id) {
2416 mov(target, Operand(
esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2417 mov(target,
FieldOperand(target, GlobalObject::kBuiltinsOffset));
2419 JSBuiltinsObject::OffsetOfFunctionWithId(
id)));
2423 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript
id) {
2426 GetBuiltinFunction(
edi,
id);
2432 void MacroAssembler::LoadContext(Register dst,
int context_chain_length) {
2433 if (context_chain_length > 0) {
2435 mov(dst, Operand(
esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2436 for (
int i = 1;
i < context_chain_length;
i++) {
2437 mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2450 if (emit_debug_code()) {
2452 isolate()->factory()->with_context_map());
2453 Check(
not_equal, kVariableResolvedToWithContext);
2458 void MacroAssembler::LoadTransitionedArrayMapConditional(
2461 Register map_in_out,
2463 Label* no_map_match) {
2465 mov(scratch, Operand(
esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2466 mov(scratch,
FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2469 mov(scratch, Operand(scratch,
2470 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2473 FixedArrayBase::kHeaderSize;
2479 FixedArrayBase::kHeaderSize;
2484 void MacroAssembler::LoadGlobalFunction(
int index, Register
function) {
2487 Operand(
esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2490 FieldOperand(
function, GlobalObject::kNativeContextOffset));
2492 mov(
function, Operand(
function, Context::SlotOffset(index)));
2496 void MacroAssembler::LoadGlobalFunctionInitialMap(Register
function,
2499 mov(
map,
FieldOperand(
function, JSFunction::kPrototypeOrInitialMapOffset));
2500 if (emit_debug_code()) {
2505 Abort(kGlobalFunctionsMustHaveInitialMap);
// Stores |src| into the stack slot that holds the saved value of |dst|
// (slot location computed by SafepointRegisterSlot).
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
// Immediate-value overload: writes |src| directly into the safepoint stack
// slot associated with |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
// Loads into |dst| the value saved in the safepoint stack slot of |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
// Returns the esp-relative operand addressing the stack slot where |reg| is
// spilled; slot ordering is defined by SafepointRegisterStackIndex.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2533 int MacroAssembler::SafepointRegisterStackIndex(
int reg_code) {
2542 void MacroAssembler::LoadHeapObject(Register result,
2543 Handle<HeapObject>
object) {
2545 if (isolate()->heap()->InNewSpace(*
object)) {
2546 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2547 mov(result, Operand::ForCell(cell));
2549 mov(result,
object);
2554 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject>
object) {
2556 if (isolate()->heap()->InNewSpace(*
object)) {
2557 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2558 cmp(reg, Operand::ForCell(cell));
2565 void MacroAssembler::PushHeapObject(Handle<HeapObject>
object) {
2567 if (isolate()->heap()->InNewSpace(*
object)) {
2568 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2569 push(Operand::ForCell(cell));
2576 void MacroAssembler::Ret() {
2581 void MacroAssembler::Ret(
int bytes_dropped, Register scratch) {
2582 if (is_uint16(bytes_dropped)) {
2586 add(
esp, Immediate(bytes_dropped));
2593 void MacroAssembler::Drop(
int stack_elements) {
2594 if (stack_elements > 0) {
2600 void MacroAssembler::Move(Register dst, Register src) {
2607 void MacroAssembler::Move(Register dst,
const Immediate& x) {
2616 void MacroAssembler::Move(
const Operand& dst,
const Immediate& x) {
2621 void MacroAssembler::Move(XMMRegister dst,
double val) {
2623 uint64_t int_val = bit_cast<uint64_t, double>(val);
2629 push(Immediate(upper));
2630 push(Immediate(lower));
2631 movsd(dst, Operand(
esp, 0));
// Sets a stats counter to |value| — a no-op unless native code counters are
// enabled by the build flag and this particular counter is enabled.
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}
2644 void MacroAssembler::IncrementCounter(StatsCounter* counter,
int value) {
2646 if (FLAG_native_code_counters && counter->Enabled()) {
2647 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2651 add(operand, Immediate(value));
2657 void MacroAssembler::DecrementCounter(StatsCounter* counter,
int value) {
2659 if (FLAG_native_code_counters && counter->Enabled()) {
2660 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2664 sub(operand, Immediate(value));
2670 void MacroAssembler::IncrementCounter(
Condition cc,
2671 StatsCounter* counter,
2674 if (FLAG_native_code_counters && counter->Enabled()) {
2678 IncrementCounter(counter, value);
2685 void MacroAssembler::DecrementCounter(
Condition cc,
2686 StatsCounter* counter,
2689 if (FLAG_native_code_counters && counter->Enabled()) {
2693 DecrementCounter(counter, value);
2701 if (emit_debug_code()) Check(
cc, reason);
2705 void MacroAssembler::AssertFastElements(Register elements) {
2706 if (emit_debug_code()) {
2707 Factory* factory = isolate()->factory();
2710 Immediate(factory->fixed_array_map()));
2713 Immediate(factory->fixed_double_array_map()));
2716 Immediate(factory->fixed_cow_array_map()));
2718 Abort(kJSObjectWithFastElementsMapHasSlowElements);
2733 void MacroAssembler::CheckStackAlignment() {
2734 int frame_alignment = base::OS::ActivationFrameAlignment();
2735 int frame_alignment_mask = frame_alignment - 1;
2738 Label alignment_as_expected;
2739 test(
esp, Immediate(frame_alignment_mask));
2740 j(
zero, &alignment_as_expected);
2743 bind(&alignment_as_expected);
2752 RecordComment(
"Abort message: ");
2756 if (FLAG_trap_on_abort) {
2762 push(Immediate(
reinterpret_cast<intptr_t
>(Smi::FromInt(reason))));
2768 CallRuntime(Runtime::kAbort, 1);
2770 CallRuntime(Runtime::kAbort, 1);
2777 void MacroAssembler::LoadInstanceDescriptors(Register
map,
2778 Register descriptors) {
2783 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register
map) {
2785 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
2789 void MacroAssembler::LoadPowerOf2(XMMRegister dst,
2793 HeapNumber::kExponentBits));
2794 mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2796 psllq(dst, HeapNumber::kMantissaBits);
2800 void MacroAssembler::LookupNumberStringCache(Register
object,
2806 Register number_string_cache = result;
2807 Register mask = scratch1;
2808 Register scratch = scratch2;
2811 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
2814 mov(mask,
FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2816 sub(mask, Immediate(1));
2822 Label smi_hash_calculated;
2823 Label load_result_from_cache;
2826 JumpIfNotSmi(
object, ¬_smi, Label::kNear);
2827 mov(scratch,
object);
2829 jmp(&smi_hash_calculated, Label::kNear);
2832 isolate()->factory()->heap_number_map());
2835 mov(scratch,
FieldOperand(
object, HeapNumber::kValueOffset));
2836 xor_(scratch,
FieldOperand(
object, HeapNumber::kValueOffset + 4));
2838 and_(scratch, mask);
2839 Register index = scratch;
2840 Register probe = mask;
2845 FixedArray::kHeaderSize));
2846 JumpIfSmi(probe, not_found);
2851 jmp(&load_result_from_cache, Label::kNear);
2853 bind(&smi_hash_calculated);
2855 and_(scratch, mask);
2861 FixedArray::kHeaderSize));
2865 bind(&load_result_from_cache);
2871 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
2875 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
2876 Register instance_type, Register scratch, Label* failure) {
2877 if (!scratch.is(instance_type)) {
2878 mov(scratch, instance_type);
2887 void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
2894 mov(scratch1, object1);
2895 and_(scratch1, object2);
2896 JumpIfSmi(scratch1, failure);
2899 mov(scratch1,
FieldOperand(object1, HeapObject::kMapOffset));
2900 mov(scratch2,
FieldOperand(object2, HeapObject::kMapOffset));
2901 movzx_b(scratch1,
FieldOperand(scratch1, Map::kInstanceTypeOffset));
2902 movzx_b(scratch2,
FieldOperand(scratch2, Map::kInstanceTypeOffset));
2905 const int kFlatOneByteStringMask =
2907 const int kFlatOneByteStringTag =
2910 DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
2911 and_(scratch1, kFlatOneByteStringMask);
2912 and_(scratch2, kFlatOneByteStringMask);
2913 lea(scratch1, Operand(scratch1, scratch2,
times_8, 0));
2914 cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
2919 void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
2920 Label* not_unique_name,
2921 Label::Distance distance) {
2927 j(
not_equal, not_unique_name, distance);
2933 void MacroAssembler::EmitSeqStringSetCharCheck(Register
string,
2938 JumpIfNotSmi(
string, &is_object, Label::kNear);
2943 mov(value,
FieldOperand(
string, HeapObject::kMapOffset));
2944 movzx_b(value,
FieldOperand(value, Map::kInstanceTypeOffset));
2947 cmp(value, Immediate(encoding_mask));
2949 Check(
equal, kUnexpectedStringType);
2957 cmp(index,
FieldOperand(
string, String::kLengthOffset));
2958 Check(
less, kIndexIsTooLarge);
2960 cmp(index, Immediate(Smi::FromInt(0)));
2968 void MacroAssembler::PrepareCallCFunction(
int num_arguments, Register scratch) {
2969 int frame_alignment = base::OS::ActivationFrameAlignment();
2970 if (frame_alignment != 0) {
2976 and_(
esp, -frame_alignment);
2984 void MacroAssembler::CallCFunction(ExternalReference
function,
2985 int num_arguments) {
2987 mov(
eax, Immediate(
function));
2988 CallCFunction(
eax, num_arguments);
2992 void MacroAssembler::CallCFunction(Register
function,
2993 int num_arguments) {
2996 if (emit_debug_code()) {
2997 CheckStackAlignment();
3001 if (base::OS::ActivationFrameAlignment() != 0) {
3018 int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
3019 reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
3020 reg7.is_valid() + reg8.is_valid();
3023 if (reg1.is_valid()) regs |= reg1.bit();
3024 if (reg2.is_valid()) regs |= reg2.bit();
3025 if (reg3.is_valid()) regs |= reg3.bit();
3026 if (reg4.is_valid()) regs |= reg4.bit();
3027 if (reg5.is_valid()) regs |= reg5.bit();
3028 if (reg6.is_valid()) regs |= reg6.bit();
3029 if (reg7.is_valid()) regs |= reg7.bit();
3030 if (reg8.is_valid()) regs |= reg8.bit();
3031 int n_of_non_aliasing_regs =
NumRegs(regs);
3033 return n_of_valid_regs != n_of_non_aliasing_regs;
3038 CodePatcher::CodePatcher(
byte* address,
int size)
3039 : address_(address),
3041 masm_(
NULL, address,
size + Assembler::kGap) {
3045 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3049 CodePatcher::~CodePatcher() {
3051 CpuFeatures::FlushICache(address_, size_);
3054 DCHECK(masm_.pc_ == address_ + size_);
3055 DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3059 void MacroAssembler::CheckPageFlag(
3064 Label* condition_met,
3065 Label::Distance condition_met_distance) {
3067 if (scratch.is(
object)) {
3068 and_(scratch, Immediate(~Page::kPageAlignmentMask));
3070 mov(scratch, Immediate(~Page::kPageAlignmentMask));
3071 and_(scratch,
object);
3074 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3075 static_cast<uint8_t
>(mask));
3077 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3079 j(
cc, condition_met, condition_met_distance);
3083 void MacroAssembler::CheckPageFlagForMap(
3087 Label* condition_met,
3088 Label::Distance condition_met_distance) {
3090 Page* page = Page::FromAddress(
map->address());
3091 DCHECK(!serializer_enabled());
3092 ExternalReference reference(ExternalReference::page_flags(page));
3095 DCHECK(!isolate()->heap()->mark_compact_collector()->
3096 IsOnEvacuationCandidate(*
map));
3098 test_b(Operand::StaticVariable(reference),
static_cast<uint8_t
>(mask));
3100 test(Operand::StaticVariable(reference), Immediate(mask));
3102 j(
cc, condition_met, condition_met_distance);
3106 void MacroAssembler::CheckMapDeprecated(Handle<Map>
map,
3108 Label* if_deprecated) {
3109 if (
map->CanBeDeprecated()) {
3111 mov(scratch,
FieldOperand(scratch, Map::kBitField3Offset));
3112 and_(scratch, Immediate(Map::Deprecated::kMask));
3118 void MacroAssembler::JumpIfBlack(Register
object,
3122 Label::Distance on_black_near) {
3123 HasColor(
object, scratch0, scratch1,
3124 on_black, on_black_near,
3126 DCHECK(strcmp(Marking::kBlackBitPattern,
"10") == 0);
3130 void MacroAssembler::HasColor(Register
object,
3131 Register bitmap_scratch,
3132 Register mask_scratch,
3134 Label::Distance has_color_distance,
3139 GetMarkBits(
object, bitmap_scratch, mask_scratch);
3141 Label other_color, word_boundary;
3142 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3143 j(first_bit == 1 ?
zero :
not_zero, &other_color, Label::kNear);
3144 add(mask_scratch, mask_scratch);
3145 j(
zero, &word_boundary, Label::kNear);
3146 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3147 j(second_bit == 1 ?
not_zero :
zero, has_color, has_color_distance);
3148 jmp(&other_color, Label::kNear);
3150 bind(&word_boundary);
3151 test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize +
kPointerSize), 1);
3153 j(second_bit == 1 ?
not_zero :
zero, has_color, has_color_distance);
3158 void MacroAssembler::GetMarkBits(Register addr_reg,
3159 Register bitmap_reg,
3160 Register mask_reg) {
3162 mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
3163 and_(bitmap_reg, addr_reg);
3169 (Page::kPageAlignmentMask >>
shift) & ~(Bitmap::kBytesPerCell - 1));
3171 add(bitmap_reg,
ecx);
3174 and_(
ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
3175 mov(mask_reg, Immediate(1));
3180 void MacroAssembler::EnsureNotWhite(
3182 Register bitmap_scratch,
3183 Register mask_scratch,
3184 Label* value_is_white_and_not_data,
3185 Label::Distance distance) {
3187 GetMarkBits(value, bitmap_scratch, mask_scratch);
3190 DCHECK(strcmp(Marking::kWhiteBitPattern,
"00") == 0);
3191 DCHECK(strcmp(Marking::kBlackBitPattern,
"10") == 0);
3192 DCHECK(strcmp(Marking::kGreyBitPattern,
"11") == 0);
3193 DCHECK(strcmp(Marking::kImpossibleBitPattern,
"01") == 0);
3199 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3202 if (emit_debug_code()) {
3207 add(mask_scratch, mask_scratch);
3208 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3209 j(
zero, &ok, Label::kNear);
3218 Register length =
ecx;
3219 Label not_heap_number;
3220 Label is_data_object;
3224 cmp(
map, isolate()->factory()->heap_number_map());
3225 j(
not_equal, ¬_heap_number, Label::kNear);
3226 mov(length, Immediate(HeapNumber::kSize));
3227 jmp(&is_data_object, Label::kNear);
3229 bind(¬_heap_number);
3235 Register instance_type =
ecx;
3238 j(
not_zero, value_is_white_and_not_data);
3248 j(
zero, ¬_external, Label::kNear);
3249 mov(length, Immediate(ExternalString::kSize));
3250 jmp(&is_data_object, Label::kNear);
3252 bind(¬_external);
3257 add(length, Immediate(0x04));
3261 DCHECK_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
3262 DCHECK(SeqOneByteString::kMaxSize <=
3263 static_cast<int>(0xffffffffu >> (2 +
kSmiTagSize)));
3264 imul(length,
FieldOperand(value, String::kLengthOffset));
3269 bind(&is_data_object);
3272 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
3274 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
3275 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
3277 if (emit_debug_code()) {
3278 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
3279 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
3280 Check(
less_equal, kLiveBytesCountOverflowChunkSize);
3287 void MacroAssembler::EnumLength(Register dst, Register
map) {
3290 and_(dst, Immediate(Map::EnumLengthBits::kMask));
3295 void MacroAssembler::CheckEnumCache(Label* call_runtime) {
3305 j(
equal, call_runtime);
3314 cmp(
edx, Immediate(Smi::FromInt(0)));
3323 cmp(
ecx, isolate()->factory()->empty_fixed_array());
3324 j(
equal, &no_elements);
3327 cmp(
ecx, isolate()->factory()->empty_slow_element_dictionary());
3332 cmp(
ecx, isolate()->factory()->null_value());
3337 void MacroAssembler::TestJSArrayForAllocationMemento(
3338 Register receiver_reg,
3339 Register scratch_reg,
3340 Label* no_memento_found) {
3341 ExternalReference new_space_start =
3342 ExternalReference::new_space_start(isolate());
3343 ExternalReference new_space_allocation_top =
3344 ExternalReference::new_space_allocation_top_address(isolate());
3346 lea(scratch_reg, Operand(receiver_reg,
3348 cmp(scratch_reg, Immediate(new_space_start));
3349 j(
less, no_memento_found);
3350 cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
3352 cmp(
MemOperand(scratch_reg, -AllocationMemento::kSize),
3353 Immediate(isolate()->factory()->allocation_memento_map()));
3357 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
3362 DCHECK(!scratch1.is(scratch0));
3363 Factory* factory = isolate()->factory();
3364 Register current = scratch0;
3368 mov(current,
object);
3372 mov(current,
FieldOperand(current, HeapObject::kMapOffset));
3373 mov(scratch1,
FieldOperand(current, Map::kBitField2Offset));
3374 DecodeField<Map::ElementsKindBits>(scratch1);
3377 mov(current,
FieldOperand(current, Map::kPrototypeOffset));
3378 cmp(current, Immediate(factory->null_value()));
3383 void MacroAssembler::TruncatingDiv(Register dividend,
int32_t divisor) {
3386 base::MagicNumbersForDivision<uint32_t> mag =
3388 mov(
eax, Immediate(mag.multiplier));
3390 bool neg = (mag.multiplier & (
static_cast<uint32_t>(1) << 31)) != 0;
3391 if (divisor > 0 && neg) add(
edx, dividend);
3392 if (divisor < 0 && !neg && mag.multiplier > 0) sub(
edx, dividend);
3393 if (mag.shift > 0) sar(
edx, mag.shift);
MacroAssembler(Isolate *isolate, void *buffer, int size)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi space(in MBytes)
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
enable harmony numeric enable harmony object literal extensions Optimize object Array shift
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be aligned(ARM64 only)") DEFINE_STRING(expose_gc_as
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
@ PRETENURE_OLD_POINTER_SPACE
@ PRETENURE_OLD_DATA_SPACE
#define STATIC_ASSERT(test)
bool IsPowerOfTwo32(uint32_t value)
MagicNumbersForDivision< T > SignedDivisionByConstant(T d)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
const intptr_t kHeapObjectTagMask
const uint32_t kStringEncodingMask
bool AreAliased(const CPURegister ®1, const CPURegister ®2, const CPURegister ®3=NoReg, const CPURegister ®4=NoReg, const CPURegister ®5=NoReg, const CPURegister ®6=NoReg, const CPURegister ®7=NoReg, const CPURegister ®8=NoReg)
TypeImpl< ZoneTypeConfig > Type
bool is_uintn(int64_t x, unsigned n)
const int kNumSafepointRegisters
const uint32_t kNotStringTag
Operand FieldOperand(Register object, int offset)
const int kPointerSizeLog2
const uint32_t kStringTag
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
@ FAST_HOLEY_SMI_ELEMENTS
const uint32_t kOneByteStringTag
const intptr_t kObjectAlignmentMask
int NumRegs(RegList reglist)
static const int kInvalidEnumCacheSentinel
const char * GetBailoutReason(BailoutReason reason)
Condition NegateCondition(Condition cond)
@ times_half_pointer_size
@ times_twice_pointer_size
const uint32_t kStringRepresentationMask
OStream & dec(OStream &os)
const uint32_t kIsIndirectStringTag
int TenToThe(int exponent)
kFeedbackVectorOffset flag
const uint32_t kInternalizedTag
static const int kNumberDictionaryProbes
const intptr_t kSmiTagMask
@ REGISTER_VALUE_IS_INT32
const uint32_t kIsNotInternalizedMask
Operand ApiParameterOperand(int index)
const uint32_t kNaNOrInfinityLowerBoundUpper32
bool is_intn(int64_t x, unsigned n)
static const int kNoCodeAgeSequenceLength
const uint32_t kHoleNanLower32
const uint32_t kIsNotStringMask
bool IsAligned(T value, U alignment)
const intptr_t kDoubleAlignment
@ kPointersToHereAreAlwaysInteresting
const intptr_t kPointerAlignment
void CopyBytes(uint8_t *target, uint8_t *source)
const intptr_t kDoubleAlignmentMask
const uint32_t kIsIndirectStringMask
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Debugger support for the V8 JavaScript engine.
static Handle< Value > Throw(Isolate *isolate, const char *message)
bool is(Register reg) const