class SafepointGenerator FINAL : public CallWrapper {
 public:
  SafepointGenerator(LCodeGen* codegen,
                     LPointerMap* pointers,
                     Safepoint::DeoptMode mode)
      : codegen_(codegen),
        pointers_(pointers),
        deopt_mode_(mode) {}
  virtual ~SafepointGenerator() {}

  virtual void BeforeCall(int call_size) const OVERRIDE {}

  virtual void AfterCall() const OVERRIDE {
    codegen_->RecordSafepoint(pointers_, deopt_mode_);
  }

 private:
  LCodeGen* codegen_;
  LPointerMap* pointers_;
  Safepoint::DeoptMode deopt_mode_;
};
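// A SafepointGenerator instance is handed to the macro assembler as the
// CallWrapper whenever a call needs a safepoint recorded right after it:
// AfterCall() records the safepoint described by the pointer map, so the GC
// can find the live tagged values while that call is on the stack.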
bool LCodeGen::GenerateCode() {
  LPhase phase("Z_Code generation", chunk());
  // ...
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done in GeneratePrologue).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  // ...
}


void LCodeGen::FinishCode(Handle<Code> code) {
  // ...
  code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
  if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
  // ...
}
void LCodeGen::MakeSureStackPagesMapped(int offset) {
  const int kPageSize = 4 * KB;
  for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
    __ movp(Operand(rsp, offset), rax);
  }
}
void LCodeGen::SaveCallerDoubles() {
  DCHECK(info()->saves_caller_doubles());
  DCHECK(NeedsEagerFrame());
  Comment(";;; Save clobbered callee double registers");
  int count = 0;
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  while (!save_iterator.Done()) {
    __ movsd(MemOperand(rsp, count * kDoubleSize),
             XMMRegister::FromAllocationIndex(save_iterator.Current()));
    save_iterator.Advance();
    count++;
  }
}


void LCodeGen::RestoreCallerDoubles() {
  DCHECK(info()->saves_caller_doubles());
  DCHECK(NeedsEagerFrame());
  Comment(";;; Restore clobbered callee double registers");
  BitVector* doubles = chunk()->allocated_double_registers();
  BitVector::Iterator save_iterator(doubles);
  int count = 0;
  while (!save_iterator.Done()) {
    __ movsd(XMMRegister::FromAllocationIndex(save_iterator.Current()),
             MemOperand(rsp, count * kDoubleSize));
    save_iterator.Advance();
    count++;
  }
}
bool LCodeGen::GeneratePrologue() {
  DCHECK(is_generating());

  if (info()->IsOptimizing()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
    if (strlen(FLAG_stop_at) > 0 &&
        info_->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
      __ int3();
    }
#endif

    // Sloppy mode functions need to replace the receiver with the global
    // proxy when called as functions (without an explicit receiver object).
    if (info_->this_has_uses() &&
        info_->strict_mode() == SLOPPY &&
        !info_->is_native()) {
      Label ok;
      StackArgumentsAccessor args(rsp, scope()->num_parameters());
      __ movp(rcx, args.GetReceiverOperand());

      __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex);
      __ j(not_equal, &ok, Label::kNear);

      __ movp(rcx, GlobalObjectOperand());
      __ movp(rcx, FieldOperand(rcx, GlobalObject::kGlobalProxyOffset));

      __ movp(args.GetReceiverOperand(), rcx);

      __ bind(&ok);
    }
  }

  info()->set_prologue_offset(masm_->pc_offset());
  if (NeedsEagerFrame()) {
    DCHECK(!frame_is_built_);
    frame_is_built_ = true;
    if (info()->IsStub()) {
      __ StubPrologue();
    } else {
      __ Prologue(info()->IsCodePreAgingActive());
    }
    info()->AddNoFrameRange(0, masm_->pc_offset());
  }
  // Reserve space for the stack slots needed by the code.
  int slots = GetStackSlotCount();
  if (slots > 0) {
    if (FLAG_debug_code) {
      // Zap the spill slots with a recognizable value in debug builds.
      // ...
    } else {
      __ subp(rsp, Immediate(slots * kPointerSize));
#ifdef _MSC_VER
      MakeSureStackPagesMapped(slots * kPointerSize);
#endif
    }

    if (info()->saves_caller_doubles()) {
      SaveCallerDoubles();
    }
  }

  // Possibly allocate a local context.
  int heap_slots = info_->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment(";;; Allocate local context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in rdi.
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ Push(rdi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    // ...
    // Copy any necessary parameters into the context.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack and store it in the context.
        __ movp(rax, Operand(rbp, parameter_offset));
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(rsi, context_offset, rax, rbx, kSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
    Comment(";;; End allocate local context");
  }

  // Trace the call.
  if (FLAG_trace && info()->IsOptimizing()) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }
  return !is_aborted();
}
void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
  if (instr->IsCall()) {
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
  }
  if (!instr->IsLazyBailout() && !instr->IsGap()) {
    safepoints_.BumpLastLazySafepointIndex();
  }
}


void LCodeGen::GenerateBodyInstructionPost(LInstruction* instr) {
  if (FLAG_debug_code && FLAG_enable_slow_asserts && instr->HasResult() &&
      instr->hydrogen_value()->representation().IsInteger32() &&
      instr->result()->IsRegister()) {
    __ AssertZeroExtended(ToRegister(instr->result()));
  }

  if (instr->HasResult() && instr->MustSignExtendResult(chunk())) {
    // Sign extend the 32-bit result at its definition point, since a
    // dehoisted key is used in 64-bit address computations.
    if (instr->result()->IsRegister()) {
      Register result_reg = ToRegister(instr->result());
      __ movsxlq(result_reg, result_reg);
    } else {
      // Sign extend the 32-bit result in the stack slot.
      DCHECK(instr->result()->IsStackSlot());
      Operand src = ToOperand(instr->result());
      __ movsxlq(kScratchRegister, src);
      __ movq(src, kScratchRegister);
    }
  }
}


bool LCodeGen::GenerateJumpTable() {
  Label needs_frame;
  if (jump_table_.length() > 0) {
    Comment(";;; -------------------- Jump table --------------------");
  }
  for (int i = 0; i < jump_table_.length(); i++) {
    Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
    __ bind(&table_entry->label);
    Address entry = table_entry->address;
    DeoptComment(table_entry->reason);
    if (table_entry->needs_frame) {
      DCHECK(!info()->saves_caller_doubles());
      __ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
      if (needs_frame.is_bound()) {
        __ jmp(&needs_frame);
      } else {
        __ bind(&needs_frame);
        // Build a stub frame and call the deoptimizer entry.
        // ...
      }
    } else {
      if (info()->saves_caller_doubles()) {
        DCHECK(info()->IsStub());
        RestoreCallerDoubles();
      }
      __ call(entry, RelocInfo::RUNTIME_ENTRY);
    }
  }
  return !is_aborted();
}
bool LCodeGen::GenerateDeferredCode() {
  DCHECK(is_generating());
  if (deferred_.length() > 0) {
    for (int i = 0; !is_aborted() && i < deferred_.length(); i++) {
      LDeferredCode* code = deferred_[i];

      HValue* value =
          instructions_->at(code->instruction_index())->hydrogen_value();
      RecordAndWritePosition(
          chunk()->graph()->SourcePositionToScriptPosition(value->position()));

      Comment(";;; <@%d,#%d> "
              "-------------------- Deferred %s --------------------",
              code->instruction_index(),
              code->instr()->hydrogen_value()->id(),
              code->instr()->Mnemonic());
      __ bind(code->entry());
      if (NeedsDeferredFrame()) {
        Comment(";;; Build frame");
        // ...
        Comment(";;; Deferred code");
      }
      code->Generate();
      if (NeedsDeferredFrame()) {
        __ bind(code->done());
        Comment(";;; Destroy frame");
        // ...
      }
      __ jmp(code->exit());
    }
  }

  // Deferred code is the last part of the instruction sequence. Mark
  // the generated code as done unless we bailed out.
  if (!is_aborted()) status_ = DONE;
  return !is_aborted();
}


bool LCodeGen::GenerateSafepointTable() {
  DCHECK(is_done());
  safepoints_.Emit(masm(), GetStackSlotCount());
  return !is_aborted();
}
XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
  DCHECK(op->IsDoubleRegister());
  return ToDoubleRegister(op->index());
}


bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
}


bool LCodeGen::IsDehoistedKeyConstant(LConstantOperand* op) const {
  return op->IsConstantOperand() &&
         chunk_->IsDehoistedKey(chunk_->LookupConstant(op));
}


bool LCodeGen::IsSmiConstant(LConstantOperand* op) const {
  return chunk_->LookupLiteralRepresentation(op).IsSmi();
}


int32_t LCodeGen::ToRepresentation(LConstantOperand* op,
                                   const Representation& r) const {
  HConstant* constant = chunk_->LookupConstant(op);
  int32_t value = constant->Integer32Value();
  if (r.IsInteger32()) return value;
  DCHECK(SmiValuesAre31Bits() && r.IsSmiOrTagged());
  return static_cast<int32_t>(reinterpret_cast<intptr_t>(Smi::FromInt(value)));
}


Smi* LCodeGen::ToSmi(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  return Smi::FromInt(constant->Integer32Value());
}


double LCodeGen::ToDouble(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(constant->HasDoubleValue());
  return constant->DoubleValue();
}


ExternalReference LCodeGen::ToExternalReference(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(constant->HasExternalReferenceValue());
  return constant->ExternalReferenceValue();
}


Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const {
  HConstant* constant = chunk_->LookupConstant(op);
  DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
  return constant->handle(isolate());
}


Operand LCodeGen::ToOperand(LOperand* op) const {
  // Does not handle registers.
  DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
  return Operand(rbp, StackSlotOffset(op->index()));
}
void LCodeGen::WriteTranslation(LEnvironment* environment,
                                Translation* translation) {
  if (environment == NULL) return;

  // The translation includes one command per value in the environment.
  int translation_size = environment->translation_size();
  // The output frame height does not include the parameters.
  int height = translation_size - environment->parameter_count();

  WriteTranslation(environment->outer(), translation);
  bool has_closure_id = !info()->closure().is_null() &&
      !info()->closure().is_identical_to(environment->closure());
  int closure_id = has_closure_id
      ? DefineDeoptimizationLiteral(environment->closure())
      : Translation::kSelfLiteralId;

  switch (environment->frame_type()) {
    case JS_FUNCTION:
      translation->BeginJSFrame(environment->ast_id(), closure_id, height);
      break;
    case JS_CONSTRUCT:
      translation->BeginConstructStubFrame(closure_id, translation_size);
      break;
    case JS_GETTER:
      DCHECK(translation_size == 1);
      DCHECK(height == 0);
      translation->BeginGetterStubFrame(closure_id);
      break;
    case JS_SETTER:
      DCHECK(translation_size == 2);
      DCHECK(height == 0);
      translation->BeginSetterStubFrame(closure_id);
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
      break;
    case STUB:
      translation->BeginCompiledStubFrame();
      break;
  }

  int object_index = 0;
  int dematerialized_index = 0;
  for (int i = 0; i < translation_size; ++i) {
    LOperand* value = environment->values()->at(i);
    AddToTranslation(environment,
                     translation,
                     value,
                     environment->HasTaggedValueAt(i),
                     environment->HasUint32ValueAt(i),
                     &object_index,
                     &dematerialized_index);
  }
}
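// Each translation records, command by command, how to rebuild the
// unoptimized frame(s) from this optimized frame at a deopt point: one
// Begin*Frame command per frame in the inlining chain, followed by one store
// command per environment value (register, stack slot, or literal).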
void LCodeGen::AddToTranslation(LEnvironment* environment,
                                Translation* translation,
                                LOperand* op,
                                bool is_tagged,
                                bool is_uint32,
                                int* object_index_pointer,
                                int* dematerialized_index_pointer) {
  if (op == LEnvironment::materialization_marker()) {
    int object_index = (*object_index_pointer)++;
    if (environment->ObjectIsDuplicateAt(object_index)) {
      int dupe_of = environment->ObjectDuplicateOfAt(object_index);
      translation->DuplicateObject(dupe_of);
      return;
    }
    int object_length = environment->ObjectLengthAt(object_index);
    if (environment->ObjectIsArgumentsAt(object_index)) {
      translation->BeginArgumentsObject(object_length);
    } else {
      translation->BeginCapturedObject(object_length);
    }
    int dematerialized_index = *dematerialized_index_pointer;
    int env_offset = environment->translation_size() + dematerialized_index;
    *dematerialized_index_pointer += object_length;
    for (int i = 0; i < object_length; ++i) {
      LOperand* value = environment->values()->at(env_offset + i);
      AddToTranslation(environment,
                       translation,
                       value,
                       environment->HasTaggedValueAt(env_offset + i),
                       environment->HasUint32ValueAt(env_offset + i),
                       object_index_pointer,
                       dematerialized_index_pointer);
    }
    return;
  }

  if (op->IsStackSlot()) {
    if (is_tagged) {
      translation->StoreStackSlot(op->index());
    } else if (is_uint32) {
      translation->StoreUint32StackSlot(op->index());
    } else {
      translation->StoreInt32StackSlot(op->index());
    }
  } else if (op->IsDoubleStackSlot()) {
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsRegister()) {
    Register reg = ToRegister(op);
    if (is_tagged) {
      translation->StoreRegister(reg);
    } else if (is_uint32) {
      translation->StoreUint32Register(reg);
    } else {
      translation->StoreInt32Register(reg);
    }
  } else if (op->IsDoubleRegister()) {
    XMMRegister reg = ToDoubleRegister(op);
    translation->StoreDoubleRegister(reg);
  } else if (op->IsConstantOperand()) {
    HConstant* constant = chunk()->LookupConstant(LConstantOperand::cast(op));
    int src_index = DefineDeoptimizationLiteral(constant->handle(isolate()));
    translation->StoreLiteral(src_index);
  } else {
    UNREACHABLE();
  }
}
void LCodeGen::CallCodeGeneric(Handle<Code> code,
                               RelocInfo::Mode mode,
                               LInstruction* instr,
                               SafepointMode safepoint_mode,
                               int argc) {
  DCHECK(instr != NULL);
  __ call(code, mode);
  RecordSafepointWithLazyDeopt(instr, safepoint_mode, argc);

  // Signal that we don't inline smi code before these stubs in the
  // optimizing code generator.
  if (code->kind() == Code::BINARY_OP_IC ||
      code->kind() == Code::COMPARE_IC) {
    __ nop();
  }
}


void LCodeGen::CallRuntime(const Runtime::Function* function,
                           int num_arguments,
                           LInstruction* instr) {
  DCHECK(instr != NULL);
  DCHECK(instr->HasPointerMap());

  __ CallRuntime(function, num_arguments);
  RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
}


void LCodeGen::LoadContextFromDeferred(LOperand* context) {
  if (context->IsRegister()) {
    if (!ToRegister(context).is(rsi)) {
      __ movp(rsi, ToRegister(context));
    }
  } else if (context->IsStackSlot()) {
    __ movp(rsi, ToOperand(context));
  } else if (context->IsConstantOperand()) {
    HConstant* constant =
        chunk_->LookupConstant(LConstantOperand::cast(context));
    __ Move(rsi, Handle<Object>::cast(constant->handle(isolate())));
  } else {
    UNREACHABLE();
  }
}


void LCodeGen::CallRuntimeFromDeferred(Runtime::FunctionId id,
                                       int argc,
                                       LInstruction* instr,
                                       LOperand* context) {
  LoadContextFromDeferred(context);

  __ CallRuntimeSaveDoubles(id);
  RecordSafepointWithRegisters(
      instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
}
void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                                    Safepoint::DeoptMode mode) {
  environment->set_has_been_used();
  if (!environment->HasBeenRegistered()) {
    // Count the number of frames in the environment chain, and how many of
    // them are JS frames.
    int frame_count = 0;
    int jsframe_count = 0;
    for (LEnvironment* e = environment; e != NULL; e = e->outer()) {
      ++frame_count;
      if (e->frame_type() == JS_FUNCTION) {
        ++jsframe_count;
      }
    }
    Translation translation(&translations_, frame_count, jsframe_count, zone());
    WriteTranslation(environment, &translation);
    int deoptimization_index = deoptimizations_.length();
    int pc_offset = masm()->pc_offset();
    environment->Register(deoptimization_index,
                          translation.index(),
                          (mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
    deoptimizations_.Add(environment, environment->zone());
  }
}
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                            const char* detail,
                            Deoptimizer::BailoutType bailout_type) {
  LEnvironment* environment = instr->environment();
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  DCHECK(environment->HasBeenRegistered());
  int id = environment->deoptimization_index();
  DCHECK(info()->IsOptimizing() || info()->IsStub());
  Address entry =
      Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
  if (entry == NULL) {
    Abort(kBailoutWasNotPrepared);
    return;
  }

  if (DeoptEveryNTimes()) {
    ExternalReference count = ExternalReference::stress_deopt_count(isolate());
    Label no_deopt;
    __ pushfq();
    __ pushq(rax);
    Operand count_operand = masm()->ExternalOperand(count, kScratchRegister);
    __ movl(rax, count_operand);
    __ subl(rax, Immediate(1));
    __ j(not_zero, &no_deopt, Label::kNear);
    if (FLAG_trap_on_deopt) __ int3();
    __ movl(rax, Immediate(FLAG_deopt_every_n_times));
    __ movl(count_operand, rax);
    __ popq(rax);
    __ popfq();
    DCHECK(frame_is_built_);
    __ call(entry, RelocInfo::RUNTIME_ENTRY);
    __ bind(&no_deopt);
    __ movl(count_operand, rax);
    __ popq(rax);
    __ popfq();
  }

  if (info()->ShouldTrapOnDeopt()) {
    Label done;
    if (cc != no_condition) {
      __ j(NegateCondition(cc), &done, Label::kNear);
    }
    __ int3();
    __ bind(&done);
  }

  Deoptimizer::Reason reason(instr->hydrogen_value()->position().raw(),
                             instr->Mnemonic(), detail);
  DCHECK(info()->IsStub() || frame_is_built_);
  // Go through the jump table if we need to handle a condition, build a
  // frame, or restore caller doubles.
  if (cc == no_condition && frame_is_built_ &&
      !info()->saves_caller_doubles()) {
    DeoptComment(reason);
    __ call(entry, RelocInfo::RUNTIME_ENTRY);
  } else {
    Deoptimizer::JumpTableEntry table_entry(entry, reason, bailout_type,
                                            !frame_is_built_);
    // We often have several deopts to the same entry, reuse the last
    // jump entry if this is the case.
    if (jump_table_.is_empty() ||
        !table_entry.IsEquivalentTo(jump_table_.last())) {
      jump_table_.Add(table_entry, zone());
    }
    __ jmp(&jump_table_.last().label);
  }
}
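// DeoptimizeIf thus emits either a direct call to the deoptimizer entry
// (when no condition test, frame build, or caller-double restore is needed)
// or a conditional jump to a shared jump-table entry; adjacent equivalent
// entries are merged so repeated deopts to one entry share a single label.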
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
                            const char* detail) {
  Deoptimizer::BailoutType bailout_type = info()->IsStub()
      ? Deoptimizer::LAZY
      : Deoptimizer::EAGER;
  DeoptimizeIf(cc, instr, detail, bailout_type);
}


void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) {
  int length = deoptimizations_.length();
  if (length == 0) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), length, TENURED);

  Handle<ByteArray> translations =
      translations_.CreateByteArray(isolate()->factory());
  data->SetTranslationByteArray(*translations);
  data->SetInlinedFunctionCount(Smi::FromInt(inlined_function_count_));
  data->SetOptimizationId(Smi::FromInt(info_->optimization_id()));
  if (info_->IsOptimizing()) {
    // Reference to shared function info does not change between phases.
    AllowDeferredHandleDereference allow_handle_dereference;
    data->SetSharedFunctionInfo(*info_->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  // ...

  data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
  data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));

  // Populate the deoptimization entries.
  for (int i = 0; i < length; i++) {
    LEnvironment* env = deoptimizations_[i];
    data->SetAstId(i, env->ast_id());
    data->SetTranslationIndex(i, Smi::FromInt(env->translation_index()));
    data->SetArgumentsStackHeight(i,
                                  Smi::FromInt(env->arguments_stack_height()));
    data->SetPc(i, Smi::FromInt(env->pc_offset()));
  }
  code->set_deoptimization_data(*data);
}
void LCodeGen::PopulateDeoptimizationLiteralsWithInlinedFunctions() {
  DCHECK(deoptimization_literals_.length() == 0);

  const ZoneList<Handle<JSFunction> >* inlined_closures =
      chunk()->inlined_closures();

  for (int i = 0, length = inlined_closures->length();
       i < length;
       i++) {
    DefineDeoptimizationLiteral(inlined_closures->at(i));
  }

  inlined_function_count_ = deoptimization_literals_.length();
}


void LCodeGen::RecordSafepointWithLazyDeopt(
    LInstruction* instr, SafepointMode safepoint_mode, int argc) {
  if (safepoint_mode == RECORD_SIMPLE_SAFEPOINT) {
    RecordSafepoint(instr->pointer_map(), Safepoint::kLazyDeopt);
  } else {
    DCHECK(safepoint_mode == RECORD_SAFEPOINT_WITH_REGISTERS);
    RecordSafepointWithRegisters(
        instr->pointer_map(), argc, Safepoint::kLazyDeopt);
  }
}
void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::Kind kind,
                               int arguments,
                               Safepoint::DeoptMode deopt_mode) {
  DCHECK(kind == expected_safepoint_kind_);

  const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();

  Safepoint safepoint = safepoints_.DefineSafepoint(masm(),
      kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    LOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      safepoint.DefinePointerRegister(ToRegister(pointer), zone());
    }
  }
}


void LCodeGen::RecordSafepoint(LPointerMap* pointers,
                               Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kSimple, 0, deopt_mode);
}


void LCodeGen::RecordSafepoint(Safepoint::DeoptMode deopt_mode) {
  LPointerMap empty_pointers(zone());
  RecordSafepoint(&empty_pointers, deopt_mode);
}


void LCodeGen::RecordSafepointWithRegisters(LPointerMap* pointers,
                                            int arguments,
                                            Safepoint::DeoptMode deopt_mode) {
  RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
}


void LCodeGen::RecordAndWritePosition(int position) {
  if (position == RelocInfo::kNoPosition) return;
  masm()->positions_recorder()->RecordPosition(position);
  masm()->positions_recorder()->WriteRecordedPositions();
}
static const char* LabelType(LLabel* label) {
  if (label->is_loop_header()) return " (loop header)";
  if (label->is_osr_entry()) return " (OSR entry)";
  return "";
}


void LCodeGen::DoLabel(LLabel* label) {
  Comment(";;; <@%d,#%d> -------------------- B%d%s --------------------",
          current_instruction_,
          label->hydrogen_value()->id(),
          label->block_id(),
          LabelType(label));
  __ bind(label->label());
  current_block_ = label->block_id();
  DoGap(label);
}


void LCodeGen::DoGap(LGap* gap) {
  for (int i = LGap::FIRST_INNER_POSITION;
       i <= LGap::LAST_INNER_POSITION;
       i++) {
    LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
    LParallelMove* move = gap->GetParallelMove(inner_pos);
    if (move != NULL) DoParallelMove(move);
  }
}
void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
  DoGap(instr);
}


void LCodeGen::DoParameter(LParameter* instr) {
  // Nothing to do.
}


void LCodeGen::DoCallStub(LCallStub* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->result()).is(rax));
  switch (instr->hydrogen()->major_key()) {
    case CodeStub::RegExpExec: {
      RegExpExecStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::SubString: {
      SubStringStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    case CodeStub::StringCompare: {
      StringCompareStub stub(isolate());
      CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
      break;
    }
    default:
      UNREACHABLE();
  }
}


void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
  GenerateOsrPrologue();
}
void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(dividend.is(ToRegister(instr->result())));

  // Benchmarks favor the branching version for positive dividends, so branch
  // on the sign instead of using the branch-free bitmask-and-subtract form.
  HMod* hmod = instr->hydrogen();
  int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
  Label dividend_is_not_negative, done;
  if (hmod->CheckFlag(HValue::kLeftCanBeNegative)) {
    __ testl(dividend, dividend);
    __ j(not_sign, &dividend_is_not_negative, Label::kNear);
    // Note that this is correct even for kMinInt operands.
    __ negl(dividend);
    __ andl(dividend, Immediate(mask));
    __ negl(dividend);
    if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(zero, instr, "minus zero");
    }
    __ jmp(&done, Label::kNear);
  }

  __ bind(&dividend_is_not_negative);
  __ andl(dividend, Immediate(mask));
  __ bind(&done);
}
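// DoModByPowerOf2I: for divisor == +/-2^k the remainder is just the low k
// bits of the dividend, mask = 2^k - 1, e.g. 13 % 8 == (13 & 7) == 5. A
// negative dividend is negated before and after the mask so the result keeps
// the dividend's sign, matching JavaScript's % semantics (-13 % 8 == -5).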
void LCodeGen::DoModByConstI(LModByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(rax));

  if (divisor == 0) {
    DeoptimizeIf(no_condition, instr, "division by zero");
    return;
  }

  __ TruncatingDiv(dividend, Abs(divisor));
  __ imull(rdx, rdx, Immediate(Abs(divisor)));
  __ movl(rax, dividend);
  __ subl(rax, rdx);

  // Check for negative zero.
  HMod* hmod = instr->hydrogen();
  if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label remainder_not_zero;
    __ j(not_zero, &remainder_not_zero, Label::kNear);
    __ cmpl(dividend, Immediate(0));
    DeoptimizeIf(less, instr, "minus zero");
    __ bind(&remainder_not_zero);
  }
}
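// DoModByConstI: TruncatingDiv leaves dividend / Abs(divisor) in rdx using a
// magic-number multiplication instead of idiv; the remainder is recovered as
// dividend - quotient * Abs(divisor). A zero remainder with a negative
// dividend is the -0 case that must deoptimize.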
void LCodeGen::DoModI(LModI* instr) {
  HMod* hmod = instr->hydrogen();

  Register left_reg = ToRegister(instr->left());
  DCHECK(left_reg.is(rax));
  Register right_reg = ToRegister(instr->right());
  DCHECK(!right_reg.is(rax));
  DCHECK(!right_reg.is(rdx));
  Register result_reg = ToRegister(instr->result());
  DCHECK(result_reg.is(rdx));

  Label done;
  // Check for x % 0, idiv would signal a divide error. We have to
  // deopt in this case because we can't return a NaN.
  if (hmod->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(right_reg, right_reg);
    DeoptimizeIf(zero, instr, "division by zero");
  }

  // Check for kMinInt % -1, idiv would signal a divide error. We
  // have to deopt if we care about -0, because we can't return that.
  if (hmod->CheckFlag(HValue::kCanOverflow)) {
    Label no_overflow_possible;
    __ cmpl(left_reg, Immediate(kMinInt));
    __ j(not_zero, &no_overflow_possible, Label::kNear);
    __ cmpl(right_reg, Immediate(-1));
    if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
      DeoptimizeIf(equal, instr, "minus zero");
    } else {
      __ j(not_equal, &no_overflow_possible, Label::kNear);
      __ Set(result_reg, 0);
      __ jmp(&done, Label::kNear);
    }
    __ bind(&no_overflow_possible);
  }

  // Sign extend dividend in eax into edx:eax, since we are using only the low
  // 32 bits of the values.
  __ cdq();

  // If we care about -0, test if the dividend is <0 and the result is 0.
  if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label positive_left;
    __ testl(left_reg, left_reg);
    __ j(not_sign, &positive_left, Label::kNear);
    __ idivl(right_reg);
    __ testl(result_reg, result_reg);
    DeoptimizeIf(zero, instr, "minus zero");
    __ jmp(&done, Label::kNear);
    __ bind(&positive_left);
  }
  __ idivl(right_reg);
  __ bind(&done);
}
void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(dividend.is(ToRegister(instr->result())));

  // If the divisor is positive, things are easy: there can be no deopts and
  // we can simply do an arithmetic right shift.
  if (divisor == 1) return;
  int32_t shift = WhichPowerOf2Abs(divisor);
  if (divisor > 1) {
    __ sarl(dividend, Immediate(shift));
    return;
  }

  // If the divisor is negative, we have to negate and handle edge cases.
  __ negl(dividend);
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    DeoptimizeIf(zero, instr, "minus zero");
  }

  // Dividing by -1 is basically negation, unless we overflow.
  if (divisor == -1) {
    if (instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
      DeoptimizeIf(overflow, instr, "overflow");
    }
    return;
  }

  // If the negation could not overflow, an arithmetic shift suffices.
  if (!instr->hydrogen()->CheckFlag(HValue::kLeftCanBeMinInt)) {
    __ sarl(dividend, Immediate(shift));
    return;
  }

  Label not_kmin_int, done;
  __ j(no_overflow, &not_kmin_int, Label::kNear);
  __ movl(dividend, Immediate(kMinInt / divisor));
  __ jmp(&done, Label::kNear);
  __ bind(&not_kmin_int);
  __ sarl(dividend, Immediate(shift));
  __ bind(&done);
}
void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(rdx));

  if (divisor == 0) {
    DeoptimizeIf(no_condition, instr, "division by zero");
    return;
  }

  // Check for (0 / -x) that will produce negative zero.
  HMathFloorOfDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    __ testl(dividend, dividend);
    DeoptimizeIf(zero, instr, "minus zero");
  }

  // Easy case: we need no dynamic check for the dividend and the flooring
  // division is the same as the truncating division.
  if ((divisor > 0 && !hdiv->CheckFlag(HValue::kLeftCanBeNegative)) ||
      (divisor < 0 && !hdiv->CheckFlag(HValue::kLeftCanBePositive))) {
    __ TruncatingDiv(dividend, Abs(divisor));
    if (divisor < 0) __ negl(rdx);
    return;
  }

  // In the general case we may need to adjust before and after the truncating
  // division to get a flooring division.
  Register temp = ToRegister(instr->temp3());
  DCHECK(!temp.is(dividend) && !temp.is(rax) && !temp.is(rdx));
  Label needs_adjustment, done;
  __ cmpl(dividend, Immediate(0));
  __ j(divisor > 0 ? less : greater, &needs_adjustment, Label::kNear);
  __ TruncatingDiv(dividend, Abs(divisor));
  if (divisor < 0) __ negl(rdx);
  __ jmp(&done, Label::kNear);
  __ bind(&needs_adjustment);
  __ leal(temp, Operand(dividend, divisor > 0 ? 1 : -1));
  __ TruncatingDiv(temp, Abs(divisor));
  if (divisor < 0) __ negl(rdx);
  __ decl(rdx);
  __ bind(&done);
}
void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
  HBinaryOperation* hdiv = instr->hydrogen();
  Register dividend = ToRegister(instr->dividend());
  Register divisor = ToRegister(instr->divisor());
  Register remainder = ToRegister(instr->temp());
  Register result = ToRegister(instr->result());
  DCHECK(dividend.is(rax));
  DCHECK(remainder.is(rdx));
  DCHECK(result.is(rax));
  DCHECK(!divisor.is(rax));
  DCHECK(!divisor.is(rdx));

  // Check for x / 0.
  if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(divisor, divisor);
    DeoptimizeIf(zero, instr, "division by zero");
  }

  // Check for (0 / -x) that will produce negative zero.
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label dividend_not_zero;
    __ testl(dividend, dividend);
    __ j(not_zero, &dividend_not_zero, Label::kNear);
    __ testl(divisor, divisor);
    DeoptimizeIf(sign, instr, "minus zero");
    __ bind(&dividend_not_zero);
  }

  // Check for (kMinInt / -1).
  if (hdiv->CheckFlag(HValue::kCanOverflow)) {
    Label dividend_not_min_int;
    __ cmpl(dividend, Immediate(kMinInt));
    __ j(not_zero, &dividend_not_min_int, Label::kNear);
    __ cmpl(divisor, Immediate(-1));
    DeoptimizeIf(zero, instr, "overflow");
    __ bind(&dividend_not_min_int);
  }

  // Sign extend to rdx (= remainder).
  __ cdq();
  __ idivl(divisor);

  // If the quotient is negative and there is a remainder, round toward
  // negative infinity by decrementing the (truncated) quotient.
  Label done;
  __ testl(remainder, remainder);
  __ j(zero, &done, Label::kNear);
  __ xorl(remainder, divisor);
  __ sarl(remainder, Immediate(31));
  __ addl(result, remainder);
  __ bind(&done);
}
void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  Register result = ToRegister(instr->result());
  DCHECK(divisor == kMinInt || base::bits::IsPowerOfTwo32(Abs(divisor)));
  DCHECK(!result.is(dividend));

  // Check for (0 / -x) that will produce negative zero.
  HDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    __ testl(dividend, dividend);
    DeoptimizeIf(zero, instr, "minus zero");
  }
  // Check for (kMinInt / -1).
  if (hdiv->CheckFlag(HValue::kCanOverflow) && divisor == -1) {
    __ cmpl(dividend, Immediate(kMinInt));
    DeoptimizeIf(zero, instr, "overflow");
  }
  // Deoptimize if remainder will not be 0.
  if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
      divisor != 1 && divisor != -1) {
    int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
    __ testl(dividend, Immediate(mask));
    DeoptimizeIf(not_zero, instr, "lost precision");
  }
  __ Move(result, dividend);
  int32_t shift = WhichPowerOf2Abs(divisor);
  if (shift > 0) {
    // The arithmetic shift is always OK, the 'if' is an optimization only.
    if (shift > 1) __ sarl(result, Immediate(31));
    __ shrl(result, Immediate(32 - shift));
    __ addl(result, dividend);
    __ sarl(result, Immediate(shift));
  }
  if (divisor < 0) __ negl(result);
}
void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
  Register dividend = ToRegister(instr->dividend());
  int32_t divisor = instr->divisor();
  DCHECK(ToRegister(instr->result()).is(rdx));

  if (divisor == 0) {
    DeoptimizeIf(no_condition, instr, "division by zero");
    return;
  }

  // Check for (0 / -x) that will produce negative zero.
  HDiv* hdiv = instr->hydrogen();
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) {
    __ testl(dividend, dividend);
    DeoptimizeIf(zero, instr, "minus zero");
  }

  __ TruncatingDiv(dividend, Abs(divisor));
  if (divisor < 0) __ negl(rdx);

  if (!hdiv->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
    // Deoptimize if the division was not exact.
    __ movl(rax, rdx);
    __ imull(rax, rax, Immediate(divisor));
    __ subl(rax, dividend);
    DeoptimizeIf(not_equal, instr, "lost precision");
  }
}
void LCodeGen::DoDivI(LDivI* instr) {
  HBinaryOperation* hdiv = instr->hydrogen();
  Register dividend = ToRegister(instr->dividend());
  Register divisor = ToRegister(instr->divisor());
  Register remainder = ToRegister(instr->temp());
  DCHECK(dividend.is(rax));
  DCHECK(remainder.is(rdx));
  DCHECK(ToRegister(instr->result()).is(rax));
  DCHECK(!divisor.is(rax));
  DCHECK(!divisor.is(rdx));

  // Check for x / 0.
  if (hdiv->CheckFlag(HValue::kCanBeDivByZero)) {
    __ testl(divisor, divisor);
    DeoptimizeIf(zero, instr, "division by zero");
  }

  // Check for (0 / -x) that will produce negative zero.
  if (hdiv->CheckFlag(HValue::kBailoutOnMinusZero)) {
    Label dividend_not_zero;
    __ testl(dividend, dividend);
    __ j(not_zero, &dividend_not_zero, Label::kNear);
    __ testl(divisor, divisor);
    DeoptimizeIf(sign, instr, "minus zero");
    __ bind(&dividend_not_zero);
  }

  // Check for (kMinInt / -1).
  if (hdiv->CheckFlag(HValue::kCanOverflow)) {
    Label dividend_not_min_int;
    __ cmpl(dividend, Immediate(kMinInt));
    __ j(not_zero, &dividend_not_min_int, Label::kNear);
    __ cmpl(divisor, Immediate(-1));
    DeoptimizeIf(zero, instr, "overflow");
    __ bind(&dividend_not_min_int);
  }

  // Sign extend to rdx (= remainder).
  __ cdq();
  __ idivl(divisor);

  if (!hdiv->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
    // Deoptimize if remainder is not 0.
    __ testl(remainder, remainder);
    DeoptimizeIf(not_zero, instr, "lost precision");
  }
}
void LCodeGen::DoMulI(LMulI* instr) {
  Register left = ToRegister(instr->left());
  LOperand* right = instr->right();

  if (instr->hydrogen_value()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    if (instr->hydrogen_value()->representation().IsSmi()) {
      __ movp(kScratchRegister, left);
    } else {
      __ movl(kScratchRegister, left);
    }
  }

  bool can_overflow =
      instr->hydrogen()->CheckFlag(HValue::kCanOverflow);
  if (right->IsConstantOperand()) {
    int32_t right_value = ToInteger32(LConstantOperand::cast(right));
    if (right_value == -1) {
      __ negl(left);
    } else if (right_value == 0) {
      __ xorl(left, left);
    } else if (right_value == 2) {
      __ addl(left, left);
    } else if (!can_overflow) {
      // If the multiplication is known to not overflow, we can use
      // operations that don't set the overflow flag correctly.
      switch (right_value) {
        case 1:
          // Do nothing.
          break;
        case 3:
          __ leal(left, Operand(left, left, times_2, 0));
          break;
        case 4:
          __ shll(left, Immediate(2));
          break;
        case 5:
          __ leal(left, Operand(left, left, times_4, 0));
          break;
        case 8:
          __ shll(left, Immediate(3));
          break;
        case 9:
          __ leal(left, Operand(left, left, times_8, 0));
          break;
        case 16:
          __ shll(left, Immediate(4));
          break;
        default:
          __ imull(left, left, Immediate(right_value));
          break;
      }
    } else {
      __ imull(left, left, Immediate(right_value));
    }
  } else if (right->IsStackSlot()) {
    if (instr->hydrogen_value()->representation().IsSmi()) {
      __ SmiToInteger64(left, left);
      __ imulp(left, ToOperand(right));
    } else {
      __ imull(left, ToOperand(right));
    }
  } else {
    if (instr->hydrogen_value()->representation().IsSmi()) {
      __ SmiToInteger64(left, left);
      __ imulp(left, ToRegister(right));
    } else {
      __ imull(left, ToRegister(right));
    }
  }

  if (can_overflow) {
    DeoptimizeIf(overflow, instr, "overflow");
  }

  if (instr->hydrogen_value()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    // Bail out if the result is supposed to be negative zero.
    Label done;
    if (instr->hydrogen_value()->representation().IsSmi()) {
      __ testp(left, left);
    } else {
      __ testl(left, left);
    }
    __ j(not_zero, &done, Label::kNear);
    if (right->IsConstantOperand()) {
      // Constant can't be represented as 32-bit Smi due to immediate size
      // limit.
      DCHECK(SmiValuesAre32Bits()
          ? !instr->hydrogen_value()->representation().IsSmi()
          : SmiValuesAre31Bits());
      if (ToInteger32(LConstantOperand::cast(right)) < 0) {
        DeoptimizeIf(no_condition, instr, "minus zero");
      } else if (ToInteger32(LConstantOperand::cast(right)) == 0) {
        __ cmpl(kScratchRegister, Immediate(0));
        DeoptimizeIf(less, instr, "minus zero");
      }
    } else if (right->IsStackSlot()) {
      if (instr->hydrogen_value()->representation().IsSmi()) {
        __ orp(kScratchRegister, ToOperand(right));
      } else {
        __ orl(kScratchRegister, ToOperand(right));
      }
      DeoptimizeIf(sign, instr, "minus zero");
    } else {
      // Test the non-zero operand for negative sign.
      if (instr->hydrogen_value()->representation().IsSmi()) {
        __ orp(kScratchRegister, ToRegister(right));
      } else {
        __ orl(kScratchRegister, ToRegister(right));
      }
      DeoptimizeIf(sign, instr, "minus zero");
    }
    __ bind(&done);
  }
}
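// DoMulI: multiplications by small constants are strength-reduced. x*3, x*5
// and x*9 become a single lea (base plus scaled index), powers of two become
// shifts, and only the general case pays for an imul. Because 0 * x == 0
// erases the sign information, the -0 check ORs the operand saved in
// kScratchRegister with the other operand and deoptimizes on a set sign bit.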
void LCodeGen::DoBitI(LBitI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  DCHECK(left->Equals(instr->result()));
  DCHECK(left->IsRegister());

  if (right->IsConstantOperand()) {
    int32_t right_operand =
        ToRepresentation(LConstantOperand::cast(right),
                         instr->hydrogen()->right()->representation());
    switch (instr->op()) {
      case Token::BIT_AND:
        __ andl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_OR:
        __ orl(ToRegister(left), Immediate(right_operand));
        break;
      case Token::BIT_XOR:
        if (right_operand == int32_t(~0)) {
          __ notl(ToRegister(left));
        } else {
          __ xorl(ToRegister(left), Immediate(right_operand));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else if (right->IsStackSlot()) {
    switch (instr->op()) {
      case Token::BIT_AND:
        if (instr->IsInteger32()) {
          __ andl(ToRegister(left), ToOperand(right));
        } else {
          __ andp(ToRegister(left), ToOperand(right));
        }
        break;
      case Token::BIT_OR:
        if (instr->IsInteger32()) {
          __ orl(ToRegister(left), ToOperand(right));
        } else {
          __ orp(ToRegister(left), ToOperand(right));
        }
        break;
      case Token::BIT_XOR:
        if (instr->IsInteger32()) {
          __ xorl(ToRegister(left), ToOperand(right));
        } else {
          __ xorp(ToRegister(left), ToOperand(right));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    DCHECK(right->IsRegister());
    switch (instr->op()) {
      case Token::BIT_AND:
        if (instr->IsInteger32()) {
          __ andl(ToRegister(left), ToRegister(right));
        } else {
          __ andp(ToRegister(left), ToRegister(right));
        }
        break;
      case Token::BIT_OR:
        if (instr->IsInteger32()) {
          __ orl(ToRegister(left), ToRegister(right));
        } else {
          __ orp(ToRegister(left), ToRegister(right));
        }
        break;
      case Token::BIT_XOR:
        if (instr->IsInteger32()) {
          __ xorl(ToRegister(left), ToRegister(right));
        } else {
          __ xorp(ToRegister(left), ToRegister(right));
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
void LCodeGen::DoShiftI(LShiftI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  DCHECK(left->Equals(instr->result()));
  DCHECK(left->IsRegister());
  if (right->IsRegister()) {
    DCHECK(ToRegister(right).is(rcx));

    switch (instr->op()) {
      case Token::ROR:
        __ rorl_cl(ToRegister(left));
        break;
      case Token::SAR:
        __ sarl_cl(ToRegister(left));
        break;
      case Token::SHR:
        __ shrl_cl(ToRegister(left));
        if (instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr, "negative value");
        }
        break;
      case Token::SHL:
        __ shll_cl(ToRegister(left));
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    int32_t value = ToInteger32(LConstantOperand::cast(right));
    uint8_t shift_count = static_cast<uint8_t>(value & 0x1F);
    switch (instr->op()) {
      case Token::ROR:
        if (shift_count != 0) {
          __ rorl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SAR:
        if (shift_count != 0) {
          __ sarl(ToRegister(left), Immediate(shift_count));
        }
        break;
      case Token::SHR:
        if (shift_count != 0) {
          __ shrl(ToRegister(left), Immediate(shift_count));
        } else if (instr->can_deopt()) {
          __ testl(ToRegister(left), ToRegister(left));
          DeoptimizeIf(negative, instr, "negative value");
        }
        break;
      case Token::SHL:
        if (shift_count != 0) {
          if (instr->hydrogen_value()->representation().IsSmi()) {
            if (SmiValuesAre32Bits()) {
              __ shlp(ToRegister(left), Immediate(shift_count));
            } else {
              DCHECK(SmiValuesAre31Bits());
              if (instr->can_deopt()) {
                if (shift_count != 1) {
                  __ shll(ToRegister(left), Immediate(shift_count - 1));
                }
                __ Integer32ToSmi(ToRegister(left), ToRegister(left));
                DeoptimizeIf(overflow, instr, "overflow");
              } else {
                __ shll(ToRegister(left), Immediate(shift_count));
              }
            }
          } else {
            __ shll(ToRegister(left), Immediate(shift_count));
          }
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}
void LCodeGen::DoSubI(LSubI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  DCHECK(left->Equals(instr->result()));

  if (right->IsConstantOperand()) {
    int32_t right_operand =
        ToRepresentation(LConstantOperand::cast(right),
                         instr->hydrogen()->right()->representation());
    __ subl(ToRegister(left), Immediate(right_operand));
  } else if (right->IsRegister()) {
    if (instr->hydrogen_value()->representation().IsSmi()) {
      __ subp(ToRegister(left), ToRegister(right));
    } else {
      __ subl(ToRegister(left), ToRegister(right));
    }
  } else {
    if (instr->hydrogen_value()->representation().IsSmi()) {
      __ subp(ToRegister(left), ToOperand(right));
    } else {
      __ subl(ToRegister(left), ToOperand(right));
    }
  }

  if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
    DeoptimizeIf(overflow, instr, "overflow");
  }
}
void LCodeGen::DoConstantI(LConstantI* instr) {
  Register dst = ToRegister(instr->result());
  if (instr->value() == 0) {
    __ xorl(dst, dst);
  } else {
    __ movl(dst, Immediate(instr->value()));
  }
}


void LCodeGen::DoConstantS(LConstantS* instr) {
  __ Move(ToRegister(instr->result()), instr->value());
}


void LCodeGen::DoConstantD(LConstantD* instr) {
  DCHECK(instr->result()->IsDoubleRegister());
  XMMRegister res = ToDoubleRegister(instr->result());
  double v = instr->value();
  uint64_t int_val = bit_cast<uint64_t, double>(v);
  // Use xorps to produce +0.0 in a fast and compact way, but avoid it if the
  // constant is -0.0 (whose bit pattern is nonzero).
  if (int_val == 0) {
    __ xorps(res, res);
  } else {
    Register tmp = ToRegister(instr->temp());
    __ Set(tmp, int_val);
    __ movq(res, tmp);
  }
}


void LCodeGen::DoConstantE(LConstantE* instr) {
  __ LoadAddress(ToRegister(instr->result()), instr->value());
}


void LCodeGen::DoConstantT(LConstantT* instr) {
  Handle<Object> object = instr->value(isolate());
  AllowDeferredHandleDereference smi_check;
  __ Move(ToRegister(instr->result()), object);
}
void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
  Register result = ToRegister(instr->result());
  Register map = ToRegister(instr->value());
  __ EnumLength(result, map);
}


void LCodeGen::DoDateField(LDateField* instr) {
  Register object = ToRegister(instr->date());
  Register result = ToRegister(instr->result());
  Smi* index = instr->index();
  Label runtime, done, not_date_object;
  DCHECK(object.is(result));
  DCHECK(object.is(rax));

  // Deoptimize if the input is not a JSDate object.
  Condition cc = masm()->CheckSmi(object);
  DeoptimizeIf(cc, instr, "Smi");
  __ CmpObjectType(object, JS_DATE_TYPE, kScratchRegister);
  DeoptimizeIf(not_equal, instr, "not a date object");

  if (index->value() == 0) {
    __ movp(result, FieldOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      Operand stamp_operand = __ ExternalOperand(stamp);
      __ movp(kScratchRegister, stamp_operand);
      __ cmpp(kScratchRegister,
              FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ movp(result, FieldOperand(object, JSDate::kValueOffset +
                                           kPointerSize * index->value()));
      __ jmp(&done, Label::kNear);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2);
    __ movp(arg_reg_1, object);
    __ Move(arg_reg_2, index, Assembler::RelocInfoNone());
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }
}


Operand LCodeGen::BuildSeqStringOperand(Register string,
                                        LOperand* index,
                                        String::Encoding encoding) {
  if (index->IsConstantOperand()) {
    int offset = ToInteger32(LConstantOperand::cast(index));
    if (encoding == String::TWO_BYTE_ENCODING) {
      offset *= kUC16Size;
    }
    STATIC_ASSERT(kCharSize == 1);
    return FieldOperand(string, SeqString::kHeaderSize + offset);
  }
  return FieldOperand(
      string, ToRegister(index),
      encoding == String::ONE_BYTE_ENCODING ? times_1 : times_2,
      SeqString::kHeaderSize);
}
void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
  String::Encoding encoding = instr->hydrogen()->encoding();
  Register result = ToRegister(instr->result());
  Register string = ToRegister(instr->string());

  if (FLAG_debug_code) {
    // Check that the string's map matches the expected encoding.
    __ Push(string);
    __ movp(string, FieldOperand(string, HeapObject::kMapOffset));
    __ movzxbp(string, FieldOperand(string, Map::kInstanceTypeOffset));

    __ andb(string,
            Immediate(kStringRepresentationMask | kStringEncodingMask));
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ cmpp(string, Immediate(encoding == String::ONE_BYTE_ENCODING
                              ? one_byte_seq_type : two_byte_seq_type));
    __ Check(equal, kUnexpectedStringType);
    __ Pop(string);
  }

  Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
  if (encoding == String::ONE_BYTE_ENCODING) {
    __ movzxbl(result, operand);
  } else {
    __ movzxwl(result, operand);
  }
}


void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
  String::Encoding encoding = instr->hydrogen()->encoding();
  Register string = ToRegister(instr->string());

  if (FLAG_debug_code) {
    Register value = ToRegister(instr->value());
    Register index = ToRegister(instr->index());
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    int encoding_mask =
        encoding == String::ONE_BYTE_ENCODING
        ? one_byte_seq_type : two_byte_seq_type;
    __ EmitSeqStringSetCharCheck(string, index, value, encoding_mask);
  }

  Operand operand = BuildSeqStringOperand(string, instr->index(), encoding);
  if (instr->value()->IsConstantOperand()) {
    int value = ToInteger32(LConstantOperand::cast(instr->value()));
    DCHECK_LE(0, value);
    if (encoding == String::ONE_BYTE_ENCODING) {
      DCHECK_LE(value, String::kMaxOneByteCharCode);
      __ movb(operand, Immediate(value));
    } else {
      DCHECK_LE(value, String::kMaxUtf16CodeUnit);
      __ movw(operand, Immediate(value));
    }
  } else {
    Register value = ToRegister(instr->value());
    if (encoding == String::ONE_BYTE_ENCODING) {
      __ movb(operand, value);
    } else {
      __ movw(operand, value);
    }
  }
}
void LCodeGen::DoAddI(LAddI* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();

  Representation target_rep = instr->hydrogen()->representation();
  bool is_p = target_rep.IsSmi() || target_rep.IsExternal();

  if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) {
    if (right->IsConstantOperand()) {
      int32_t offset =
          ToRepresentation(LConstantOperand::cast(right),
                           instr->hydrogen()->right()->representation());
      if (is_p) {
        __ leap(ToRegister(instr->result()),
                MemOperand(ToRegister(left), offset));
      } else {
        __ leal(ToRegister(instr->result()),
                MemOperand(ToRegister(left), offset));
      }
    } else {
      Operand address(ToRegister(left), ToRegister(right), times_1, 0);
      if (is_p) {
        __ leap(ToRegister(instr->result()), address);
      } else {
        __ leal(ToRegister(instr->result()), address);
      }
    }
  } else {
    if (right->IsConstantOperand()) {
      int32_t right_operand =
          ToRepresentation(LConstantOperand::cast(right),
                           instr->hydrogen()->right()->representation());
      if (is_p) {
        __ addp(ToRegister(left), Immediate(right_operand));
      } else {
        __ addl(ToRegister(left), Immediate(right_operand));
      }
    } else if (right->IsRegister()) {
      if (is_p) {
        __ addp(ToRegister(left), ToRegister(right));
      } else {
        __ addl(ToRegister(left), ToRegister(right));
      }
    } else {
      if (is_p) {
        __ addp(ToRegister(left), ToOperand(right));
      } else {
        __ addl(ToRegister(left), ToOperand(right));
      }
    }
    if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
      DeoptimizeIf(overflow, instr, "overflow");
    }
  }
}
void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  DCHECK(left->Equals(instr->result()));
  HMathMinMax::Operation operation = instr->hydrogen()->operation();
  if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
    Label return_left;
    Condition condition = (operation == HMathMinMax::kMathMin)
        ? less_equal
        : greater_equal;
    Register left_reg = ToRegister(left);
    if (right->IsConstantOperand()) {
      Immediate right_imm = Immediate(
          ToRepresentation(LConstantOperand::cast(right),
                           instr->hydrogen()->right()->representation()));
      DCHECK(SmiValuesAre32Bits()
          ? !instr->hydrogen()->representation().IsSmi()
          : SmiValuesAre31Bits());
      __ cmpl(left_reg, right_imm);
      __ j(condition, &return_left, Label::kNear);
      __ movp(left_reg, right_imm);
    } else if (right->IsRegister()) {
      Register right_reg = ToRegister(right);
      if (instr->hydrogen_value()->representation().IsSmi()) {
        __ cmpp(left_reg, right_reg);
      } else {
        __ cmpl(left_reg, right_reg);
      }
      __ j(condition, &return_left, Label::kNear);
      __ movp(left_reg, right_reg);
    } else {
      Operand right_op = ToOperand(right);
      if (instr->hydrogen_value()->representation().IsSmi()) {
        __ cmpp(left_reg, right_op);
      } else {
        __ cmpl(left_reg, right_op);
      }
      __ j(condition, &return_left, Label::kNear);
      __ movp(left_reg, right_op);
    }
    __ bind(&return_left);
  } else {
    DCHECK(instr->hydrogen()->representation().IsDouble());
    Label check_nan_left, check_zero, return_left, return_right;
    Condition condition = (operation == HMathMinMax::kMathMin) ? below : above;
    XMMRegister left_reg = ToDoubleRegister(left);
    XMMRegister right_reg = ToDoubleRegister(right);
    __ ucomisd(left_reg, right_reg);
    __ j(parity_even, &check_nan_left, Label::kNear);  // At least one NaN.
    __ j(equal, &check_zero, Label::kNear);            // left == right.
    __ j(condition, &return_left, Label::kNear);
    __ jmp(&return_right, Label::kNear);

    __ bind(&check_zero);
    XMMRegister xmm_scratch = double_scratch0();
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(left_reg, xmm_scratch);
    __ j(not_equal, &return_left, Label::kNear);  // left == right != 0.
    // At this point, both left and right are either 0 or -0.
    if (operation == HMathMinMax::kMathMin) {
      __ orps(left_reg, right_reg);
    } else {
      // Since we operate on +0 and/or -0, addsd and andsd have the same
      // effect.
      __ addsd(left_reg, right_reg);
    }
    __ jmp(&return_left, Label::kNear);

    __ bind(&check_nan_left);
    __ ucomisd(left_reg, left_reg);  // NaN check.
    __ j(parity_even, &return_left, Label::kNear);
    __ bind(&return_right);
    __ movaps(left_reg, right_reg);

    __ bind(&return_left);
  }
}
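// DoMathMinMax (double case): a plain compare-and-move is not enough because
// NaN operands must propagate (ucomisd sets the parity flag), and +0/-0
// compare equal even though min(+0, -0) == -0 and max(+0, -0) == +0. The
// orps/addsd trick merges the sign bits to produce the right zero without a
// branch.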
void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
  XMMRegister left = ToDoubleRegister(instr->left());
  XMMRegister right = ToDoubleRegister(instr->right());
  XMMRegister result = ToDoubleRegister(instr->result());
  // All operations except MOD are computed in-place.
  DCHECK(instr->op() == Token::MOD || left.is(result));
  switch (instr->op()) {
    case Token::ADD:
      __ addsd(left, right);
      break;
    case Token::SUB:
      __ subsd(left, right);
      break;
    case Token::MUL:
      __ mulsd(left, right);
      break;
    case Token::DIV:
      __ divsd(left, right);
      // Don't delete this mov. It may improve performance on some CPUs,
      // when there is a mulsd depending on the result.
      __ movaps(left, left);
      break;
    case Token::MOD: {
      XMMRegister xmm_scratch = double_scratch0();
      __ PrepareCallCFunction(2);
      __ movaps(xmm_scratch, left);
      DCHECK(right.is(xmm1));
      __ CallCFunction(
          ExternalReference::mod_two_doubles_operation(isolate()), 2);
      __ movaps(result, xmm_scratch);
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}


void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->left()).is(rdx));
  DCHECK(ToRegister(instr->right()).is(rax));
  DCHECK(ToRegister(instr->result()).is(rax));

  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), instr->op(), NO_OVERWRITE).code();
  CallCode(code, RelocInfo::CODE_TARGET, instr);
}
template<class InstrType>
void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
  int left_block = instr->TrueDestination(chunk_);
  int right_block = instr->FalseDestination(chunk_);

  int next_block = GetNextEmittedBlock();

  if (right_block == left_block || cc == no_condition) {
    EmitGoto(left_block);
  } else if (left_block == next_block) {
    __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block));
  } else if (right_block == next_block) {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
  } else {
    __ j(cc, chunk_->GetAssemblyLabel(left_block));
    if (cc != always) {
      __ jmp(chunk_->GetAssemblyLabel(right_block));
    }
  }
}


template<class InstrType>
void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
  int false_block = instr->FalseDestination(chunk_);
  __ j(cc, chunk_->GetAssemblyLabel(false_block));
}
void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
  __ int3();
}


void LCodeGen::DoBranch(LBranch* instr) {
  Representation r = instr->hydrogen()->value()->representation();
  if (r.IsInteger32()) {
    DCHECK(!info()->IsStub());
    Register reg = ToRegister(instr->value());
    __ testl(reg, reg);
    EmitBranch(instr, not_zero);
  } else if (r.IsSmi()) {
    DCHECK(!info()->IsStub());
    Register reg = ToRegister(instr->value());
    __ testp(reg, reg);
    EmitBranch(instr, not_zero);
  } else if (r.IsDouble()) {
    DCHECK(!info()->IsStub());
    XMMRegister reg = ToDoubleRegister(instr->value());
    XMMRegister xmm_scratch = double_scratch0();
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(reg, xmm_scratch);
    EmitBranch(instr, not_equal);
  } else {
    DCHECK(r.IsTagged());
    Register reg = ToRegister(instr->value());
    HType type = instr->hydrogen()->value()->type();
    if (type.IsBoolean()) {
      DCHECK(!info()->IsStub());
      __ CompareRoot(reg, Heap::kTrueValueRootIndex);
      EmitBranch(instr, equal);
    } else if (type.IsSmi()) {
      DCHECK(!info()->IsStub());
      __ SmiCompare(reg, Smi::FromInt(0));
      EmitBranch(instr, not_equal);
    } else if (type.IsJSArray()) {
      DCHECK(!info()->IsStub());
      EmitBranch(instr, no_condition);
    } else if (type.IsHeapNumber()) {
      DCHECK(!info()->IsStub());
      XMMRegister xmm_scratch = double_scratch0();
      __ xorps(xmm_scratch, xmm_scratch);
      __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
      EmitBranch(instr, not_equal);
    } else if (type.IsString()) {
      DCHECK(!info()->IsStub());
      __ cmpp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
      EmitBranch(instr, not_equal);
    } else {
      ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
      // Avoid deopts in the case where we've never executed this path before.
      if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();

      if (expected.Contains(ToBooleanStub::UNDEFINED)) {
        // undefined -> false.
        __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
        __ j(equal, instr->FalseLabel(chunk_));
      }
      if (expected.Contains(ToBooleanStub::BOOLEAN)) {
        // true -> true.
        __ CompareRoot(reg, Heap::kTrueValueRootIndex);
        __ j(equal, instr->TrueLabel(chunk_));
        // false -> false.
        __ CompareRoot(reg, Heap::kFalseValueRootIndex);
        __ j(equal, instr->FalseLabel(chunk_));
      }
      if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
        // 'null' -> false.
        __ CompareRoot(reg, Heap::kNullValueRootIndex);
        __ j(equal, instr->FalseLabel(chunk_));
      }

      if (expected.Contains(ToBooleanStub::SMI)) {
        // Smis: 0 -> false, all other -> true.
        __ Cmp(reg, Smi::FromInt(0));
        __ j(equal, instr->FalseLabel(chunk_));
        __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
      } else if (expected.NeedsMap()) {
        // If we need a map later and have a Smi -> deopt.
        __ testb(reg, Immediate(kSmiTagMask));
        DeoptimizeIf(zero, instr, "Smi");
      }

      const Register map = kScratchRegister;
      if (expected.NeedsMap()) {
        __ movp(map, FieldOperand(reg, HeapObject::kMapOffset));

        if (expected.CanBeUndetectable()) {
          // Undetectable -> false.
          __ testb(FieldOperand(map, Map::kBitFieldOffset),
                   Immediate(1 << Map::kIsUndetectable));
          __ j(not_zero, instr->FalseLabel(chunk_));
        }
      }

      if (expected.Contains(ToBooleanStub::STRING)) {
        // String value -> false iff empty.
        Label not_string;
        __ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
        __ j(above_equal, &not_string, Label::kNear);
        __ cmpp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
        __ j(not_zero, instr->TrueLabel(chunk_));
        __ jmp(instr->FalseLabel(chunk_));
        __ bind(&not_string);
      }

      if (expected.Contains(ToBooleanStub::SYMBOL)) {
        // Symbol value -> true.
        __ CmpInstanceType(map, SYMBOL_TYPE);
        __ j(equal, instr->TrueLabel(chunk_));
      }

      if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
        // heap number -> false iff +0, -0, or NaN.
        Label not_heap_number;
        __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
        __ j(not_equal, &not_heap_number, Label::kNear);
        XMMRegister xmm_scratch = double_scratch0();
        __ xorps(xmm_scratch, xmm_scratch);
        __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset));
        __ j(zero, instr->FalseLabel(chunk_));
        __ jmp(instr->TrueLabel(chunk_));
        __ bind(&not_heap_number);
      }

      if (!expected.IsGeneric()) {
        // We've seen something for the first time -> deopt.
        DeoptimizeIf(no_condition, instr, "unexpected object");
      }
    }
  }
}


void LCodeGen::EmitGoto(int block) {
  if (!IsNextEmittedBlock(block)) {
    __ jmp(chunk_->GetAssemblyLabel(chunk_->LookupDestination(block)));
  }
}
void LCodeGen::DoGoto(LGoto* instr) {
  EmitGoto(instr->block_id());
}


Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) {
  Condition cond = no_condition;
  switch (op) {
    case Token::EQ:
    case Token::EQ_STRICT:
      cond = equal;
      break;
    case Token::NE:
    case Token::NE_STRICT:
      cond = not_equal;
      break;
    // ... (LT/GT/LTE/GTE map to the signed or unsigned x64 conditions)
    case Token::IN:
    case Token::INSTANCEOF:
    default:
      UNREACHABLE();
  }
  return cond;
}
void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
  LOperand* left = instr->left();
  LOperand* right = instr->right();
  bool is_unsigned =
      instr->is_double() ||
      instr->hydrogen()->left()->CheckFlag(HInstruction::kUint32) ||
      instr->hydrogen()->right()->CheckFlag(HInstruction::kUint32);
  Condition cc = TokenToCondition(instr->op(), is_unsigned);

  if (left->IsConstantOperand() && right->IsConstantOperand()) {
    // We can statically evaluate the comparison.
    double left_val = ToDouble(LConstantOperand::cast(left));
    double right_val = ToDouble(LConstantOperand::cast(right));
    int next_block = EvalComparison(instr->op(), left_val, right_val) ?
        instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_);
    EmitGoto(next_block);
  } else {
    if (instr->is_double()) {
      // Don't base result on EFLAGS when a NaN is involved. Instead
      // jump to the false block.
      __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
      __ j(parity_even, instr->FalseLabel(chunk_));
    } else {
      int32_t value;
      if (right->IsConstantOperand()) {
        value = ToInteger32(LConstantOperand::cast(right));
        if (instr->hydrogen_value()->representation().IsSmi()) {
          __ Cmp(ToRegister(left), Smi::FromInt(value));
        } else {
          __ cmpl(ToRegister(left), Immediate(value));
        }
      } else if (left->IsConstantOperand()) {
        value = ToInteger32(LConstantOperand::cast(left));
        if (instr->hydrogen_value()->representation().IsSmi()) {
          if (right->IsRegister()) {
            __ Cmp(ToRegister(right), Smi::FromInt(value));
          } else {
            __ Cmp(ToOperand(right), Smi::FromInt(value));
          }
        } else if (right->IsRegister()) {
          __ cmpl(ToRegister(right), Immediate(value));
        } else {
          __ cmpl(ToOperand(right), Immediate(value));
        }
        // We commuted the operands, so commute the condition.
        cc = CommuteCondition(cc);
      } else if (instr->hydrogen_value()->representation().IsSmi()) {
        if (right->IsRegister()) {
          __ cmpp(ToRegister(left), ToRegister(right));
        } else {
          __ cmpp(ToRegister(left), ToOperand(right));
        }
      } else {
        if (right->IsRegister()) {
          __ cmpl(ToRegister(left), ToRegister(right));
        } else {
          __ cmpl(ToRegister(left), ToOperand(right));
        }
      }
    }
    EmitBranch(instr, cc);
  }
}
void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
  Register left = ToRegister(instr->left());

  if (instr->right()->IsConstantOperand()) {
    Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right()));
    __ Cmp(left, right);
  } else {
    Register right = ToRegister(instr->right());
    __ cmpp(left, right);
  }
  EmitBranch(instr, equal);
}


void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
  if (instr->hydrogen()->representation().IsTagged()) {
    Register input_reg = ToRegister(instr->object());
    __ Cmp(input_reg, factory()->the_hole_value());
    EmitBranch(instr, equal);
    return;
  }

  XMMRegister input_reg = ToDoubleRegister(instr->object());
  __ ucomisd(input_reg, input_reg);
  EmitFalseBranch(instr, parity_odd);
  // The input is a NaN; check whether its bit pattern is the hole NaN.
  // ...
}
void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
  Representation rep = instr->hydrogen()->value()->representation();
  DCHECK(!rep.IsInteger32());

  if (rep.IsDouble()) {
    XMMRegister value = ToDoubleRegister(instr->value());
    XMMRegister xmm_scratch = double_scratch0();
    __ xorps(xmm_scratch, xmm_scratch);
    __ ucomisd(xmm_scratch, value);
    EmitFalseBranch(instr, not_equal);
    __ movmskpd(kScratchRegister, value);
    __ testl(kScratchRegister, Immediate(1));
    EmitBranch(instr, not_zero);
  } else {
    Register value = ToRegister(instr->value());
    Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
    __ CheckMap(value, map, instr->FalseLabel(chunk()), DO_SMI_CHECK);
    __ cmpl(FieldOperand(value, HeapNumber::kExponentOffset),
            Immediate(0x1));
    EmitFalseBranch(instr, no_overflow);
    __ cmpl(FieldOperand(value, HeapNumber::kMantissaOffset),
            Immediate(0x00000000));
    EmitBranch(instr, equal);
  }
}
Condition LCodeGen::EmitIsObject(Register input,
                                 Label* is_not_object,
                                 Label* is_object) {
  DCHECK(!input.is(kScratchRegister));

  __ JumpIfSmi(input, is_not_object);

  __ CompareRoot(input, Heap::kNullValueRootIndex);
  __ j(equal, is_object);

  // Reject undetectable objects and anything outside the non-callable
  // spec-object instance-type range.
  // ...
  return below_equal;
}


void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
  Register reg = ToRegister(instr->value());

  Condition true_cond = EmitIsObject(
      reg, instr->FalseLabel(chunk_), instr->TrueLabel(chunk_));

  EmitBranch(instr, true_cond);
}


Condition LCodeGen::EmitIsString(Register input,
                                 Register temp1,
                                 Label* is_not_string,
                                 SmiCheck check_needed) {
  if (check_needed == INLINE_SMI_CHECK) {
    __ JumpIfSmi(input, is_not_string);
  }

  Condition cond = masm_->IsObjectStringType(input, temp1, temp1);

  return cond;
}


void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
  Register reg = ToRegister(instr->value());
  Register temp = ToRegister(instr->temp());

  SmiCheck check_needed =
      instr->hydrogen()->value()->type().IsHeapObject()
          ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;

  Condition true_cond = EmitIsString(
      reg, temp, instr->FalseLabel(chunk_), check_needed);

  EmitBranch(instr, true_cond);
}


void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
  Condition is_smi;
  if (instr->value()->IsRegister()) {
    Register input = ToRegister(instr->value());
    is_smi = masm()->CheckSmi(input);
  } else {
    Operand input = ToOperand(instr->value());
    is_smi = masm()->CheckSmi(input);
  }
  EmitBranch(instr, is_smi);
}


void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
  Register input = ToRegister(instr->value());
  Register temp = ToRegister(instr->temp());

  if (!instr->hydrogen()->value()->type().IsHeapObject()) {
    __ JumpIfSmi(input, instr->FalseLabel(chunk_));
  }
  __ movp(temp, FieldOperand(input, HeapObject::kMapOffset));
  __ testb(FieldOperand(temp, Map::kBitFieldOffset),
           Immediate(1 << Map::kIsUndetectable));
  EmitBranch(instr, not_zero);
}
void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  Token::Value op = instr->op();

  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = TokenToCondition(op, false);
  __ testp(rax, rax);

  EmitBranch(instr, condition);
}


void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
  Register input = ToRegister(instr->value());

  if (!instr->hydrogen()->value()->type().IsHeapObject()) {
    __ JumpIfSmi(input, instr->FalseLabel(chunk_));
  }

  __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister);
  EmitBranch(instr, BranchCondition(instr->hydrogen()));
}


void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
  Register input = ToRegister(instr->value());
  Register result = ToRegister(instr->result());

  __ AssertString(input);

  __ movl(result, FieldOperand(input, String::kHashFieldOffset));
  DCHECK(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(result, result);
}


void LCodeGen::DoHasCachedArrayIndexAndBranch(
    LHasCachedArrayIndexAndBranch* instr) {
  Register input = ToRegister(instr->value());

  __ testl(FieldOperand(input, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  EmitBranch(instr, equal);
}


// Branches to a label or falls through with the answer in the z flag.
// Trashes the temp registers.
void LCodeGen::EmitClassOfTest(Label* is_true,
                               Label* is_false,
                               Handle<String> class_name,
                               Register input,
                               Register temp,
                               Register temp2) {
  DCHECK(!input.is(temp));
  DCHECK(!input.is(temp2));
  DCHECK(!temp.is(temp2));

  __ JumpIfSmi(input, is_false);

  if (String::Equals(isolate()->factory()->Function_string(), class_name)) {
    // Functions (and the object type range above them) have class 'Function'.
    // ...
  } else {
    // Non-function objects: check the instance-type range, then inspect the
    // constructor stored in the map.
    // ...
  }

  if (String::Equals(class_name, isolate()->factory()->Object_string())) {
    // Objects with a non-function constructor have class 'Object'.
    // ...
  }

  // The class name we are testing against is internalized since it's a
  // literal, so an identity compare suffices.
  DCHECK(class_name->IsInternalizedString());
  __ Cmp(temp, class_name);
  // End with the answer in the z flag.
}


void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
  Register input = ToRegister(instr->value());
  Register temp = ToRegister(instr->temp());
  Register temp2 = ToRegister(instr->temp2());
  Handle<String> class_name = instr->hydrogen()->class_name();

  EmitClassOfTest(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_),
                  class_name, input, temp, temp2);

  EmitBranch(instr, equal);
}
void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
  Register reg = ToRegister(instr->value());

  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
  EmitBranch(instr, equal);
}


void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
  __ Push(ToRegister(instr->left()));
  __ Push(ToRegister(instr->right()));
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  Label true_value, done;
  __ testp(rax, rax);
  __ j(zero, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}
void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
  class DeferredInstanceOfKnownGlobal FINAL : public LDeferredCode {
   public:
    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
                                  LInstanceOfKnownGlobal* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
    Label* map_check() { return &map_check_; }
   private:
    LInstanceOfKnownGlobal* instr_;
    Label map_check_;
  };

  DCHECK(ToRegister(instr->context()).is(rsi));
  DeferredInstanceOfKnownGlobal* deferred;
  deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);

  Label done, false_result;
  Register object = ToRegister(instr->value());

  // A Smi is not an instance of anything.
  __ JumpIfSmi(object, &false_result, Label::kNear);

  // This is the inlined call site instanceof cache. The two occurences of
  // the hole value will be patched to the last map/result pair generated by
  // the instanceof stub.
  Label cache_miss;
  // Use a temp register to avoid memory operands with variable lengths.
  Register map = ToRegister(instr->temp());
  __ movp(map, FieldOperand(object, HeapObject::kMapOffset));
  __ bind(deferred->map_check());  // Label for calculating code patching.
  Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
  __ Move(kScratchRegister, cache_cell, RelocInfo::CELL);
  __ cmpp(map, Operand(kScratchRegister, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Patched to load either true or false.
  __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
#ifdef DEBUG
  // Check that the code size between patch label and patch sites stays
  // invariant (see the comment in DoDeferredInstanceOfKnownGlobal).
  Label end_of_patched_code;
  __ bind(&end_of_patched_code);
  DCHECK(true);
#endif
  __ jmp(&done, Label::kNear);

  // The inlined call site cache did not match. Check for null and string
  // before calling the deferred code.
  __ bind(&cache_miss);  // Null is not an instance of anything.
  __ CompareRoot(object, Heap::kNullValueRootIndex);
  __ j(equal, &false_result, Label::kNear);

  // String values are not instances of anything.
  __ JumpIfNotString(object, kScratchRegister, deferred->entry());

  __ bind(&false_result);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);

  __ bind(deferred->exit());
  __ bind(&done);
}


void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                               Label* map_check) {
  {
    PushSafepointRegistersScope scope(this);
    InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
        InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
    InstanceofStub stub(isolate(), flags);

    __ Push(ToRegister(instr->value()));
    __ Push(instr->function());

    static const int kAdditionalDelta = kPointerSize == kInt64Size ? 13 : 16;
    int delta =
        masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
    DCHECK(delta >= 0);
    __ PushImm32(delta);

    // We are pushing three values on the stack but recording a safepoint
    // with two arguments because the stub is going to remove the third
    // argument from the stack before jumping to the instanceof builtin on
    // the slow path.
    CallCodeGeneric(stub.GetCode(),
                    RelocInfo::CODE_TARGET,
                    instr,
                    RECORD_SAFEPOINT_WITH_REGISTERS,
                    2);
    DCHECK(delta == masm_->SizeOfCodeGeneratedSince(map_check));
    LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
    // Move result to a register that survives the end of the
    // PushSafepointRegisterScope.
    __ movp(kScratchRegister, rax);
  }
  __ testp(kScratchRegister, kScratchRegister);
  Label load_false;
  Label done;
  __ j(not_zero, &load_false, Label::kNear);
  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&load_false);
  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
  __ bind(&done);
}
void LCodeGen::DoCmpT(LCmpT* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  Token::Value op = instr->op();

  Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);

  Condition condition = TokenToCondition(op, false);
  Label true_value, done;
  __ testp(rax, rax);
  __ j(condition, &true_value, Label::kNear);
  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&true_value);
  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
  __ bind(&done);
}
void LCodeGen::DoReturn(LReturn* instr) {
  if (FLAG_trace && info()->IsOptimizing()) {
    // Preserve the return value on the stack and rely on the runtime call
    // to return the value in the same register.
    __ Push(rax);
    __ CallRuntime(Runtime::kTraceExit, 1);
  }
  if (info()->saves_caller_doubles()) {
    RestoreCallerDoubles();
  }
  int no_frame_start = -1;
  if (NeedsEagerFrame()) {
    __ movp(rsp, rbp);
    __ popq(rbp);
    no_frame_start = masm_->pc_offset();
  }
  if (instr->has_constant_parameter_count()) {
    __ Ret((ToInteger32(instr->constant_parameter_count()) + 1) * kPointerSize,
           rcx);
  } else {
    Register reg = ToRegister(instr->parameter_count());
    // The argument count parameter is a smi.
    __ SmiToInteger32(reg, reg);
    Register return_addr_reg = reg.is(rcx) ? rbx : rcx;
    __ PopReturnAddressTo(return_addr_reg);
    __ shlp(reg, Immediate(kPointerSizeLog2));
    __ addp(rsp, reg);
    __ jmp(return_addr_reg);
  }
  if (no_frame_start != -1) {
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
  Register result = ToRegister(instr->result());
  __ LoadGlobalCell(result, instr->hydrogen()->cell().handle());
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr, "hole");
  }
}


template <class T>
void LCodeGen::EmitVectorLoadICRegisters(T* instr) {
  DCHECK(FLAG_vector_ics);
  Register vector = ToRegister(instr->temp_vector());
  DCHECK(vector.is(VectorLoadICDescriptor::VectorRegister()));
  __ Move(vector, instr->hydrogen()->feedback_vector());
  // No need to allocate this register.
  DCHECK(VectorLoadICDescriptor::SlotRegister().is(rax));
  __ Move(VectorLoadICDescriptor::SlotRegister(),
          Smi::FromInt(instr->hydrogen()->slot()));
}


void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->global_object())
             .is(LoadDescriptor::ReceiverRegister()));
  DCHECK(ToRegister(instr->result()).is(rax));

  __ Move(LoadDescriptor::NameRegister(), instr->name());
  if (FLAG_vector_ics) {
    EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
  }
  ContextualMode mode = instr->for_typeof() ? NOT_CONTEXTUAL : CONTEXTUAL;
  Handle<Code> ic = CodeFactory::LoadIC(isolate(), mode).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}


void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
  Register value = ToRegister(instr->value());
  Handle<Cell> cell_handle = instr->hydrogen()->cell().handle();

  // If the cell we are storing to contains the hole it could have been
  // deleted from the property dictionary. In that case, we need to update
  // the property details in the property dictionary to mark it as no longer
  // deleted. We deoptimize in that case.
  if (instr->hydrogen()->RequiresHoleCheck()) {
    // We have a temp because CompareRoot might clobber kScratchRegister.
    Register cell = ToRegister(instr->temp());
    DCHECK(!value.is(cell));
    __ Move(cell, cell_handle, RelocInfo::CELL);
    __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex);
    DeoptimizeIf(equal, instr, "hole");
    // Store the value.
    __ movp(Operand(cell, 0), value);
  } else {
    // Store the value.
    __ Move(kScratchRegister, cell_handle, RelocInfo::CELL);
    __ movp(Operand(kScratchRegister, 0), value);
  }
  // Cells are always rescanned, so no write barrier here.
}
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register result = ToRegister(instr->result());
  __ movp(result, ContextOperand(context, instr->slot_index()));
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr, "hole");
    } else {
      Label is_not_hole;
      __ j(not_equal, &is_not_hole, Label::kNear);
      __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
      __ bind(&is_not_hole);
    }
  }
}


void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
  Register context = ToRegister(instr->context());
  Register value = ToRegister(instr->value());

  Operand target = ContextOperand(context, instr->slot_index());

  Label skip_assignment;
  if (instr->hydrogen()->RequiresHoleCheck()) {
    __ CompareRoot(target, Heap::kTheHoleValueRootIndex);
    if (instr->hydrogen()->DeoptimizesOnHole()) {
      DeoptimizeIf(equal, instr, "hole");
    } else {
      __ j(not_equal, &skip_assignment);
    }
  }
  __ movp(target, value);

  if (instr->hydrogen()->NeedsWriteBarrier()) {
    SmiCheck check_needed =
        instr->hydrogen()->value()->type().IsHeapObject()
            ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    int offset = Context::SlotOffset(instr->slot_index());
    Register scratch = ToRegister(instr->temp());
    __ RecordWriteContextSlot(context,
                              offset,
                              value,
                              scratch,
                              kSaveFPRegs,
                              EMIT_REMEMBERED_SET,
                              check_needed);
  }

  __ bind(&skip_assignment);
}
void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
  HObjectAccess access = instr->hydrogen()->access();
  int offset = access.offset();

  if (access.IsExternalMemory()) {
    Register result = ToRegister(instr->result());
    if (instr->object()->IsConstantOperand()) {
      DCHECK(result.is(rax));
      __ load_rax(ToExternalReference(
          LConstantOperand::cast(instr->object())));
    } else {
      Register object = ToRegister(instr->object());
      __ Load(result, MemOperand(object, offset), access.representation());
    }
    return;
  }

  Register object = ToRegister(instr->object());
  if (instr->hydrogen()->representation().IsDouble()) {
    XMMRegister result = ToDoubleRegister(instr->result());
    __ movsd(result, FieldOperand(object, offset));
    return;
  }

  Register result = ToRegister(instr->result());
  if (!access.IsInobject()) {
    __ movp(result, FieldOperand(object, JSObject::kPropertiesOffset));
    object = result;
  }

  Representation representation = access.representation();
  if (representation.IsSmi() && SmiValuesAre32Bits() &&
      instr->hydrogen()->representation().IsInteger32()) {
    if (FLAG_debug_code) {
      Register scratch = kScratchRegister;
      __ Load(scratch, FieldOperand(object, offset), representation);
      __ AssertSmi(scratch);
    }

    // Read the int value directly from the upper half of the smi.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK(kSmiTagSize + kSmiShiftSize == 32);
    offset += kPointerSize / 2;
    representation = Representation::Integer32();
  }
  __ Load(result, FieldOperand(object, offset), representation);
}


void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->object()).is(LoadDescriptor::ReceiverRegister()));
  DCHECK(ToRegister(instr->result()).is(rax));

  __ Move(LoadDescriptor::NameRegister(), instr->name());
  if (FLAG_vector_ics) {
    EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
  }
  Handle<Code> ic = CodeFactory::LoadIC(isolate(), NOT_CONTEXTUAL).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
  Register function = ToRegister(instr->function());
  Register result = ToRegister(instr->result());

  // Get the prototype or initial map from the function.
  __ movp(result,
          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // Check that the function has a prototype or an initial map.
  __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
  DeoptimizeIf(equal, instr, "hole");

  // If the function does not have an initial map, we're done.
  Label done;
  __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
  __ j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  __ movp(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  __ bind(&done);
}


void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
  Register result = ToRegister(instr->result());
  __ LoadRoot(result, instr->index());
}
void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
  Register arguments = ToRegister(instr->arguments());
  Register result = ToRegister(instr->result());

  if (instr->length()->IsConstantOperand() &&
      instr->index()->IsConstantOperand()) {
    int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    int32_t const_length =
        ToInteger32(LConstantOperand::cast(instr->length()));
    if (const_index >= 0 && const_index < const_length) {
      StackArgumentsAccessor args(arguments, const_length,
                                  ARGUMENTS_DONT_CONTAIN_RECEIVER);
      __ movp(result, args.GetArgumentOperand(const_index));
    } else if (FLAG_debug_code) {
      __ int3();
    }
  } else {
    Register length = ToRegister(instr->length());
    // The index is counted from the end of the argument list, so subtract it
    // from the length to index from the front.
    if (instr->index()->IsRegister()) {
      __ subl(length, ToRegister(instr->index()));
    } else {
      __ subl(length, ToOperand(instr->index()));
    }
    StackArgumentsAccessor args(arguments, length,
                                ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(result, args.GetArgumentOperand(0));
  }
}
void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* key = instr->key();
  if (kPointerSize == kInt32Size && !key->IsConstantOperand()) {
    Register key_reg = ToRegister(key);
    Representation key_representation =
        instr->hydrogen()->key()->representation();
    if (ExternalArrayOpRequiresTemp(key_representation, elements_kind)) {
      __ SmiToInteger64(key_reg, key_reg);
    } else if (instr->hydrogen()->IsDehoisted()) {
      // Sign extend the key because it could be a 32-bit negative value and
      // the dehoisted address computation happens in 64 bits.
      __ movsxlq(key_reg, key_reg);
    }
  }
  Operand operand(BuildFastArrayOperand(
      instr->elements(),
      key,
      instr->hydrogen()->key()->representation(),
      elements_kind,
      instr->base_offset()));

  if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
      elements_kind == FLOAT32_ELEMENTS) {
    XMMRegister result(ToDoubleRegister(instr->result()));
    __ movss(result, operand);
    __ cvtss2sd(result, result);
  } else if (elements_kind == EXTERNAL_FLOAT64_ELEMENTS ||
             elements_kind == FLOAT64_ELEMENTS) {
    __ movsd(ToDoubleRegister(instr->result()), operand);
  } else {
    Register result(ToRegister(instr->result()));
    switch (elements_kind) {
      case EXTERNAL_INT8_ELEMENTS:
      case INT8_ELEMENTS:
        __ movsxbl(result, operand);
        break;
      case EXTERNAL_UINT8_ELEMENTS:
      case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
      case UINT8_ELEMENTS:
      case UINT8_CLAMPED_ELEMENTS:
        __ movzxbl(result, operand);
        break;
      case EXTERNAL_INT16_ELEMENTS:
      case INT16_ELEMENTS:
        __ movsxwl(result, operand);
        break;
      case EXTERNAL_UINT16_ELEMENTS:
      case UINT16_ELEMENTS:
        __ movzxwl(result, operand);
        break;
      case EXTERNAL_INT32_ELEMENTS:
      case INT32_ELEMENTS:
        __ movl(result, operand);
        break;
      case EXTERNAL_UINT32_ELEMENTS:
      case UINT32_ELEMENTS:
        __ movl(result, operand);
        if (!instr->hydrogen()->CheckFlag(HInstruction::kUint32)) {
          __ testl(result, result);
          DeoptimizeIf(negative, instr, "negative value");
        }
        break;
      default:
        UNREACHABLE();
        break;
    }
  }
}


void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
  XMMRegister result(ToDoubleRegister(instr->result()));
  LOperand* key = instr->key();
  if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
      instr->hydrogen()->IsDehoisted()) {
    // Sign extend the key because it could be a 32-bit negative value and
    // the dehoisted address computation happens in 64 bits.
    __ movsxlq(ToRegister(key), ToRegister(key));
  }
  if (instr->hydrogen()->RequiresHoleCheck()) {
    Operand hole_check_operand = BuildFastArrayOperand(
        instr->elements(),
        key,
        instr->hydrogen()->key()->representation(),
        FAST_DOUBLE_ELEMENTS,
        instr->base_offset() + sizeof(kHoleNanLower32));
    __ cmpl(hole_check_operand, Immediate(kHoleNanUpper32));
    DeoptimizeIf(equal, instr, "hole");
  }

  Operand double_load_operand = BuildFastArrayOperand(
      instr->elements(),
      key,
      instr->hydrogen()->key()->representation(),
      FAST_DOUBLE_ELEMENTS,
      instr->base_offset());
  __ movsd(result, double_load_operand);
}
void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
  HLoadKeyed* hinstr = instr->hydrogen();
  Register result = ToRegister(instr->result());
  LOperand* key = instr->key();
  bool requires_hole_check = hinstr->RequiresHoleCheck();
  Representation representation = hinstr->representation();
  int offset = instr->base_offset();

  if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
      instr->hydrogen()->IsDehoisted()) {
    // Sign extend the key because it could be a 32-bit negative value and
    // the dehoisted address computation happens in 64 bits.
    __ movsxlq(ToRegister(key), ToRegister(key));
  }
  if (representation.IsInteger32() && SmiValuesAre32Bits() &&
      hinstr->elements_kind() == FAST_SMI_ELEMENTS) {
    DCHECK(!requires_hole_check);
    if (FLAG_debug_code) {
      Register scratch = kScratchRegister;
      __ Load(scratch,
              BuildFastArrayOperand(instr->elements(),
                                    key,
                                    instr->hydrogen()->key()->representation(),
                                    FAST_ELEMENTS,
                                    offset),
              Representation::Smi());
      __ AssertSmi(scratch);
    }
    // Read the int value directly from the upper half of the smi.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK(kSmiTagSize + kSmiShiftSize == 32);
    offset += kPointerSize / 2;
  }

  __ Load(result,
          BuildFastArrayOperand(instr->elements(),
                                key,
                                instr->hydrogen()->key()->representation(),
                                FAST_ELEMENTS,
                                offset),
          representation);

  // Check for the hole value.
  if (requires_hole_check) {
    if (IsFastSmiElementsKind(hinstr->elements_kind())) {
      Condition smi = __ CheckSmi(result);
      DeoptimizeIf(NegateCondition(smi), instr, "not a Smi");
    } else {
      __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
      DeoptimizeIf(equal, instr, "hole");
    }
  }
}


void LCodeGen::DoLoadKeyed(LLoadKeyed* instr) {
  if (instr->is_typed_elements()) {
    DoLoadKeyedExternalArray(instr);
  } else if (instr->hydrogen()->representation().IsDouble()) {
    DoLoadKeyedFixedDoubleArray(instr);
  } else {
    DoLoadKeyedFixedArray(instr);
  }
}
Operand LCodeGen::BuildFastArrayOperand(
    LOperand* elements_pointer,
    LOperand* key,
    Representation key_representation,
    ElementsKind elements_kind,
    uint32_t offset) {
  Register elements_pointer_reg = ToRegister(elements_pointer);
  int shift_size = ElementsKindToShiftSize(elements_kind);
  if (key->IsConstantOperand()) {
    int32_t constant_value = ToInteger32(LConstantOperand::cast(key));
    if (constant_value & 0xF0000000) {
      Abort(kArrayIndexConstantValueTooBig);
    }
    return Operand(elements_pointer_reg,
                   (constant_value << shift_size) + offset);
  } else {
    // Take the tag bit into account while computing the shift size.
    if (key_representation.IsSmi() && (shift_size >= 1)) {
      DCHECK(SmiValuesAre31Bits());
      shift_size -= kSmiTagSize;
    }
    ScaleFactor scale_factor = static_cast<ScaleFactor>(shift_size);
    return Operand(elements_pointer_reg,
                   ToRegister(key),
                   scale_factor,
                   offset);
  }
}


void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->object()).is(LoadDescriptor::ReceiverRegister()));
  DCHECK(ToRegister(instr->key()).is(LoadDescriptor::NameRegister()));

  if (FLAG_vector_ics) {
    EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
  }

  Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
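// BuildFastArrayOperand: the element address is always
// elements + (key << shift) + offset. For a constant key the scaled index is
// folded into the displacement; for a register key the shift becomes the
// operand's scale factor, reduced by one when the key is still a 31-bit
// tagged smi.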
void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
  Register result = ToRegister(instr->result());

  if (instr->hydrogen()->from_inlined()) {
    __ leap(result, Operand(rsp, -kFPOnStackSize + -kPCOnStackSize));
  } else {
    // Check for arguments adapter frame.
    Label done, adapted;
    __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
    __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
           Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
    __ j(equal, &adapted, Label::kNear);

    // No arguments adaptor frame.
    __ movp(result, rbp);
    __ jmp(&done, Label::kNear);

    // Arguments adaptor frame present.
    __ bind(&adapted);
    __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

    // Result is the frame pointer for the frame if not adapted and for the
    // real frame below the adaptor frame if adapted.
    __ bind(&done);
  }
}


void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
  Register result = ToRegister(instr->result());

  Label done;

  // If no arguments adaptor frame the number of arguments is fixed.
  if (instr->elements()->IsRegister()) {
    __ cmpp(rbp, ToRegister(instr->elements()));
  } else {
    __ cmpp(rbp, ToOperand(instr->elements()));
  }
  __ movl(result, Immediate(scope()->num_parameters()));
  __ j(equal, &done, Label::kNear);

  // Arguments adaptor frame present. Get argument length from there.
  __ movp(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ SmiToInteger32(result,
                    Operand(result,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Argument length is in result register.
  __ bind(&done);
}
void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());

  // If the receiver is null or undefined, we have to pass the global object
  // as a receiver to normal functions. Values have to be passed unchanged to
  // builtins and strict-mode functions.
  Label global_object, receiver_ok;
  Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;

  if (!instr->hydrogen()->known_function()) {
    // Do not transform the receiver to object for strict mode functions.
    __ movp(kScratchRegister,
            FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    __ testb(FieldOperand(kScratchRegister,
                          SharedFunctionInfo::kStrictModeByteOffset),
             Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
    __ j(not_equal, &receiver_ok, dist);

    // Do not transform the receiver to object for builtins.
    __ testb(FieldOperand(kScratchRegister,
                          SharedFunctionInfo::kNativeByteOffset),
             Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
    __ j(not_equal, &receiver_ok, dist);
  }

  // Normal function. Replace undefined or null with global receiver.
  __ CompareRoot(receiver, Heap::kNullValueRootIndex);
  __ j(equal, &global_object, Label::kNear);
  __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
  __ j(equal, &global_object, Label::kNear);

  // The receiver should be a JS object.
  Condition is_smi = __ CheckSmi(receiver);
  DeoptimizeIf(is_smi, instr, "Smi");
  __ CmpObjectType(receiver, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
  DeoptimizeIf(below, instr, "not a JavaScript object");

  __ jmp(&receiver_ok, Label::kNear);
  __ bind(&global_object);
  __ movp(receiver, FieldOperand(function, JSFunction::kContextOffset));
  __ movp(receiver,
          Operand(receiver,
                  Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(receiver, FieldOperand(receiver, GlobalObject::kGlobalProxyOffset));

  __ bind(&receiver_ok);
}
void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register function = ToRegister(instr->function());
  Register length = ToRegister(instr->length());
  Register elements = ToRegister(instr->elements());
  DCHECK(receiver.is(rax));  // Used for parameter count.
  DCHECK(function.is(rdi));  // Required by InvokeFunction.
  DCHECK(ToRegister(instr->result()).is(rax));

  // Copy the arguments to this function possibly from the
  // adaptor frame below it.
  const uint32_t kArgumentsLimit = 1 * KB;
  __ cmpp(length, Immediate(kArgumentsLimit));
  DeoptimizeIf(above, instr, "too many arguments");

  __ Push(receiver);
  __ movp(receiver, length);

  // Loop through the arguments pushing them onto the execution stack.
  Label invoke, loop;
  // length is a small non-negative integer, due to the test above.
  __ testl(length, length);
  __ j(zero, &invoke, Label::kNear);
  __ bind(&loop);
  StackArgumentsAccessor args(elements, length,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ Push(args.GetArgumentOperand(0));
  __ decl(length);
  __ j(not_zero, &loop);

  // Invoke the function.
  __ bind(&invoke);
  DCHECK(instr->HasPointerMap());
  LPointerMap* pointers = instr->pointer_map();
  SafepointGenerator safepoint_generator(
      this, pointers, Safepoint::kLazyDeopt);
  ParameterCount actual(rax);
  __ InvokeFunction(function, actual, CALL_FUNCTION, safepoint_generator);
}
void LCodeGen::DoPushArgument(LPushArgument* instr) {
  LOperand* argument = instr->value();
  EmitPushTaggedOperand(argument);
}


void LCodeGen::DoDrop(LDrop* instr) {
  __ Drop(instr->count());
}


void LCodeGen::DoThisFunction(LThisFunction* instr) {
  Register result = ToRegister(instr->result());
  __ movp(result, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
}


void LCodeGen::DoContext(LContext* instr) {
  Register result = ToRegister(instr->result());
  if (info()->IsOptimizing()) {
    __ movp(result, Operand(rbp, StandardFrameConstants::kContextOffset));
  } else {
    // If there is no frame, the context must be in rsi.
    DCHECK(result.is(rsi));
  }
}


void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  __ Push(rsi);  // The context is the first argument.
  __ Push(instr->hydrogen()->pairs());
  __ Push(Smi::FromInt(instr->hydrogen()->flags()));
  CallRuntime(Runtime::kDeclareGlobals, 3, instr);
}
void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
                                 int formal_parameter_count,
                                 int arity,
                                 LInstruction* instr,
                                 RDIState rdi_state) {
  bool dont_adapt_arguments =
      formal_parameter_count == SharedFunctionInfo::kDontAdaptArgumentsSentinel;
  bool can_invoke_directly =
      dont_adapt_arguments || formal_parameter_count == arity;

  LPointerMap* pointers = instr->pointer_map();

  if (can_invoke_directly) {
    if (rdi_state == RDI_UNINITIALIZED) {
      __ Move(rdi, function);
    }

    // Change context.
    __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

    // Set rax to arguments count if adaption is not needed. Assumes that rax
    // is available to write to at this point.
    if (dont_adapt_arguments) {
      __ Set(rax, arity);
    }

    // Invoke function.
    if (function.is_identical_to(info()->closure())) {
      __ CallSelf();
    } else {
      __ Call(FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    }

    // Set up deoptimization.
    RecordSafepointWithLazyDeopt(instr, RECORD_SIMPLE_SAFEPOINT, 0);
  } else {
    // We need to adapt arguments.
    SafepointGenerator generator(
        this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(arity);
    ParameterCount expected(formal_parameter_count);
    __ InvokeFunction(function, expected, count, CALL_FUNCTION, generator);
  }
}
void LCodeGen::DoTailCallThroughMegamorphicCache(
    LTailCallThroughMegamorphicCache* instr) {
  Register receiver = ToRegister(instr->receiver());
  Register name = ToRegister(instr->name());
  DCHECK(receiver.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(name.is(LoadDescriptor::NameRegister()));

  Register scratch = rbx;
  DCHECK(!scratch.is(receiver) && !scratch.is(name));

  // Important for the tail-call.
  bool must_teardown_frame = NeedsEagerFrame();

  // The probe will tail call to a handler if found.
  isolate()->stub_cache()->GenerateProbe(masm(), instr->hydrogen()->flags(),
                                         must_teardown_frame, receiver, name,
                                         scratch, no_reg);

  // Tail call to miss if we ended up here.
  if (must_teardown_frame) __ leave();
  LoadIC::GenerateMiss(masm());
}
void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
  DCHECK(ToRegister(instr->result()).is(rax));

  LPointerMap* pointers = instr->pointer_map();
  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);

  if (instr->target()->IsConstantOperand()) {
    LConstantOperand* target = LConstantOperand::cast(instr->target());
    Handle<Code> code = Handle<Code>::cast(ToHandle(target));
    generator.BeforeCall(__ CallSize(code));
    __ call(code, RelocInfo::CODE_TARGET);
  } else {
    DCHECK(instr->target()->IsRegister());
    Register target = ToRegister(instr->target());
    generator.BeforeCall(__ CallSize(target));
    __ addp(target, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ call(target);
  }
  generator.AfterCall();
}


void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
  DCHECK(ToRegister(instr->function()).is(rdi));
  DCHECK(ToRegister(instr->result()).is(rax));

  if (instr->hydrogen()->pass_argument_count()) {
    __ Set(rax, instr->arity());
  }

  // Change context.
  __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  LPointerMap* pointers = instr->pointer_map();
  SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);

  bool is_self_call = false;
  if (instr->hydrogen()->function()->IsConstant()) {
    Handle<JSFunction> jsfun = Handle<JSFunction>::null();
    HConstant* fun_const = HConstant::cast(instr->hydrogen()->function());
    jsfun = Handle<JSFunction>::cast(fun_const->handle(isolate()));
    is_self_call = jsfun.is_identical_to(info()->closure());
  }

  if (is_self_call) {
    __ CallSelf();
  } else {
    Operand target = FieldOperand(rdi, JSFunction::kCodeEntryOffset);
    generator.BeforeCall(__ CallSize(target));
    __ Call(target);
  }
  generator.AfterCall();
}
void LCodeGen::DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr) {
  Register input_reg = ToRegister(instr->value());
  __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                 Heap::kHeapNumberMapRootIndex);
  DeoptimizeIf(not_equal, instr, "not a heap number");

  Label slow, allocated, done;
  Register tmp = input_reg.is(rax) ? rcx : rax;
  Register tmp2 = tmp.is(rcx) ? rdx : input_reg.is(rcx) ? rdx : rcx;

  // Preserve the value of all registers.
  PushSafepointRegistersScope scope(this);

  __ movl(tmp, FieldOperand(input_reg, HeapNumber::kExponentOffset));
  // Check the sign of the argument. If the argument is positive, just
  // return it. We do not need to patch the stack since |input| and
  // |result| are the same register and |input| will be restored
  // unchanged by popping safepoint registers.
  __ testl(tmp, Immediate(HeapNumber::kSignMask));
  __ j(zero, &done);

  __ AllocateHeapNumber(tmp, tmp2, &slow);
  __ jmp(&allocated, Label::kNear);

  // Slow case: call the runtime system to do the number allocation.
  __ bind(&slow);
  CallRuntimeFromDeferred(
      Runtime::kAllocateHeapNumber, 0, instr, instr->context());
  // Set the pointer to the new heap number in tmp.
  if (!tmp.is(rax)) __ movp(tmp, rax);
  // Restore input_reg after call to runtime.
  __ LoadFromSafepointRegisterSlot(input_reg, input_reg);

  __ bind(&allocated);
  __ movq(tmp2, FieldOperand(input_reg, HeapNumber::kValueOffset));
  // Clear the sign bit of the double value.
  __ shlq(tmp2, Immediate(1));
  __ shrq(tmp2, Immediate(1));
  __ movq(FieldOperand(tmp, HeapNumber::kValueOffset), tmp2);
  __ StoreToSafepointRegisterSlot(input_reg, tmp);

  __ bind(&done);
}


void LCodeGen::EmitIntegerMathAbs(LMathAbs* instr) {
  Register input_reg = ToRegister(instr->value());
  __ testl(input_reg, input_reg);
  Label is_positive;
  __ j(not_sign, &is_positive, Label::kNear);
  __ negl(input_reg);  // Sets flags.
  DeoptimizeIf(negative, instr, "overflow");
  __ bind(&is_positive);
}


void LCodeGen::EmitSmiMathAbs(LMathAbs* instr) {
  Register input_reg = ToRegister(instr->value());
  __ testp(input_reg, input_reg);
  Label is_positive;
  __ j(not_sign, &is_positive, Label::kNear);
  __ negp(input_reg);  // Sets flags.
  DeoptimizeIf(negative, instr, "overflow");
  __ bind(&is_positive);
}
void LCodeGen::DoMathAbs(LMathAbs* instr) {
  // Class for deferred case.
  class DeferredMathAbsTaggedHeapNumber FINAL : public LDeferredCode {
   public:
    DeferredMathAbsTaggedHeapNumber(LCodeGen* codegen, LMathAbs* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LMathAbs* instr_;
  };

  DCHECK(instr->value()->Equals(instr->result()));
  Representation r = instr->hydrogen()->value()->representation();

  if (r.IsDouble()) {
    XMMRegister scratch = double_scratch0();
    XMMRegister input_reg = ToDoubleRegister(instr->value());
    // abs(x) = x & ~sign-bit: 0.0 - x has the sign bit set exactly when x
    // does not, so ANDing clears it.
    __ xorps(scratch, scratch);
    __ subsd(scratch, input_reg);
    __ andps(input_reg, scratch);
  } else if (r.IsInteger32()) {
    EmitIntegerMathAbs(instr);
  } else if (r.IsSmi()) {
    EmitSmiMathAbs(instr);
  } else {  // Tagged case.
    DeferredMathAbsTaggedHeapNumber* deferred =
        new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr);
    Register input_reg = ToRegister(instr->value());
    // Smi check.
    __ JumpIfNotSmi(input_reg, deferred->entry());
    EmitSmiMathAbs(instr);
    __ bind(deferred->exit());
  }
}
void LCodeGen::DoMathFloor(LMathFloor* instr) {
  XMMRegister xmm_scratch = double_scratch0();
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->value());

  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope scope(masm(), SSE4_1);
    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Deoptimize if minus zero.
      __ movq(output_reg, input_reg);
      __ subq(output_reg, Immediate(1));
      DeoptimizeIf(overflow, instr, "minus zero");
    }
    __ roundsd(xmm_scratch, input_reg, Assembler::kRoundDown);
    __ cvttsd2si(output_reg, xmm_scratch);
    __ cmpl(output_reg, Immediate(0x1));
    DeoptimizeIf(overflow, instr, "overflow");
  } else {
    Label negative_sign, done;
    // Deoptimize on unordered.
    __ xorps(xmm_scratch, xmm_scratch);  // Zero the register.
    __ ucomisd(input_reg, xmm_scratch);
    DeoptimizeIf(parity_even, instr, "NaN");
    __ j(below, &negative_sign, Label::kNear);

    if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
      // Check for negative zero.
      Label positive_sign;
      __ j(above, &positive_sign, Label::kNear);
      __ movmskpd(output_reg, input_reg);
      __ testq(output_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr, "minus zero");
      __ Set(output_reg, 0);
      __ jmp(&done);
      __ bind(&positive_sign);
    }

    // Use truncating instruction (OK because input is positive).
    __ cvttsd2si(output_reg, input_reg);
    // Overflow is signalled with minint.
    __ cmpl(output_reg, Immediate(0x1));
    DeoptimizeIf(overflow, instr, "overflow");
    __ jmp(&done, Label::kNear);

    // Non-zero negative reaches here.
    __ bind(&negative_sign);
    // Truncate, then compare and compensate.
    __ cvttsd2si(output_reg, input_reg);
    __ Cvtlsi2sd(xmm_scratch, output_reg);
    __ ucomisd(input_reg, xmm_scratch);
    __ j(equal, &done, Label::kNear);
    __ subl(output_reg, Immediate(1));
    DeoptimizeIf(overflow, instr, "overflow");

    __ bind(&done);
  }
}
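// DoMathFloor: without SSE4.1's roundsd, floor is synthesized from cvttsd2si
// (which truncates toward zero). Positive inputs truncate correctly;
// negative inputs are truncated, converted back, compared with the original,
// and decremented when any fraction was lost.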
void LCodeGen::DoMathRound(LMathRound* instr) {
  const XMMRegister xmm_scratch = double_scratch0();
  Register output_reg = ToRegister(instr->result());
  XMMRegister input_reg = ToDoubleRegister(instr->value());
  XMMRegister input_temp = ToDoubleRegister(instr->temp());
  static int64_t one_half = V8_INT64_C(0x3FE0000000000000);  // 0.5
  static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000);  // -0.5

  Label done, round_to_zero, below_one_half;
  Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
  __ movq(kScratchRegister, one_half);
  __ movq(xmm_scratch, kScratchRegister);
  __ ucomisd(xmm_scratch, input_reg);
  __ j(above, &below_one_half, Label::kNear);

  // CVTTSD2SI rounds towards zero, since 0.5 <= x, we use floor(0.5 + x).
  __ addsd(xmm_scratch, input_reg);
  __ cvttsd2si(output_reg, xmm_scratch);
  // Overflow is signalled with minint.
  __ cmpl(output_reg, Immediate(0x1));
  DeoptimizeIf(overflow, instr, "overflow");
  __ jmp(&done, dist);

  __ bind(&below_one_half);
  __ movq(kScratchRegister, minus_one_half);
  __ movq(xmm_scratch, kScratchRegister);
  __ ucomisd(xmm_scratch, input_reg);
  __ j(below_equal, &round_to_zero, Label::kNear);

  // CVTTSD2SI rounds towards zero, we use ceil(x - (-0.5)) and then
  // compare and compensate.
  __ movq(input_temp, input_reg);  // Do not alter input_reg.
  __ subsd(input_temp, xmm_scratch);
  __ cvttsd2si(output_reg, input_temp);
  // Catch minint due to overflow, and to prevent rounding on x.5 cases.
  __ cmpl(output_reg, Immediate(0x1));
  DeoptimizeIf(overflow, instr, "overflow");

  __ Cvtlsi2sd(xmm_scratch, output_reg);
  __ ucomisd(xmm_scratch, input_temp);
  __ j(equal, &done, dist);
  __ subl(output_reg, Immediate(1));
  // No overflow because we already ruled out minint.
  __ jmp(&done, dist);

  __ bind(&round_to_zero);
  // We return 0 for the input range [+0, 0.5[, or [-0.5, 0.5[ if
  // we can ignore the difference between a result of -0 and +0.
  if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
    __ movq(output_reg, input_reg);
    __ testq(output_reg, output_reg);
    DeoptimizeIf(negative, instr, "minus zero");
  }
  __ Set(output_reg, 0);
  __ bind(&done);
}
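// DoMathRound: Math.round(x) is computed as floor(x + 0.5) for x >= 0.5 and
// as ceil(x + 0.5) with compensation for x < 0.5, both built on cvttsd2si.
// The cmpl against 0x1 catches the 0x80000000 value cvttsd2si produces on
// overflow: subtracting 1 from kMinInt sets the overflow flag.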
void LCodeGen::DoMathFround(LMathFround* instr) {
  XMMRegister input_reg = ToDoubleRegister(instr->value());
  XMMRegister output_reg = ToDoubleRegister(instr->result());
  // Round to float32 and back, dropping the extra double precision.
  __ cvtsd2ss(output_reg, input_reg);
  __ cvtss2sd(output_reg, output_reg);
}


void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
  XMMRegister output = ToDoubleRegister(instr->result());
  if (instr->value()->IsDoubleRegister()) {
    XMMRegister input = ToDoubleRegister(instr->value());
    __ sqrtsd(output, input);
  } else {
    Operand input = ToOperand(instr->value());
    __ sqrtsd(output, input);
  }
}


void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
  XMMRegister xmm_scratch = double_scratch0();
  XMMRegister input_reg = ToDoubleRegister(instr->value());
  DCHECK(ToDoubleRegister(instr->result()).is(input_reg));

  // Note that according to ECMA-262 15.8.2.13:
  // Math.pow(-Infinity, 0.5) == Infinity
  // Math.sqrt(-Infinity) == NaN
  Label done, sqrt;
  // Check base for -Infinity. C3 == 0, C2 == 1, C1 == 1 and C0 == 1.
  __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000));
  __ movq(xmm_scratch, kScratchRegister);
  __ ucomisd(xmm_scratch, input_reg);
  // Comparing -Infinity with NaN results in "unordered", which sets the
  // zero flag as if both were equal. However, it also sets the carry flag.
  __ j(not_equal, &sqrt, Label::kNear);
  __ j(carry, &sqrt, Label::kNear);
  // If input is -Infinity, return Infinity.
  __ xorps(input_reg, input_reg);
  __ subsd(input_reg, xmm_scratch);
  __ jmp(&done, Label::kNear);

  // Square root.
  __ bind(&sqrt);
  __ xorps(xmm_scratch, xmm_scratch);
  __ addsd(input_reg, xmm_scratch);  // Convert -0 to +0.
  __ sqrtsd(input_reg, input_reg);
  __ bind(&done);
}
void LCodeGen::DoPower(LPower* instr) {
  Representation exponent_type = instr->hydrogen()->right()->representation();
  // Having marked this as a call, we can use any registers.
  // Just make sure that the input/output registers are the expected ones.

  Register tagged_exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(!instr->right()->IsRegister() ||
         ToRegister(instr->right()).is(tagged_exponent));
  DCHECK(!instr->right()->IsDoubleRegister() ||
         ToDoubleRegister(instr->right()).is(xmm1));
  DCHECK(ToDoubleRegister(instr->left()).is(xmm2));
  DCHECK(ToDoubleRegister(instr->result()).is(xmm3));

  if (exponent_type.IsSmi()) {
    MathPowStub stub(isolate(), MathPowStub::TAGGED);
    __ CallStub(&stub);
  } else if (exponent_type.IsTagged()) {
    Label no_deopt;
    __ JumpIfSmi(tagged_exponent, &no_deopt, Label::kNear);
    __ CmpObjectType(tagged_exponent, HEAP_NUMBER_TYPE, rcx);
    DeoptimizeIf(not_equal, instr, "not a heap number");
    __ bind(&no_deopt);
    MathPowStub stub(isolate(), MathPowStub::TAGGED);
    __ CallStub(&stub);
  } else if (exponent_type.IsInteger32()) {
    MathPowStub stub(isolate(), MathPowStub::INTEGER);
    __ CallStub(&stub);
  } else {
    DCHECK(exponent_type.IsDouble());
    MathPowStub stub(isolate(), MathPowStub::DOUBLE);
    __ CallStub(&stub);
  }
}
void LCodeGen::DoMathExp(LMathExp* instr) {
  XMMRegister input = ToDoubleRegister(instr->value());
  XMMRegister result = ToDoubleRegister(instr->result());
  XMMRegister temp0 = double_scratch0();
  Register temp1 = ToRegister(instr->temp1());
  Register temp2 = ToRegister(instr->temp2());

  MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2);
}


void LCodeGen::DoMathLog(LMathLog* instr) {
  DCHECK(instr->value()->Equals(instr->result()));
  XMMRegister input_reg = ToDoubleRegister(instr->value());
  XMMRegister xmm_scratch = double_scratch0();
  Label positive, done, zero;
  __ xorps(xmm_scratch, xmm_scratch);
  __ ucomisd(input_reg, xmm_scratch);
  __ j(above, &positive, Label::kNear);
  __ j(not_carry, &zero, Label::kNear);
  // Negative input (or NaN): the result is the canonical NaN.
  ExternalReference nan =
      ExternalReference::address_of_canonical_non_hole_nan();
  Operand nan_operand = masm()->ExternalOperand(nan);
  __ movsd(input_reg, nan_operand);
  __ jmp(&done, Label::kNear);
  __ bind(&zero);
  // log(0) == -Infinity.
  ExternalReference ninf =
      ExternalReference::address_of_negative_infinity();
  Operand ninf_operand = masm()->ExternalOperand(ninf);
  __ movsd(input_reg, ninf_operand);
  __ jmp(&done, Label::kNear);
  __ bind(&positive);
  // Use the x87 fyl2x instruction: log(x) = ln(2) * log2(x).
  __ fldln2();
  __ subp(rsp, Immediate(kDoubleSize));
  __ movsd(Operand(rsp, 0), input_reg);
  __ fld_d(Operand(rsp, 0));
  __ fyl2x();
  __ fstp_d(Operand(rsp, 0));
  __ movsd(input_reg, Operand(rsp, 0));
  __ addp(rsp, Immediate(kDoubleSize));
  __ bind(&done);
}
void LCodeGen::DoMathClz32(LMathClz32* instr) {
  Register input = ToRegister(instr->value());
  Register result = ToRegister(instr->result());
  Label not_zero_input;
  __ bsrl(result, input);

  __ j(not_zero, &not_zero_input, Label::kNear);
  __ Set(result, 63);  // 63^31 == 32

  __ bind(&not_zero_input);
  __ xorl(result, Immediate(31));  // for x in [0..31], 31^x == 31 - x.
}
void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->function()).is(rdi));
  DCHECK(instr->HasPointerMap());

  Handle<JSFunction> known_function = instr->hydrogen()->known_function();
  if (known_function.is_null()) {
    LPointerMap* pointers = instr->pointer_map();
    SafepointGenerator generator(this, pointers, Safepoint::kLazyDeopt);
    ParameterCount count(instr->arity());
    __ InvokeFunction(rdi, count, CALL_FUNCTION, generator);
  } else {
    CallKnownFunction(known_function,
                      instr->hydrogen()->formal_parameter_count(),
                      instr->arity(), instr, RDI_CONTAINS_TARGET);
  }
}


void LCodeGen::DoCallFunction(LCallFunction* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->function()).is(rdi));
  DCHECK(ToRegister(instr->result()).is(rax));

  int arity = instr->arity();
  CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoCallNew(LCallNew* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->constructor()).is(rdi));
  DCHECK(ToRegister(instr->result()).is(rax));

  __ Set(rax, instr->arity());
  // No cell in rbx for construct type feedback in optimized code.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  CallConstructStub stub(isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
  CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
}


void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  DCHECK(ToRegister(instr->constructor()).is(rdi));
  DCHECK(ToRegister(instr->result()).is(rax));

  __ Set(rax, instr->arity());
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  ElementsKind kind = instr->hydrogen()->elements_kind();
  AllocationSiteOverrideMode override_mode =
      (AllocationSite::GetMode(kind) == TRACK_ALLOCATION_SITE)
          ? DISABLE_ALLOCATION_SITES
          : DONT_OVERRIDE;

  if (instr->arity() == 0) {
    ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
    CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
  } else if (instr->arity() == 1) {
    Label done;
    if (IsFastPackedElementsKind(kind)) {
      Label packed_case;
      // We might need to create a holey array; look at the first argument.
      __ movp(rcx, Operand(rsp, 0));
      __ testp(rcx, rcx);
      __ j(zero, &packed_case, Label::kNear);

      ElementsKind holey_kind = GetHoleyElementsKind(kind);
      ArraySingleArgumentConstructorStub stub(isolate(),
                                              holey_kind,
                                              override_mode);
      CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
      __ jmp(&done, Label::kNear);
      __ bind(&packed_case);
    }

    ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
    CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
    __ bind(&done);
  } else {
    ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
    CallCode(stub.GetCode(), RelocInfo::CONSTRUCT_CALL, instr);
  }
}


void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
  DCHECK(ToRegister(instr->context()).is(rsi));
  CallRuntime(instr->function(), instr->arity(), instr, instr->save_doubles());
}
void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
  Register function = ToRegister(instr->function());
  Register code_object = ToRegister(instr->code_object());
  __ leap(code_object, FieldOperand(code_object, Code::kHeaderSize));
  __ movp(FieldOperand(function, JSFunction::kCodeEntryOffset), code_object);
}


void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
  Register result = ToRegister(instr->result());
  Register base = ToRegister(instr->base_object());
  if (instr->offset()->IsConstantOperand()) {
    LConstantOperand* offset = LConstantOperand::cast(instr->offset());
    __ leap(result, Operand(base, ToInteger32(offset)));
  } else {
    Register offset = ToRegister(instr->offset());
    __ leap(result, Operand(base, offset, times_1, 0));
  }
}
void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
  HStoreNamedField* hinstr = instr->hydrogen();
  Representation representation = instr->representation();

  HObjectAccess access = hinstr->access();
  int offset = access.offset();

  if (access.IsExternalMemory()) {
    DCHECK(!hinstr->NeedsWriteBarrier());
    Register value = ToRegister(instr->value());
    if (instr->object()->IsConstantOperand()) {
      LConstantOperand* object = LConstantOperand::cast(instr->object());
      __ store_rax(ToExternalReference(object));
    } else {
      Register object = ToRegister(instr->object());
      __ Store(MemOperand(object, offset), value, representation);
    }
    return;
  }

  Register object = ToRegister(instr->object());
  __ AssertNotSmi(object);

  DCHECK(!representation.IsSmi() ||
         !instr->value()->IsConstantOperand() ||
         IsInteger32Constant(LConstantOperand::cast(instr->value())));
  if (representation.IsDouble()) {
    DCHECK(access.IsInobject());
    DCHECK(!hinstr->has_transition());
    DCHECK(!hinstr->NeedsWriteBarrier());
    XMMRegister value = ToDoubleRegister(instr->value());
    __ movsd(FieldOperand(object, offset), value);
    return;
  }

  if (hinstr->has_transition()) {
    Handle<Map> transition = hinstr->transition_map();
    AddDeprecationDependency(transition);
    if (!hinstr->NeedsWriteBarrierForMap()) {
      __ Move(FieldOperand(object, HeapObject::kMapOffset), transition);
    } else {
      Register temp = ToRegister(instr->temp());
      __ Move(kScratchRegister, transition);
      __ movp(FieldOperand(object, HeapObject::kMapOffset), kScratchRegister);
      // Update the write barrier for the map field.
      __ RecordWriteForMap(object, kScratchRegister, temp, kSaveFPRegs);
    }
  }

  // Do the store.
  Register write_register = object;
  if (!access.IsInobject()) {
    write_register = ToRegister(instr->temp());
    __ movp(write_register, FieldOperand(object, JSObject::kPropertiesOffset));
  }

  if (representation.IsSmi() && SmiValuesAre32Bits() &&
      hinstr->value()->representation().IsInteger32()) {
    DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY);
    if (FLAG_debug_code) {
      Register scratch = kScratchRegister;
      __ Load(scratch, FieldOperand(write_register, offset), representation);
      __ AssertSmi(scratch);
    }
    // Store the int32 value directly into the upper half of the smi.
    offset += kPointerSize / 2;
    representation = Representation::Integer32();
  }

  Operand operand = FieldOperand(write_register, offset);

  if (instr->value()->IsRegister()) {
    Register value = ToRegister(instr->value());
    __ Store(operand, value, representation);
  } else {
    LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
    if (IsInteger32Constant(operand_value)) {
      DCHECK(!hinstr->NeedsWriteBarrier());
      int32_t value = ToInteger32(operand_value);
      if (representation.IsSmi()) {
        __ Move(operand, Smi::FromInt(value));
      } else {
        __ movl(operand, Immediate(value));
      }
    } else {
      Handle<Object> handle_value = ToHandle(operand_value);
      DCHECK(!hinstr->NeedsWriteBarrier());
      __ Move(operand, handle_value);
    }
  }

  if (hinstr->NeedsWriteBarrier()) {
    Register value = ToRegister(instr->value());
    Register temp = access.IsInobject() ? ToRegister(instr->temp()) : object;
    __ RecordWriteField(write_register, offset, value, temp, kSaveFPRegs,
                        EMIT_REMEMBERED_SET,
                        hinstr->SmiCheckForWriteBarrier(),
                        hinstr->PointersToHereCheckForValue());
  }
}
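// RecordWriteField is the generational write barrier: after a tagged
// pointer is stored into an object, the store site may need to be added to
// the remembered set so the garbage collector can find old-to-new
// references without scanning all of old space. The SmiCheck argument lets
// the barrier be skipped entirely when the stored value is statically known
// to be a smi, since smis are immediates rather than heap pointers.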
void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
  __ Move(StoreDescriptor::NameRegister(), instr->hydrogen()->name());
  Handle<Code> ic = StoreIC::initialize_stub(isolate(), instr->strict_mode());
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
  Representation representation = instr->hydrogen()->length()->representation();
  DCHECK(representation.Equals(instr->hydrogen()->index()->representation()));
  DCHECK(representation.IsSmiOrInteger32());

  Condition cc = instr->hydrogen()->allow_equality() ? below : below_equal;
  if (instr->length()->IsConstantOperand()) {
    int32_t length = ToInteger32(LConstantOperand::cast(instr->length()));
    Register index = ToRegister(instr->index());
    if (representation.IsSmi()) {
      __ Cmp(index, Smi::FromInt(length));
    } else {
      __ cmpl(index, Immediate(length));
    }
    cc = CommuteCondition(cc);
  } else if (instr->index()->IsConstantOperand()) {
    int32_t index = ToInteger32(LConstantOperand::cast(instr->index()));
    if (instr->length()->IsRegister()) {
      Register length = ToRegister(instr->length());
      if (representation.IsSmi()) {
        __ Cmp(length, Smi::FromInt(index));
      } else {
        __ cmpl(length, Immediate(index));
      }
    } else {
      Operand length = ToOperand(instr->length());
      if (representation.IsSmi()) {
        __ Cmp(length, Smi::FromInt(index));
      } else {
        __ cmpl(length, Immediate(index));
      }
    }
  } else {
    Register index = ToRegister(instr->index());
    if (instr->length()->IsRegister()) {
      Register length = ToRegister(instr->length());
      if (representation.IsSmi()) {
        __ cmpp(length, index);
      } else {
        __ cmpl(length, index);
      }
    } else {
      Operand length = ToOperand(instr->length());
      if (representation.IsSmi()) {
        __ cmpp(length, index);
      } else {
        __ cmpl(length, index);
      }
    }
  }
  if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
    Label done;
    __ j(NegateCondition(cc), &done, Label::kNear);
    __ int3();
    __ bind(&done);
  } else {
    DeoptimizeIf(cc, instr, "out of bounds");
  }
}
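// Two compare widths are needed because of the smi encoding: when both
// operands are smis they are full tagged words, so the comparison must be
// pointer-width (cmpp), while untagged int32 indices compare with the
// 32-bit cmpl. CommuteCondition flips below/above in the constant-length
// case, where the operand order of the compare is swapped.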
void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
  ElementsKind elements_kind = instr->elements_kind();
  LOperand* key = instr->key();
  if (kPointerSize == kInt32Size && !key->IsConstantOperand()) {
    Register key_reg = ToRegister(key);
    Representation key_representation =
        instr->hydrogen()->key()->representation();
    if (ExternalArrayOpRequiresTemp(key_representation, elements_kind)) {
      __ SmiToInteger64(key_reg, key_reg);
    } else if (instr->hydrogen()->IsDehoisted()) {
      // Sign-extend the key: it may be a negative 32-bit value, and the
      // dehoisted address computation happens in 64 bits.
      __ movsxlq(key_reg, key_reg);
    }
  }
  Operand operand(BuildFastArrayOperand(
      instr->elements(), key,
      instr->hydrogen()->key()->representation(), elements_kind,
      instr->base_offset()));

  if (elements_kind == EXTERNAL_FLOAT32_ELEMENTS) {
    XMMRegister value = ToDoubleRegister(instr->value());
    __ cvtsd2ss(value, value);  // Narrow the double to float32.
    __ movss(operand, value);
  } else if (elements_kind == EXTERNAL_FLOAT64_ELEMENTS) {
    __ movsd(operand, ToDoubleRegister(instr->value()));
  } else {
    Register value = ToRegister(instr->value());
    switch (elements_kind) {
      case EXTERNAL_INT8_ELEMENTS:
      case EXTERNAL_UINT8_ELEMENTS:
      case EXTERNAL_UINT8_CLAMPED_ELEMENTS:
        __ movb(operand, value);
        break;
      case EXTERNAL_INT16_ELEMENTS:
      case EXTERNAL_UINT16_ELEMENTS:
        __ movw(operand, value);
        break;
      case EXTERNAL_INT32_ELEMENTS:
      case EXTERNAL_UINT32_ELEMENTS:
        __ movl(operand, value);
        break;
      default:  // Remaining kinds elided in this listing.
        break;
    }
  }
}
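// Typed-array element stores dispatch purely on the static elements kind:
// one byte (movb) for the 8-bit kinds, two bytes (movw) for the 16-bit
// kinds, four bytes (movl) for the 32-bit kinds, and XMM moves for the
// float kinds, with float32 stores first narrowing the double value via
// cvtsd2ss. No write barrier is required: typed-array backing stores hold
// raw numbers, never tagged pointers.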
void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
  XMMRegister value = ToDoubleRegister(instr->value());
  LOperand* key = instr->key();
  if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
      instr->hydrogen()->IsDehoisted()) {
    __ movsxlq(ToRegister(key), ToRegister(key));
  }
  if (instr->NeedsCanonicalization()) {
    Label have_value;
    __ ucomisd(value, value);
    __ j(parity_odd, &have_value, Label::kNear);  // Not NaN.
    // Replace any NaN with the canonical (non-hole) NaN bit pattern.
    __ Set(kScratchRegister,
           bit_cast<uint64_t>(
               FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
    __ movq(value, kScratchRegister);
    __ bind(&have_value);
  }

  Operand double_store_operand = BuildFastArrayOperand(
      instr->elements(), key,
      instr->hydrogen()->key()->representation(), FAST_DOUBLE_ELEMENTS,
      instr->base_offset());
  __ movsd(double_store_operand, value);
}
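// `ucomisd value, value` compares the value against itself; the comparison
// is unordered (parity flag set) exactly when the value is NaN, so the
// parity_odd jump lets ordinary numbers skip the fix-up. Canonicalization
// matters because FixedDoubleArray reserves one specific NaN bit pattern as
// "the hole"; storing an arbitrary NaN produced by user code could
// otherwise be misread as a hole on a later load.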
void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
  HStoreKeyed* hinstr = instr->hydrogen();
  LOperand* key = instr->key();
  int offset = instr->base_offset();
  Representation representation = hinstr->value()->representation();

  if (kPointerSize == kInt32Size && !key->IsConstantOperand() &&
      instr->hydrogen()->IsDehoisted()) {
    __ movsxlq(ToRegister(key), ToRegister(key));
  }
  if (representation.IsInteger32() && SmiValuesAre32Bits()) {
    DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY);
    DCHECK(hinstr->elements_kind() == FAST_SMI_ELEMENTS);
    if (FLAG_debug_code) {
      Register scratch = kScratchRegister;
      __ Load(scratch,
              BuildFastArrayOperand(instr->elements(), key,
                                    instr->hydrogen()->key()->representation(),
                                    FAST_ELEMENTS, offset),
              Representation::Smi());
      __ AssertSmi(scratch);
    }
    // Store the int32 value directly into the upper half of the smi.
    offset += kPointerSize / 2;
  }

  Operand operand =
      BuildFastArrayOperand(instr->elements(), key,
                            instr->hydrogen()->key()->representation(),
                            FAST_ELEMENTS, offset);
  if (instr->value()->IsRegister()) {
    __ Store(operand, ToRegister(instr->value()), representation);
  } else {
    LConstantOperand* operand_value = LConstantOperand::cast(instr->value());
    if (IsInteger32Constant(operand_value)) {
      int32_t value = ToInteger32(operand_value);
      if (representation.IsSmi()) {
        __ Move(operand, Smi::FromInt(value));
      } else {
        __ movl(operand, Immediate(value));
      }
    } else {
      Handle<Object> handle_value = ToHandle(operand_value);
      __ Move(operand, handle_value);
    }
  }

  if (hinstr->NeedsWriteBarrier()) {
    Register elements = ToRegister(instr->elements());
    DCHECK(instr->value()->IsRegister());
    Register value = ToRegister(instr->value());
    DCHECK(!key->IsConstantOperand());
    SmiCheck check_needed = hinstr->value()->type().IsHeapObject()
        ? OMIT_SMI_CHECK : INLINE_SMI_CHECK;
    // Compute the address of the modified element into the key register.
    Register key_reg(ToRegister(key));
    __ leap(key_reg, operand);
    __ RecordWrite(elements, key_reg, value, kSaveFPRegs,
                   EMIT_REMEMBERED_SET, check_needed,
                   hinstr->PointersToHereCheckForValue());
  }
}
void LCodeGen::DoStoreKeyed(LStoreKeyed* instr) {
  if (instr->is_typed_elements()) {
    DoStoreKeyedExternalArray(instr);
  } else if (instr->hydrogen()->value()->representation().IsDouble()) {
    DoStoreKeyedFixedDoubleArray(instr);
  } else {
    DoStoreKeyedFixedArray(instr);
  }
}
void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code();
  CallCode(ic, RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
  Register object_reg = ToRegister(instr->object());

  Handle<Map> from_map = instr->original_map();
  Handle<Map> to_map = instr->transitioned_map();
  ElementsKind from_kind = instr->from_kind();
  ElementsKind to_kind = instr->to_kind();

  Label not_applicable;
  __ Cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
  __ j(not_equal, &not_applicable);
  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
    // A simple transition only swaps the map; the elements store is reused.
    Register new_map_reg = ToRegister(instr->new_map_temp());
    __ Move(new_map_reg, to_map, RelocInfo::EMBEDDED_OBJECT);
    __ movp(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
    // Write barrier for the map store.
    __ RecordWriteForMap(object_reg, new_map_reg, ToRegister(instr->temp()),
                         kDontSaveFPRegs);
  } else {
    PushSafepointRegistersScope scope(this);
    __ Move(rbx, to_map);
    bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
    TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
    __ CallStub(&stub);
    RecordSafepointWithRegisters(instr->pointer_map(), 0,
                                 Safepoint::kLazyDeopt);
  }
  __ bind(&not_applicable);
}
void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
  Register object = ToRegister(instr->object());
  Register temp = ToRegister(instr->temp());
  Label no_memento_found;
  __ TestJSArrayForAllocationMemento(object, temp, &no_memento_found);
  DeoptimizeIf(equal, instr, "memento found");
  __ bind(&no_memento_found);
}
void LCodeGen::DoStringAdd(LStringAdd* instr) {
  StringAddStub stub(isolate(),
                     instr->hydrogen()->flags(),
                     instr->hydrogen()->pretenure_flag());
  CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
}
void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
  class DeferredStringCharCodeAt FINAL : public LDeferredCode {
   public:
    DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredStringCharCodeAt(instr_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LStringCharCodeAt* instr_;
  };

  DeferredStringCharCodeAt* deferred =
      new(zone()) DeferredStringCharCodeAt(this, instr);

  StringCharLoadGenerator::Generate(masm(),
                                    ToRegister(instr->string()),
                                    ToRegister(instr->index()),
                                    ToRegister(instr->result()),
                                    deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharCodeAt(LStringCharCodeAt* instr) {
  Register string = ToRegister(instr->string());
  Register result = ToRegister(instr->result());

  // Put a valid pointer in the result register before the call: it is
  // referenced by the register pointer map.
  __ Set(result, 0);

  PushSafepointRegistersScope scope(this);
  __ Push(string);
  // Push the index as a smi.
  if (instr->index()->IsConstantOperand()) {
    int32_t const_index = ToInteger32(LConstantOperand::cast(instr->index()));
    __ Push(Smi::FromInt(const_index));
  } else {
    Register index = ToRegister(instr->index());
    __ Integer32ToSmi(index, index);
    __ Push(index);
  }
  CallRuntimeFromDeferred(
      Runtime::kStringCharCodeAtRT, 2, instr, instr->context());
  __ AssertSmi(rax);
  __ SmiToInteger32(rax, rax);
  __ StoreToSafepointRegisterSlot(result, rax);
}
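// All the LDeferredCode subclasses in this file follow the same pattern:
// the fast path is emitted inline and branches to deferred->entry() on the
// uncommon case, while the slow path (usually a runtime call made under
// PushSafepointRegistersScope) is generated out of line after the function
// body and falls back through deferred->exit(). This keeps rarely executed
// code off the hot instruction stream.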
void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
  class DeferredStringCharFromCode FINAL : public LDeferredCode {
   public:
    DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredStringCharFromCode(instr_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LStringCharFromCode* instr_;
  };

  DeferredStringCharFromCode* deferred =
      new(zone()) DeferredStringCharFromCode(this, instr);

  DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());
  DCHECK(!char_code.is(result));

  // Fast path: look the character up in the single-character string cache.
  __ cmpl(char_code, Immediate(String::kMaxOneByteCharCode));
  __ j(above, deferred->entry());
  __ movsxlq(char_code, char_code);
  __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
  __ movp(result, FieldOperand(result,
                               char_code, times_pointer_size,
                               FixedArray::kHeaderSize));
  __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
  __ j(equal, deferred->entry());
  __ bind(deferred->exit());
}


void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
  Register char_code = ToRegister(instr->char_code());
  Register result = ToRegister(instr->result());

  // Put a valid pointer in the result register before the call: it is
  // referenced by the register pointer map.
  __ Set(result, 0);

  PushSafepointRegistersScope scope(this);
  __ Integer32ToSmi(char_code, char_code);
  __ Push(char_code);
  CallRuntimeFromDeferred(Runtime::kCharFromCode, 1, instr, instr->context());
  __ StoreToSafepointRegisterSlot(result, rax);
}
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsRegister() || input->IsStackSlot());
  LOperand* output = instr->result();
  DCHECK(output->IsDoubleRegister());
  if (input->IsRegister()) {
    __ Cvtlsi2sd(ToDoubleRegister(output), ToRegister(input));
  } else {
    __ Cvtlsi2sd(ToDoubleRegister(output), ToOperand(input));
  }
}
void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
  LOperand* input = instr->value();
  LOperand* output = instr->result();
  __ LoadUint32(ToDoubleRegister(output), ToRegister(input));
}
void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
  class DeferredNumberTagI FINAL : public LDeferredCode {
   public:
    DeferredNumberTagI(LCodeGen* codegen, LNumberTagI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp1(),
                                       instr_->temp2(), SIGNED_INT32);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LNumberTagI* instr_;
  };

  LOperand* input = instr->value();
  DCHECK(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  if (SmiValuesAre32Bits()) {
    // Every int32 fits in a 32-bit smi; tagging cannot overflow.
    __ Integer32ToSmi(reg, reg);
  } else {
    DCHECK(SmiValuesAre31Bits());
    DeferredNumberTagI* deferred = new(zone()) DeferredNumberTagI(this, instr);
    __ Integer32ToSmi(reg, reg);
    __ j(overflow, deferred->entry());
    __ bind(deferred->exit());
  }
}
void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
  class DeferredNumberTagU FINAL : public LDeferredCode {
   public:
    DeferredNumberTagU(LCodeGen* codegen, LNumberTagU* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredNumberTagIU(instr_, instr_->value(), instr_->temp1(),
                                       instr_->temp2(), UNSIGNED_INT32);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LNumberTagU* instr_;
  };

  LOperand* input = instr->value();
  DCHECK(input->IsRegister() && input->Equals(instr->result()));
  Register reg = ToRegister(input);

  DeferredNumberTagU* deferred = new(zone()) DeferredNumberTagU(this, instr);
  __ cmpl(reg, Immediate(Smi::kMaxValue));
  __ j(above, deferred->entry());
  __ Integer32ToSmi(reg, reg);
  __ bind(deferred->exit());
}
void LCodeGen::DoDeferredNumberTagIU(LInstruction* instr,
                                     LOperand* value,
                                     LOperand* temp1,
                                     LOperand* temp2,
                                     IntegerSignedness signedness) {
  Label done, slow;
  Register reg = ToRegister(value);
  Register tmp = ToRegister(temp1);
  XMMRegister temp_xmm = ToDoubleRegister(temp2);

  if (signedness == SIGNED_INT32) {
    // We get here on overflow of Integer32ToSmi, so bits 30 and 31 of the
    // original integer disagree. Recover the value (see the note below),
    // then box it in a heap number.
    __ SmiToInteger32(reg, reg);
    __ xorl(reg, Immediate(0x80000000));
    __ cvtlsi2sd(temp_xmm, reg);
  } else {
    DCHECK(signedness == UNSIGNED_INT32);
    __ LoadUint32(temp_xmm, reg);
  }

  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, &slow);
    __ jmp(&done, Label::kNear);
  }

  // Slow case: call the runtime system to allocate the heap number.
  __ bind(&slow);
  {
    // Put a valid pointer in the result register before the call: it is
    // referenced by the register pointer map.
    __ Set(reg, 0);

    PushSafepointRegistersScope scope(this);
    __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
    __ StoreToSafepointRegisterSlot(reg, rax);
  }

  // Store the untagged value into the allocated heap number.
  __ bind(&done);
  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), temp_xmm);
}
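// Why the xorl with 0x80000000 recovers the value: with 31-bit smis,
// Integer32ToSmi is a 32-bit left shift by one. On overflow, bits 30 and 31
// of the original integer differ, so shifting back (SmiToInteger32, an
// arithmetic right shift) reproduces the original except that bit 31 now
// holds the old bit 30; exactly bit 31 is flipped. Flipping it again with
// xor 0x80000000 yields the original int32.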
void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
  class DeferredNumberTagD FINAL : public LDeferredCode {
   public:
    DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredNumberTagD(instr_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LNumberTagD* instr_;
  };

  XMMRegister input_reg = ToDoubleRegister(instr->value());
  Register reg = ToRegister(instr->result());
  Register tmp = ToRegister(instr->temp());

  DeferredNumberTagD* deferred = new(zone()) DeferredNumberTagD(this, instr);
  if (FLAG_inline_new) {
    __ AllocateHeapNumber(reg, tmp, deferred->entry());
  } else {
    __ jmp(deferred->entry());
  }
  __ bind(deferred->exit());
  __ movsd(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
}
void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
  Register reg = ToRegister(instr->result());

  PushSafepointRegistersScope scope(this);
  __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  __ StoreToSafepointRegisterSlot(reg, rax);
}
void LCodeGen::DoSmiTag(LSmiTag* instr) {
  HChange* hchange = instr->hydrogen();
  Register input = ToRegister(instr->value());
  Register output = ToRegister(instr->result());
  if (hchange->CheckFlag(HValue::kCanOverflow) &&
      hchange->value()->CheckFlag(HValue::kUint32)) {
    Condition is_smi = __ CheckUInteger32ValidSmiValue(input);
    DeoptimizeIf(NegateCondition(is_smi), instr, "not a smi");
  }
  __ Integer32ToSmi(output, input);
  if (hchange->CheckFlag(HValue::kCanOverflow) &&
      !hchange->value()->CheckFlag(HValue::kUint32)) {
    DeoptimizeIf(overflow, instr, "overflow");
  }
}
void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
  DCHECK(instr->value()->Equals(instr->result()));
  Register input = ToRegister(instr->value());
  if (instr->needs_check()) {
    Condition is_smi = __ CheckSmi(input);
    DeoptimizeIf(NegateCondition(is_smi), instr, "not a smi");
  } else {
    __ AssertSmi(input);
  }
  __ SmiToInteger32(input, input);
}
void LCodeGen::EmitNumberUntagD(LNumberUntagD* instr, Register input_reg,
                                XMMRegister result_reg, NumberUntagDMode mode) {
  bool can_convert_undefined_to_nan =
      instr->hydrogen()->can_convert_undefined_to_nan();
  bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();

  Label convert, load_smi, done;

  if (mode == NUMBER_CANDIDATE_IS_ANY_TAGGED) {
    __ JumpIfSmi(input_reg, &load_smi, Label::kNear);

    // Heap number map check.
    __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));

    if (can_convert_undefined_to_nan) {
      __ j(not_equal, &convert, Label::kNear);
    } else {
      DeoptimizeIf(not_equal, instr, "not a heap number");
    }

    if (deoptimize_on_minus_zero) {
      XMMRegister xmm_scratch = double_scratch0();
      __ xorps(xmm_scratch, xmm_scratch);
      __ ucomisd(xmm_scratch, result_reg);
      __ j(not_equal, &done, Label::kNear);
      __ movmskpd(kScratchRegister, result_reg);
      __ testq(kScratchRegister, Immediate(1));
      DeoptimizeIf(not_zero, instr, "minus zero");
    }
    __ jmp(&done, Label::kNear);

    if (can_convert_undefined_to_nan) {
      __ bind(&convert);

      // Convert undefined to NaN, computed as 0/0.
      __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
      DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
      __ xorps(result_reg, result_reg);
      __ divsd(result_reg, result_reg);
      __ jmp(&done, Label::kNear);
    }
  } else {
    DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
  }

  // Smi to XMM conversion.
  __ bind(&load_smi);
  __ SmiToInteger32(kScratchRegister, input_reg);
  __ Cvtlsi2sd(result_reg, kScratchRegister);
  __ bind(&done);
}
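// The 0/0 trick: xorps zeroes the register, and divsd 0.0/0.0 produces the
// canonical quiet NaN, which is how `undefined` is represented once it has
// been converted to a double. The minus-zero check works by comparing
// against +0.0 (ucomisd reports -0.0 equal to +0.0) and then inspecting the
// sign bit, which movmskpd copies out of the XMM lanes into a GP register.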
void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr, Label* done) {
  Register input_reg = ToRegister(instr->value());

  if (instr->truncating()) {
    Label no_heap_number, check_bools, check_false;

    // Heap number map check.
    __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &no_heap_number, Label::kNear);
    __ TruncateHeapNumberToI(input_reg, input_reg);
    __ jmp(done);

    __ bind(&no_heap_number);
    // Truncating conversions map undefined/false to 0 and true to 1.
    __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
    __ j(not_equal, &check_bools, Label::kNear);
    __ Set(input_reg, 0);
    __ jmp(done);

    __ bind(&check_bools);
    __ CompareRoot(input_reg, Heap::kTrueValueRootIndex);
    __ j(not_equal, &check_false, Label::kNear);
    __ Set(input_reg, 1);
    __ jmp(done);

    __ bind(&check_false);
    __ CompareRoot(input_reg, Heap::kFalseValueRootIndex);
    DeoptimizeIf(not_equal, instr, "not a heap number/undefined/true/false");
    __ Set(input_reg, 0);
  } else {
    XMMRegister scratch = ToDoubleRegister(instr->temp());
    __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    DeoptimizeIf(not_equal, instr, "not a heap number");
    __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    __ cvttsd2si(input_reg, xmm0);
    __ Cvtlsi2sd(scratch, input_reg);
    __ ucomisd(xmm0, scratch);
    DeoptimizeIf(not_equal, instr, "lost precision");
    DeoptimizeIf(parity_even, instr, "NaN");
    if (instr->hydrogen()->GetMinusZeroMode() == FAIL_ON_MINUS_ZERO) {
      __ testl(input_reg, input_reg);
      __ j(not_zero, done);
      __ movmskpd(input_reg, xmm0);
      __ andl(input_reg, Immediate(1));
      DeoptimizeIf(not_zero, instr, "minus zero");
    }
  }
}
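// The non-truncating path validates the conversion by round-tripping:
// cvttsd2si truncates the double to an int32, Cvtlsi2sd converts it back,
// and ucomisd compares the result with the original. Any fractional part or
// out-of-range input makes the round trip inexact (deopt: lost precision),
// NaN compares unordered (deopt via parity_even), and an integer result of
// zero still has to distinguish +0 from -0 by checking the sign bit
// extracted with movmskpd.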
void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
  class DeferredTaggedToI FINAL : public LDeferredCode {
   public:
    DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredTaggedToI(instr_, done());
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LTaggedToI* instr_;
  };

  LOperand* input = instr->value();
  DCHECK(input->IsRegister());
  DCHECK(input->Equals(instr->result()));
  Register input_reg = ToRegister(input);

  if (instr->hydrogen()->value()->representation().IsSmi()) {
    __ SmiToInteger32(input_reg, input_reg);
  } else {
    DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
    __ JumpIfNotSmi(input_reg, deferred->entry());
    __ SmiToInteger32(input_reg, input_reg);
    __ bind(deferred->exit());
  }
}
void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsDoubleRegister());

  Register input_reg = ToRegister(input);
  XMMRegister result_reg = ToDoubleRegister(result);

  HValue* value = instr->hydrogen()->value();
  NumberUntagDMode mode = value->representation().IsSmi()
      ? NUMBER_CANDIDATE_IS_SMI : NUMBER_CANDIDATE_IS_ANY_TAGGED;

  EmitNumberUntagD(instr, input_reg, result_reg, mode);
}
void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsDoubleRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsRegister());
  Register result_reg = ToRegister(result);
  XMMRegister input_reg = ToDoubleRegister(input);

  if (instr->truncating()) {
    __ TruncateDoubleToI(result_reg, input_reg);
  } else {
    Label lost_precision, is_nan, minus_zero, done;
    XMMRegister xmm_scratch = double_scratch0();
    Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
    __ DoubleToI(result_reg, input_reg, xmm_scratch,
                 instr->hydrogen()->GetMinusZeroMode(), &lost_precision,
                 &is_nan, &minus_zero, dist);
    __ jmp(&done, dist);
    __ bind(&lost_precision);
    DeoptimizeIf(no_condition, instr, "lost precision");
    __ bind(&is_nan);
    DeoptimizeIf(no_condition, instr, "NaN");
    __ bind(&minus_zero);
    DeoptimizeIf(no_condition, instr, "minus zero");
    __ bind(&done);
  }
}
void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
  LOperand* input = instr->value();
  DCHECK(input->IsDoubleRegister());
  LOperand* result = instr->result();
  DCHECK(result->IsRegister());
  Register result_reg = ToRegister(result);
  XMMRegister input_reg = ToDoubleRegister(input);

  Label lost_precision, is_nan, minus_zero, done;
  XMMRegister xmm_scratch = double_scratch0();
  Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
  __ DoubleToI(result_reg, input_reg, xmm_scratch,
               instr->hydrogen()->GetMinusZeroMode(), &lost_precision, &is_nan,
               &minus_zero, dist);
  __ jmp(&done, dist);
  __ bind(&lost_precision);
  DeoptimizeIf(no_condition, instr, "lost precision");
  __ bind(&is_nan);
  DeoptimizeIf(no_condition, instr, "NaN");
  __ bind(&minus_zero);
  DeoptimizeIf(no_condition, instr, "minus zero");
  __ bind(&done);
  __ Integer32ToSmi(result_reg, result_reg);
  DeoptimizeIf(overflow, instr, "overflow");
}
void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
  LOperand* input = instr->value();
  Condition cc = masm()->CheckSmi(ToRegister(input));
  DeoptimizeIf(NegateCondition(cc), instr, "not a smi");
}
void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
  if (!instr->hydrogen()->value()->type().IsHeapObject()) {
    LOperand* input = instr->value();
    Condition cc = masm()->CheckSmi(ToRegister(input));
    DeoptimizeIf(cc, instr, "Smi");
  }
}
void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
  Register input = ToRegister(instr->value());
  __ movp(kScratchRegister, FieldOperand(input, HeapObject::kMapOffset));

  if (instr->hydrogen()->is_interval_check()) {
    InstanceType first;
    InstanceType last;
    instr->hydrogen()->GetCheckInterval(&first, &last);

    __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
            Immediate(static_cast<int8_t>(first)));

    // If there is only one type in the interval, check for equality.
    if (first == last) {
      DeoptimizeIf(not_equal, instr, "wrong instance type");
    } else {
      DeoptimizeIf(below, instr, "wrong instance type");
      // The upper-bound check can be omitted for an open-ended interval.
      if (last != LAST_TYPE) {
        __ cmpb(FieldOperand(kScratchRegister, Map::kInstanceTypeOffset),
                Immediate(static_cast<int8_t>(last)));
        DeoptimizeIf(above, instr, "wrong instance type");
      }
    }
  } else {
    uint8_t mask;
    uint8_t tag;
    instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
    // ... (mask-and-tag comparison elided in this listing)
  }
}
void LCodeGen::DoCheckValue(LCheckValue* instr) {
  Register reg = ToRegister(instr->value());
  __ Cmp(reg, instr->hydrogen()->object().handle());
  DeoptimizeIf(not_equal, instr, "value mismatch");
}
void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
  {
    PushSafepointRegistersScope scope(this);
    __ Push(object);
    __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
    RecordSafepointWithRegisters(
        instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
    __ testp(rax, Immediate(kSmiTagMask));
  }
  DeoptimizeIf(zero, instr, "instance migration failed");
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
  class DeferredCheckMaps FINAL : public LDeferredCode {
   public:
    DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
        : LDeferredCode(codegen), instr_(instr), object_(object) {
      SetExit(check_maps());
    }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredInstanceMigration(instr_, object_);
    }
    Label* check_maps() { return &check_maps_; }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LCheckMaps* instr_;
    Label check_maps_;
    Register object_;
  };

  if (instr->hydrogen()->IsStabilityCheck()) {
    const UniqueSet<Map>* maps = instr->hydrogen()->maps();
    for (int i = 0; i < maps->size(); ++i) {
      AddStabilityDependency(maps->at(i).handle());
    }
    return;
  }

  LOperand* input = instr->value();
  DCHECK(input->IsRegister());
  Register reg = ToRegister(input);

  DeferredCheckMaps* deferred = NULL;
  if (instr->hydrogen()->HasMigrationTarget()) {
    deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
    __ bind(deferred->check_maps());
  }

  const UniqueSet<Map>* maps = instr->hydrogen()->maps();
  Label success;
  for (int i = 0; i < maps->size() - 1; i++) {
    Handle<Map> map = maps->at(i).handle();
    __ CompareMap(reg, map);
    __ j(equal, &success, Label::kNear);
  }

  Handle<Map> map = maps->at(maps->size() - 1).handle();
  __ CompareMap(reg, map);
  if (instr->hydrogen()->HasMigrationTarget()) {
    __ j(not_equal, deferred->entry());
  } else {
    DeoptimizeIf(not_equal, instr, "wrong map");
  }

  __ bind(&success);
}
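// The map check compares the object's hidden class pointer against each map
// the optimizer saw for this site; matching any of them proves the shape
// assumptions the optimized code was compiled under. When one of the maps
// has a migration target, a failed check first attempts in-place instance
// migration (the deferred path above) and re-runs the check, deoptimizing
// only if migration does not help. A pure stability check emits no code at
// all; it just registers a code dependency on the maps staying stable.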
void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->unclamped());
  XMMRegister xmm_scratch = double_scratch0();
  Register result_reg = ToRegister(instr->result());
  __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg);
}
void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
  DCHECK(instr->unclamped()->Equals(instr->result()));
  Register value_reg = ToRegister(instr->result());
  __ ClampUint8(value_reg);
}
void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
  DCHECK(instr->unclamped()->Equals(instr->result()));
  Register input_reg = ToRegister(instr->unclamped());
  XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm());
  XMMRegister xmm_scratch = double_scratch0();
  Label is_smi, done, heap_number;
  Label::Distance dist = DeoptEveryNTimes() ? Label::kFar : Label::kNear;
  __ JumpIfSmi(input_reg, &is_smi, dist);

  // Check for a heap number.
  __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
         factory()->heap_number_map());
  __ j(equal, &heap_number, Label::kNear);

  // Undefined is converted to zero for clamping conversions.
  __ Cmp(input_reg, factory()->undefined_value());
  DeoptimizeIf(not_equal, instr, "not a heap number/undefined");
  __ xorl(input_reg, input_reg);
  __ jmp(&done, Label::kNear);

  // Heap number.
  __ bind(&heap_number);
  __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset));
  __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg);
  __ jmp(&done, Label::kNear);

  // Smi.
  __ bind(&is_smi);
  __ SmiToInteger32(input_reg, input_reg);
  __ ClampUint8(input_reg);

  __ bind(&done);
}
void LCodeGen::DoDoubleBits(LDoubleBits* instr) {
  XMMRegister value_reg = ToDoubleRegister(instr->value());
  Register result_reg = ToRegister(instr->result());
  if (instr->hydrogen()->bits() == HDoubleBits::HIGH) {
    __ movq(result_reg, value_reg);
    __ shrq(result_reg, Immediate(32));
  } else {
    __ movd(result_reg, value_reg);
  }
}
void LCodeGen::DoConstructDouble(LConstructDouble* instr) {
  Register hi_reg = ToRegister(instr->hi());
  Register lo_reg = ToRegister(instr->lo());
  XMMRegister result_reg = ToDoubleRegister(instr->result());
  XMMRegister xmm_scratch = double_scratch0();
  __ movd(result_reg, hi_reg);
  __ psllq(result_reg, 32);
  __ movd(xmm_scratch, lo_reg);
  __ orps(result_reg, xmm_scratch);
}
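// DoubleBits/ConstructDouble move raw IEEE-754 bit patterns between GP and
// XMM registers without any numeric conversion: movd transfers the low 32
// bits (zero-extending on the way into an XMM register), shrq 32 extracts
// the high word, and the psllq/orps pair reassembles hi:lo into one 64-bit
// pattern. In C the equivalent round trip is just a memcpy between a double
// and a uint64_t.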
void LCodeGen::DoAllocate(LAllocate* instr) {
  class DeferredAllocate FINAL : public LDeferredCode {
   public:
    DeferredAllocate(LCodeGen* codegen, LAllocate* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredAllocate(instr_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LAllocate* instr_;
  };

  DeferredAllocate* deferred = new(zone()) DeferredAllocate(this, instr);

  Register result = ToRegister(instr->result());
  Register temp = ToRegister(instr->temp());

  // Allocate memory for the object.
  AllocationFlags flags = TAG_OBJECT;
  if (instr->hydrogen()->MustAllocateDoubleAligned()) {
    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
  }
  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
  }

  if (instr->size()->IsConstantOperand()) {
    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
    if (size <= Page::kMaxRegularHeapObjectSize) {
      __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
    } else {
      __ jmp(deferred->entry());
    }
  } else {
    Register size = ToRegister(instr->size());
    __ Allocate(size, result, temp, no_reg, deferred->entry(), flags);
  }

  __ bind(deferred->exit());

  if (instr->hydrogen()->MustPrefillWithFiller()) {
    if (instr->size()->IsConstantOperand()) {
      int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
      __ movl(temp, Immediate((size / kPointerSize) - 1));
    } else {
      temp = ToRegister(instr->size());
      __ sarp(temp, Immediate(kPointerSizeLog2));
      __ decl(temp);
    }
    Label loop;
    __ bind(&loop);
    __ Move(FieldOperand(result, temp, times_pointer_size, 0),
            isolate()->factory()->one_pointer_filler_map());
    __ decl(temp);
    __ j(not_zero, &loop);
  }
}
void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
  Register result = ToRegister(instr->result());

  // Put a valid pointer in the result register before the call: it is
  // referenced by the register pointer map.
  __ Move(result, Smi::FromInt(0));

  PushSafepointRegistersScope scope(this);
  if (instr->size()->IsRegister()) {
    Register size = ToRegister(instr->size());
    DCHECK(!size.is(result));
    __ Integer32ToSmi(size, size);
    __ Push(size);
  } else {
    int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
    __ Push(Smi::FromInt(size));
  }

  int flags = 0;
  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
  } else {
    flags = AllocateTargetSpace::update(flags, NEW_SPACE);
  }
  __ Push(Smi::FromInt(flags));

  CallRuntimeFromDeferred(
      Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
  __ StoreToSafepointRegisterSlot(result, rax);
}
void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
  DCHECK(ToRegister(instr->value()).is(rax));
  __ Push(rax);
  CallRuntime(Runtime::kToFastProperties, 1, instr);
}
void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
  Label materialized;
  // Registers will be used as follows:
  // rcx = literals array.
  // rbx = regexp literal.
  // rax = regexp literal clone.
  int literal_offset =
      FixedArray::OffsetOfElementAt(instr->hydrogen()->literal_index());
  __ Move(rcx, instr->hydrogen()->literals());
  __ movp(rbx, FieldOperand(rcx, literal_offset));
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(not_equal, &materialized, Label::kNear);

  // Create the regexp literal by calling the runtime; the result is in rax.
  __ Push(rcx);
  __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
  __ Push(instr->hydrogen()->pattern());
  __ Push(instr->hydrogen()->flags());
  CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
  __ movp(rbx, rax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated, Label::kNear);

  __ bind(&runtime_allocate);
  __ Push(rbx);
  __ Push(Smi::FromInt(size));
  CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
  __ Pop(rbx);

  __ bind(&allocated);
  // Copy the contents into the newly allocated memory (word-by-word copy
  // elided in this listing).
}
void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
  // Use the fast-case closure allocation stub that allocates in new space
  // for nested functions that don't need literals cloning.
  bool pretenure = instr->hydrogen()->pretenure();
  if (!pretenure && instr->hydrogen()->has_no_literals()) {
    FastNewClosureStub stub(isolate(), instr->hydrogen()->strict_mode(),
                            instr->hydrogen()->kind());
    __ Move(rbx, instr->hydrogen()->shared_info());
    CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
  } else {
    __ Push(rsi);
    __ Push(instr->hydrogen()->shared_info());
    __ PushRoot(pretenure ? Heap::kTrueValueRootIndex
                          : Heap::kFalseValueRootIndex);
    CallRuntime(Runtime::kNewClosure, 3, instr);
  }
}
void LCodeGen::DoTypeof(LTypeof* instr) {
  LOperand* input = instr->value();
  EmitPushTaggedOperand(input);
  CallRuntime(Runtime::kTypeof, 1, instr);
}


void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
  DCHECK(!operand->IsDoubleRegister());
  if (operand->IsConstantOperand()) {
    __ Push(ToHandle(LConstantOperand::cast(operand)));
  } else if (operand->IsRegister()) {
    __ Push(ToRegister(operand));
  } else {
    __ Push(ToOperand(operand));
  }
}
void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
  Register input = ToRegister(instr->value());
  Condition final_branch_condition = EmitTypeofIs(instr, input);
  if (final_branch_condition != no_condition) {
    EmitBranch(instr, final_branch_condition);
  }
}


Condition LCodeGen::EmitTypeofIs(LTypeofIsAndBranch* instr, Register input) {
  Label* true_label = instr->TrueLabel(chunk_);
  Label* false_label = instr->FalseLabel(chunk_);
  Handle<String> type_name = instr->type_literal();
  int left_block = instr->TrueDestination(chunk_);
  int right_block = instr->FalseDestination(chunk_);
  int next_block = GetNextEmittedBlock();

  Label::Distance true_distance = left_block == next_block ? Label::kNear
                                                           : Label::kFar;
  Label::Distance false_distance = right_block == next_block ? Label::kNear
                                                             : Label::kFar;
  Condition final_branch_condition = no_condition;
  Factory* factory = isolate()->factory();
  if (String::Equals(type_name, factory->number_string())) {
    __ JumpIfSmi(input, true_label, true_distance);
    __ CompareRoot(FieldOperand(input, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory->string_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ CmpObjectType(input, FIRST_NONSTRING_TYPE, input);
    __ j(above_equal, false_label, false_distance);
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = zero;

  } else if (String::Equals(type_name, factory->symbol_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ CmpObjectType(input, SYMBOL_TYPE, input);
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory->boolean_string())) {
    __ CompareRoot(input, Heap::kTrueValueRootIndex);
    __ j(equal, true_label, true_distance);
    __ CompareRoot(input, Heap::kFalseValueRootIndex);
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory->undefined_string())) {
    __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
    __ j(equal, true_label, true_distance);
    __ JumpIfSmi(input, false_label, false_distance);
    // Check for undetectable objects => true.
    __ movp(input, FieldOperand(input, HeapObject::kMapOffset));
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = not_zero;

  } else if (String::Equals(type_name, factory->function_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ CmpObjectType(input, JS_FUNCTION_TYPE, input);
    __ j(equal, true_label, true_distance);
    __ CmpInstanceType(input, JS_FUNCTION_PROXY_TYPE);
    final_branch_condition = equal;

  } else if (String::Equals(type_name, factory->object_string())) {
    __ JumpIfSmi(input, false_label, false_distance);
    __ CompareRoot(input, Heap::kNullValueRootIndex);
    __ j(equal, true_label, true_distance);
    __ CmpObjectType(input, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, input);
    __ j(below, false_label, false_distance);
    __ CmpInstanceType(input, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, false_label, false_distance);
    // Check for undetectable objects => false.
    __ testb(FieldOperand(input, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    final_branch_condition = zero;

  } else {
    __ jmp(false_label, false_distance);
  }

  return final_branch_condition;
}
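// The undetectable bit (Map::kIsUndetectable) exists for objects such as
// document.all that must masquerade as undefined: typeof reports
// "undefined" for them (not_zero above), and they are explicitly excluded
// from "string" and "object" (zero, i.e. the bit must be clear). The
// returned condition, combined with the flags left by the final compare, is
// what EmitBranch materializes into the actual jump.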
void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
  Register temp = ToRegister(instr->temp());

  // Get the frame pointer for the calling frame.
  __ movp(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &check_frame_marker, Label::kNear);
  __ movp(temp, Operand(temp, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
         Smi::FromInt(StackFrame::CONSTRUCT));
  EmitBranch(instr, equal);
}
void LCodeGen::EnsureSpaceForLazyDeopt(int space_needed) {
  if (!info()->IsStub()) {
    // Ensure that there is enough space after the previous lazy-bailout
    // point for the call patched in at lazy deoptimization.
    int current_pc = masm()->pc_offset();
    if (current_pc < last_lazy_deopt_pc_ + space_needed) {
      int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
      __ Nop(padding_size);
    }
  }
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
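// Lazy deoptimization works by patching a call into the deoptimizer over
// the code right after a call site once the optimized code has been
// invalidated. The Nop padding guarantees that two patch sites never
// overlap: consecutive lazy-deopt points must be at least the patch size
// apart, or the second patch would clobber the first.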
void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
  last_lazy_deopt_pc_ = masm()->pc_offset();
  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
  Deoptimizer::BailoutType type = instr->hydrogen()->type();
  // Unconditional deoptimization (bailout-type selection elided in this
  // listing).
  DeoptimizeIf(no_condition, instr, instr->hydrogen()->reason(), type);
}


void LCodeGen::DoDummy(LDummy* instr) {
  // Nothing to see here, move on!
}


void LCodeGen::DoDummyUse(LDummyUse* instr) {
  // Nothing to see here, move on!
}
void LCodeGen::DoDeferredStackCheck(LStackCheck* instr) {
  PushSafepointRegistersScope scope(this);
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
  __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
  RecordSafepointWithLazyDeopt(instr, RECORD_SAFEPOINT_WITH_REGISTERS);
  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
}
void LCodeGen::DoStackCheck(LStackCheck* instr) {
  class DeferredStackCheck FINAL : public LDeferredCode {
   public:
    DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
        : LDeferredCode(codegen), instr_(instr) { }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredStackCheck(instr_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LStackCheck* instr_;
  };

  DCHECK(instr->HasEnvironment());
  LEnvironment* env = instr->environment();
  // There is no LLazyBailout instruction for stack checks, so prepare for
  // lazy deoptimization explicitly here.
  if (instr->hydrogen()->is_function_entry()) {
    // Perform stack overflow check.
    Label done;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &done, Label::kNear);

    DCHECK(instr->context()->IsRegister());
    CallCode(isolate()->builtins()->StackCheck(),
             RelocInfo::CODE_TARGET,
             instr);
    __ bind(&done);
  } else {
    DCHECK(instr->hydrogen()->is_backwards_branch());
    // Perform the stack overflow check before jumping backwards.
    DeferredStackCheck* deferred_stack_check =
        new(zone()) DeferredStackCheck(this, instr);
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(below, deferred_stack_check->entry());
    EnsureSpaceForLazyDeopt(Deoptimizer::patch_size());
    __ bind(instr->done_label());
    deferred_stack_check->SetExit(instr->done_label());
    RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt);
    // The safepoint index is recorded when the call is emitted in the
    // deferred code.
  }
}
void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
  // Register the environment at the OSR entry so that on-stack replacement
  // can deoptimize from this point if necessary.
  LEnvironment* environment = instr->environment();
  DCHECK(!environment->HasBeenRegistered());
  RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
  GenerateOsrPrologue();
}
void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  DeoptimizeIf(equal, instr, "undefined");

  Register null_value = rdi;
  __ LoadRoot(null_value, Heap::kNullValueRootIndex);
  __ cmpp(rax, null_value);
  DeoptimizeIf(equal, instr, "null");

  // ... (smi and instance-type checks elided in this listing)

  Label use_cache, call_runtime;
  __ CheckEnumCache(null_value, &call_runtime);

  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);
  CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);

  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  DeoptimizeIf(not_equal, instr, "wrong map");
  __ bind(&use_cache);
}
void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
  Register map = ToRegister(instr->map());
  Register result = ToRegister(instr->result());
  Label load_cache, done;
  __ EnumLength(result, map);
  __ Cmp(result, Smi::FromInt(0));
  __ j(not_equal, &load_cache, Label::kNear);
  __ LoadRoot(result, Heap::kEmptyFixedArrayRootIndex);
  __ jmp(&done, Label::kNear);
  __ bind(&load_cache);
  __ LoadInstanceDescriptors(map, result);
  __ movp(result, FieldOperand(result, DescriptorArray::kEnumCacheOffset));
  __ movp(result, FieldOperand(result, FixedArray::SizeFor(instr->idx())));
  __ bind(&done);
  Condition cc = masm()->CheckSmi(result);
  DeoptimizeIf(cc, instr, "no cache");
}
void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
  Register object = ToRegister(instr->value());
  __ cmpp(ToRegister(instr->map()),
          FieldOperand(object, HeapObject::kMapOffset));
  DeoptimizeIf(not_equal, instr, "wrong map");
}


void LCodeGen::DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                           Register object,
                                           Register index) {
  PushSafepointRegistersScope scope(this);
  __ Push(object);
  __ Push(index);
  __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
  RecordSafepointWithRegisters(
      instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
  __ StoreToSafepointRegisterSlot(object, rax);
}
void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
  class DeferredLoadMutableDouble FINAL : public LDeferredCode {
   public:
    DeferredLoadMutableDouble(LCodeGen* codegen,
                              LLoadFieldByIndex* instr,
                              Register object,
                              Register index)
        : LDeferredCode(codegen),
          instr_(instr),
          object_(object),
          index_(index) {
    }
    virtual void Generate() OVERRIDE {
      codegen()->DoDeferredLoadMutableDouble(instr_, object_, index_);
    }
    virtual LInstruction* instr() OVERRIDE { return instr_; }
   private:
    LLoadFieldByIndex* instr_;
    Register object_;
    Register index_;
  };

  Register object = ToRegister(instr->object());
  Register index = ToRegister(instr->index());

  DeferredLoadMutableDouble* deferred;
  deferred = new(zone()) DeferredLoadMutableDouble(this, instr, object, index);

  Label out_of_object, done;
  // The low bit of the index marks a mutable-double load, which goes
  // through the deferred path.
  __ Move(kScratchRegister, Smi::FromInt(1));
  __ testp(index, kScratchRegister);
  __ j(not_zero, deferred->entry());
  __ sarp(index, Immediate(1));

  __ SmiToInteger32(index, index);
  __ cmpl(index, Immediate(0));
  __ j(less, &out_of_object, Label::kNear);
  __ movp(object, FieldOperand(object,
                               index,
                               times_pointer_size,
                               JSObject::kHeaderSize));
  __ jmp(&done, Label::kNear);

  __ bind(&out_of_object);
  __ movp(object, FieldOperand(object, JSObject::kPropertiesOffset));
  __ negl(index);
  // The index is now the out-of-object property index plus 1.
  __ movp(object, FieldOperand(object,
                               index,
                               times_pointer_size,
                               FixedArray::kHeaderSize - kPointerSize));
  __ bind(deferred->exit());
  __ bind(&done);
}
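// The field index packs two pieces of information into one small integer:
// the low bit says whether the field holds a boxed mutable double (handled
// in the deferred path), and after that bit is shifted out the sign selects
// the storage location. Non-negative indices address in-object fields;
// negative ones address the out-of-object properties backing store, which
// is why the index is negated before the second load.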
void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
  Register context = ToRegister(instr->context());
  __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), context);
}
void LCodeGen::DoAllocateBlockContext(LAllocateBlockContext* instr) {
  Handle<ScopeInfo> scope_info = instr->scope_info();
  __ Push(scope_info);
  __ Push(ToRegister(instr->function()));
  CallRuntime(Runtime::kPushBlockContext, 2, instr);
}