20 class SafepointGenerator
FINAL :
public CallWrapper {
23 LPointerMap* pointers,
24 Safepoint::DeoptMode
mode)
33 codegen_->RecordSafepoint(pointers_, deopt_mode_);
63 code->set_safepoint_table_offset(
safepoints_.GetCodeOffset());
64 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
70 DCHECK(info()->saves_caller_doubles());
72 Comment(
";;; Save clobbered callee double registers");
75 BitVector::Iterator save_iterator(doubles);
76 while (!save_iterator.Done()) {
79 save_iterator.Advance();
86 DCHECK(info()->saves_caller_doubles());
88 Comment(
";;; Restore clobbered callee double registers");
90 BitVector::Iterator save_iterator(doubles);
92 while (!save_iterator.Done()) {
95 save_iterator.Advance();
104 if (info()->IsOptimizing()) {
108 if (strlen(FLAG_stop_at) > 0 &&
109 info_->function()->name()->IsUtf8EqualTo(
CStrVector(FLAG_stop_at))) {
123 if (info_->this_has_uses() &&
124 info_->strict_mode() ==
SLOPPY &&
125 !info_->is_native()) {
127 int receiver_offset = info_->scope()->num_parameters() *
kPointerSize;
129 __ CompareRoot(
r2, Heap::kUndefinedValueRootIndex);
141 info()->set_prologue_offset(masm_->pc_offset());
143 if (info()->IsStub()) {
146 __ Prologue(info()->IsCodePreAgingActive());
149 info_->AddNoFrameRange(0, masm_->pc_offset());
155 if (FLAG_debug_code) {
174 if (info()->saves_caller_doubles()) {
180 if (heap_slots > 0) {
181 Comment(
";;; Allocate local context");
182 bool need_write_barrier =
true;
184 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
185 FastNewContextStub stub(isolate(), heap_slots);
188 need_write_barrier =
false;
200 for (
int i = 0;
i < num_parameters;
i++) {
211 if (need_write_barrier) {
212 __ RecordWriteContextSlot(
219 }
else if (FLAG_debug_code) {
221 __ JumpIfInNewSpace(
cp,
r0, &done);
222 __ Abort(kExpectedNewSpaceObject);
227 Comment(
";;; End allocate local context");
231 if (FLAG_trace && info()->IsOptimizing()) {
236 return !is_aborted();
259 if (!instr->IsLazyBailout() && !instr->
IsGap()) {
268 for (
int i = 0; !is_aborted() &&
i <
deferred_.length();
i++) {
277 "-------------------- Deferred %s --------------------",
307 masm()->CheckConstPool(
true,
false);
309 return !is_aborted();
323 Abort(kGeneratedCodeIsTooLarge);
327 Label needs_frame, call_deopt_entry;
329 Comment(
";;; -------------------- Jump table --------------------");
335 for (
int i = 0;
i < length;
i++) {
341 DeoptComment(table_entry->
reason);
346 __ mov(entry_offset,
Operand(entry - base));
349 DCHECK(!info()->saves_caller_doubles());
350 if (needs_frame.is_bound()) {
353 __ bind(&needs_frame);
354 Comment(
";;; call deopt with frame");
364 __ bind(&call_deopt_entry);
367 __ add(entry_offset, entry_offset,
368 Operand(ExternalReference::ForDeoptEntry(base)));
369 __ blx(entry_offset);
372 masm()->CheckConstPool(
false,
false);
376 bool need_branch = ((
i + 1) != length) || call_deopt_entry.is_bound();
378 if (need_branch)
__ b(&call_deopt_entry);
380 masm()->CheckConstPool(
false, !need_branch);
384 if (!call_deopt_entry.is_bound()) {
386 __ bind(&call_deopt_entry);
388 if (info()->saves_caller_doubles()) {
394 __ add(entry_offset, entry_offset,
395 Operand(ExternalReference::ForDeoptEntry(base)));
396 __ blx(entry_offset);
402 masm()->CheckConstPool(
true,
false);
406 if (!is_aborted()) status_ =
DONE;
407 return !is_aborted();
414 return !is_aborted();
435 if (op->IsRegister()) {
437 }
else if (op->IsConstantOperand()) {
438 LConstantOperand* const_op = LConstantOperand::cast(op);
439 HConstant* constant = chunk_->LookupConstant(const_op);
443 DCHECK(literal->IsNumber());
446 Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
449 __ Move(scratch, literal);
452 }
else if (op->IsStackSlot()) {
462 DCHECK(op->IsDoubleRegister());
470 if (op->IsDoubleRegister()) {
472 }
else if (op->IsConstantOperand()) {
473 LConstantOperand* const_op = LConstantOperand::cast(op);
474 HConstant* constant = chunk_->LookupConstant(const_op);
478 DCHECK(literal->IsNumber());
480 __ vmov(flt_scratch,
ip);
481 __ vcvt_f64_s32(dbl_scratch, flt_scratch);
484 Abort(kUnsupportedDoubleImmediate);
486 Abort(kUnsupportedTaggedImmediate);
488 }
else if (op->IsStackSlot()) {
492 __ vldr(dbl_scratch, mem_op.rn(), mem_op.
offset());
501 HConstant* constant = chunk_->LookupConstant(op);
502 DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
503 return constant->handle(isolate());
508 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
513 return chunk_->LookupLiteralRepresentation(op).IsSmi();
524 HConstant* constant = chunk_->LookupConstant(op);
525 int32_t value = constant->Integer32Value();
533 HConstant* constant = chunk_->LookupConstant(op);
539 HConstant* constant = chunk_->LookupConstant(op);
540 DCHECK(constant->HasDoubleValue());
541 return constant->DoubleValue();
546 if (op->IsConstantOperand()) {
547 LConstantOperand* const_op = LConstantOperand::cast(op);
548 HConstant* constant =
chunk()->LookupConstant(const_op);
551 DCHECK(constant->HasSmiValue());
554 DCHECK(constant->HasInteger32Value());
555 return Operand(constant->Integer32Value());
557 Abort(kToOperandUnsupportedDoubleImmediate);
560 return Operand(constant->handle(isolate()));
561 }
else if (op->IsRegister()) {
563 }
else if (op->IsDoubleRegister()) {
564 Abort(kToOperandIsDoubleRegisterUnimplemented);
565 return Operand::Zero();
569 return Operand::Zero();
580 DCHECK(!op->IsRegister());
581 DCHECK(!op->IsDoubleRegister());
582 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
594 DCHECK(op->IsDoubleStackSlot());
607 Translation* translation) {
608 if (environment ==
NULL)
return;
611 int translation_size = environment->translation_size();
613 int height = translation_size - environment->parameter_count();
616 bool has_closure_id = !info()->closure().is_null() &&
617 !info()->closure().is_identical_to(environment->closure());
618 int closure_id = has_closure_id
620 : Translation::kSelfLiteralId;
622 switch (environment->frame_type()) {
624 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
627 translation->BeginConstructStubFrame(closure_id, translation_size);
630 DCHECK(translation_size == 1);
632 translation->BeginGetterStubFrame(closure_id);
635 DCHECK(translation_size == 2);
637 translation->BeginSetterStubFrame(closure_id);
640 translation->BeginCompiledStubFrame();
643 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
647 int object_index = 0;
648 int dematerialized_index = 0;
649 for (
int i = 0;
i < translation_size; ++
i) {
650 LOperand* value = environment->values()->at(
i);
654 environment->HasTaggedValueAt(
i),
655 environment->HasUint32ValueAt(
i),
657 &dematerialized_index);
663 Translation* translation,
667 int* object_index_pointer,
668 int* dematerialized_index_pointer) {
669 if (op == LEnvironment::materialization_marker()) {
670 int object_index = (*object_index_pointer)++;
671 if (environment->ObjectIsDuplicateAt(object_index)) {
672 int dupe_of = environment->ObjectDuplicateOfAt(object_index);
673 translation->DuplicateObject(dupe_of);
676 int object_length = environment->ObjectLengthAt(object_index);
677 if (environment->ObjectIsArgumentsAt(object_index)) {
678 translation->BeginArgumentsObject(object_length);
680 translation->BeginCapturedObject(object_length);
682 int dematerialized_index = *dematerialized_index_pointer;
683 int env_offset = environment->translation_size() + dematerialized_index;
684 *dematerialized_index_pointer += object_length;
685 for (
int i = 0;
i < object_length; ++
i) {
686 LOperand* value = environment->values()->at(env_offset +
i);
690 environment->HasTaggedValueAt(env_offset +
i),
691 environment->HasUint32ValueAt(env_offset +
i),
692 object_index_pointer,
693 dematerialized_index_pointer);
698 if (op->IsStackSlot()) {
700 translation->StoreStackSlot(op->
index());
701 }
else if (is_uint32) {
702 translation->StoreUint32StackSlot(op->
index());
704 translation->StoreInt32StackSlot(op->
index());
706 }
else if (op->IsDoubleStackSlot()) {
707 translation->StoreDoubleStackSlot(op->
index());
708 }
else if (op->IsRegister()) {
711 translation->StoreRegister(reg);
712 }
else if (is_uint32) {
713 translation->StoreUint32Register(reg);
715 translation->StoreInt32Register(reg);
717 }
else if (op->IsDoubleRegister()) {
719 translation->StoreDoubleRegister(reg);
720 }
else if (op->IsConstantOperand()) {
721 HConstant* constant =
chunk()->LookupConstant(LConstantOperand::cast(op));
723 translation->StoreLiteral(src_index);
731 int size = masm()->CallSize(code,
mode);
732 if (code->kind() == Code::BINARY_OP_IC ||
733 code->kind() == Code::COMPARE_IC) {
762 if (code->kind() == Code::BINARY_OP_IC ||
763 code->kind() == Code::COMPARE_IC) {
782 if (context->IsRegister()) {
784 }
else if (context->IsStackSlot()) {
786 }
else if (context->IsConstantOperand()) {
787 HConstant* constant =
788 chunk_->LookupConstant(LConstantOperand::cast(context));
801 __ CallRuntimeSaveDoubles(
id);
803 instr->
pointer_map(), argc, Safepoint::kNoLazyDeopt);
808 Safepoint::DeoptMode
mode) {
809 environment->set_has_been_used();
810 if (!environment->HasBeenRegistered()) {
825 int jsframe_count = 0;
832 Translation translation(&
translations_, frame_count, jsframe_count, zone());
835 int pc_offset = masm()->pc_offset();
836 environment->Register(deoptimization_index,
838 (
mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
849 DCHECK(environment->HasBeenRegistered());
850 int id = environment->deoptimization_index();
851 DCHECK(info()->IsOptimizing() || info()->IsStub());
855 Abort(kBailoutWasNotPrepared);
859 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
861 ExternalReference count = ExternalReference::stress_deopt_count(isolate());
864 if (condition !=
al) {
878 if (condition !=
al) {
888 if (condition !=
al) {
890 __ cmp(scratch, Operand::Zero());
894 if (info()->ShouldTrapOnDeopt()) {
895 __ stop(
"trap_on_deopt", condition);
904 !info()->saves_caller_doubles()) {
905 DeoptComment(reason);
922 const char* detail) {
932 if (length == 0)
return;
938 data->SetTranslationByteArray(*translations);
940 data->SetOptimizationId(
Smi::FromInt(info_->optimization_id()));
941 if (info_->IsOptimizing()) {
944 data->SetSharedFunctionInfo(*info_->shared_info());
958 data->SetOsrAstId(
Smi::FromInt(info_->osr_ast_id().ToInt()));
962 for (
int i = 0;
i < length;
i++) {
964 data->SetAstId(
i, env->ast_id());
965 data->SetTranslationIndex(
i,
Smi::FromInt(env->translation_index()));
966 data->SetArgumentsStackHeight(
i,
970 code->set_deoptimization_data(*data);
988 chunk()->inlined_closures();
990 for (
int i = 0, length = inlined_closures->length();
1013 LPointerMap* pointers,
1014 Safepoint::Kind kind,
1016 Safepoint::DeoptMode deopt_mode) {
1020 Safepoint safepoint =
safepoints_.DefineSafepoint(masm(),
1021 kind, arguments, deopt_mode);
1022 for (
int i = 0;
i < operands->length();
i++) {
1024 if (pointer->IsStackSlot()) {
1025 safepoint.DefinePointerSlot(pointer->
index(), zone());
1026 }
else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
1027 safepoint.DefinePointerRegister(
ToRegister(pointer), zone());
1030 if (FLAG_enable_ool_constant_pool && (kind & Safepoint::kWithRegisters)) {
1032 safepoint.DefinePointerRegister(
pp, zone());
1038 Safepoint::DeoptMode deopt_mode) {
1044 LPointerMap empty_pointers(zone());
1051 Safepoint::DeoptMode deopt_mode) {
1053 pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
1059 masm()->positions_recorder()->RecordPosition(position);
1060 masm()->positions_recorder()->WriteRecordedPositions();
1065 if (label->is_loop_header())
return " (loop header)";
1066 if (label->is_osr_entry())
return " (OSR entry)";
// Emits the start of a basic block: a tracing comment carrying the current
// instruction index, the hydrogen value id and the block id, then binds the
// block's assembler label and records it as the current block.
// NOTE(review): interior lines are missing from this view (original
// numbering jumps 1074 -> 1077); remaining Comment() args are elided.
1071 void LCodeGen::DoLabel(LLabel* label) {
1072 Comment(
";;; <@%d,#%d> -------------------- B%d%s --------------------",
1073 current_instruction_,
1074 label->hydrogen_value()->id(),
1077 __ bind(label->label());
1078 current_block_ = label->block_id();
// Handles a gap instruction (parallel moves between instructions); body not
// visible in this view.
1099 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
// Parameters are materialized by the caller/prologue; body not visible in
// this view (presumably a no-op — TODO confirm).
1104 void LCodeGen::DoParameter(LParameter* instr) {
// Dispatches on the hydrogen call's major key and instantiates the matching
// code stub (RegExpExec / SubString / StringCompare). The per-case stub
// invocation and break statements fall in lines elided from this view.
1109 void LCodeGen::DoCallStub(LCallStub* instr) {
1112 switch (instr->hydrogen()->major_key()) {
1113 case CodeStub::RegExpExec: {
1114 RegExpExecStub stub(isolate());
1118 case CodeStub::SubString: {
1119 SubStringStub stub(isolate());
1123 case CodeStub::StringCompare: {
1124 StringCompareStub stub(isolate());
// OSR values are already in their slots when entering optimized code; body
// not visible in this view.
1134 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
// Computes dividend % divisor in place in the dividend register, where
// |divisor| is a power of two, using mask = |divisor| - 1. Negative
// dividends are negated, masked, then negated back so the result carries the
// dividend's sign (truncating/C-style remainder).
// FIX(review): the label references read "÷nd_is_not_negative" — HTML
// entity mojibake where "&divide;" swallowed the "&divide" prefix of
// "&dividend_is_not_negative". Restored the original identifier in both the
// branch and the bind; all other bytes unchanged.
// NOTE(review): interior lines are elided in this view (numbering jumps
// 1141->1150, 1159->1166); the SetCC rsb presumably feeds a minus-zero
// deopt check in the elided lines — confirm against full source.
1139 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
1140 Register dividend =
ToRegister(instr->dividend());
1141 int32_t divisor = instr->divisor();
1150 HMod* hmod = instr->hydrogen();
// |divisor| - 1 computed without Abs() so kMinInt divisors are handled.
1151 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1152 Label dividend_is_not_negative, done;
1154 __ cmp(dividend, Operand::Zero());
1155 __ b(
pl, &dividend_is_not_negative);
// Negative path: negate, mask, negate back (SetCC to expose a zero result).
1157 __ rsb(dividend, dividend, Operand::Zero());
1158 __ and_(dividend, dividend, Operand(mask));
1159 __ rsb(dividend, dividend, Operand::Zero(),
SetCC);
1166 __ bind(&dividend_is_not_negative);
1167 __ and_(dividend, dividend, Operand(mask));
// Computes dividend % divisor for an arbitrary constant divisor:
// result = dividend - TruncatingDiv(dividend, |divisor|) * |divisor|,
// with the multiply done via smull through ip. The trailing SetCC sub lets
// the -0 check below branch on the Z flag without re-comparing.
// NOTE(review): interior lines are elided in this view (numbering jumps
// 1176->1183, 1186->1189, 1193->1195); the divisor==0 abort and the deopt
// call guarded by the dividend-negative check are presumably among them.
1172 void LCodeGen::DoModByConstI(LModByConstI* instr) {
1173 Register dividend =
ToRegister(instr->dividend());
1174 int32_t divisor = instr->divisor();
1175 Register result =
ToRegister(instr->result());
1176 DCHECK(!dividend.is(result));
1183 __ TruncatingDiv(result, dividend,
Abs(divisor));
1184 __ mov(
ip, Operand(
Abs(divisor)));
1185 __ smull(result,
ip, result,
ip);
1186 __ sub(result, dividend, result,
SetCC);
// Minus-zero check: only needed when the hydrogen mod can observe -0.
1189 HMod* hmod = instr->hydrogen();
1191 Label remainder_not_zero;
1192 __ b(
ne, &remainder_not_zero);
1193 __ cmp(dividend, Operand::Zero());
1195 __ bind(&remainder_not_zero);
// General modulus with a register divisor. Two code paths (the selecting
// condition — presumably CpuFeatures::IsSupported(SUDIV) — is elided):
//  * hardware path: sdiv + Mls computes left - (left / right) * right;
//  * VFP path: converts to double, divides, truncates the quotient back to
//    int, multiplies and subtracts to recover the remainder.
// The right==0 and kMinInt % -1 cases are checked up front; deopt/branch
// targets for them fall in elided lines.
// NOTE(review): this view is missing many interior lines (e.g. 1207->1213,
// 1274->1285) — comments describe only the visible instructions.
1200 void LCodeGen::DoModI(LModI* instr) {
1201 HMod* hmod = instr->hydrogen();
1205 Register left_reg =
ToRegister(instr->left());
1206 Register right_reg =
ToRegister(instr->right());
1207 Register result_reg =
ToRegister(instr->result());
// Divide-by-zero check.
1213 __ cmp(right_reg, Operand::Zero());
// kMinInt % -1 would overflow sdiv; force a zero result instead.
1220 Label no_overflow_possible;
1222 __ b(
ne, &no_overflow_possible);
1223 __ cmp(right_reg, Operand(-1));
1227 __ b(
ne, &no_overflow_possible);
1228 __ mov(result_reg, Operand::Zero());
1231 __ bind(&no_overflow_possible);
// remainder = left - (left / right) * right.
1238 __ sdiv(result_reg, left_reg, right_reg);
1239 __ Mls(result_reg, result_reg, right_reg, left_reg);
// Minus-zero check: zero remainder with a negative left deopts.
1243 __ cmp(result_reg, Operand::Zero());
1245 __ cmp(left_reg, Operand::Zero());
// ---- VFP fallback path (no integer divide hardware) ----
1252 Register left_reg =
ToRegister(instr->left());
1253 Register right_reg =
ToRegister(instr->right());
1254 Register result_reg =
ToRegister(instr->result());
1256 DCHECK(!scratch.is(left_reg));
1257 DCHECK(!scratch.is(right_reg));
1258 DCHECK(!scratch.is(result_reg));
1261 DCHECK(!divisor.is(dividend));
1263 DCHECK(!quotient.is(dividend));
1264 DCHECK(!quotient.is(divisor));
1270 __ cmp(right_reg, Operand::Zero());
1274 __ Move(result_reg, left_reg);
// Divide on absolute values, then truncate the quotient via s32 round-trip.
1285 __ vabs(divisor, divisor);
1287 __ vdiv(quotient, dividend, divisor);
1288 __ vcvt_s32_f64(quotient.low(), quotient);
1289 __ vcvt_f64_s32(quotient, quotient.low());
1295 __ sub(result_reg, left_reg, scratch,
SetCC);
1300 __ cmp(left_reg, Operand::Zero());
// Divides by a power-of-two constant using shifts. Emits checks (deopt
// targets elided) for minus zero, exact division (tst against mask when the
// division must not lose bits), then selects: divisor -1 -> negate;
// divisor 1 -> move; shift 1 -> add sign bit; otherwise round toward zero by
// adding (sign >> (32 - shift)) before the arithmetic shift.
// NOTE(review): interior lines are elided (e.g. 1318->1328, 1335->1340);
// the deopt calls and the ASR by 'shift' presumably fall there.
1308 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
1309 Register dividend =
ToRegister(instr->dividend());
1310 int32_t divisor = instr->divisor();
1311 Register result =
ToRegister(instr->result());
1313 DCHECK(!result.is(dividend));
// Minus-zero check.
1316 HDiv* hdiv = instr->hydrogen();
1318 __ cmp(dividend, Operand::Zero());
// Exactness check: any masked-off bits mean a lossy division.
1328 divisor != 1 && divisor != -1) {
1329 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
1330 __ tst(dividend, Operand(mask));
1334 if (divisor == -1) {
1335 __ rsb(result, dividend, Operand(0));
1340 __ mov(result, dividend);
1341 }
else if (
shift == 1) {
// shift == 1: add the sign bit so negative dividends round toward zero.
1342 __ add(result, dividend, Operand(dividend,
LSR, 31));
// General case: bias by (sign-mask >> (32 - shift)) before shifting.
1344 __ mov(result, Operand(dividend,
ASR, 31));
1345 __ add(result, dividend, Operand(result,
LSR, 32 -
shift));
1348 if (divisor < 0)
__ rsb(result, result, Operand(0));
// Divides by an arbitrary constant via TruncatingDiv (multiply-by-magic-
// number), negating the result for negative divisors. The trailing mov of
// the divisor into ip presumably begins the exactness check (multiply back
// and compare against the dividend) — the rest is elided from this view.
1352 void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
1353 Register dividend =
ToRegister(instr->dividend());
1354 int32_t divisor = instr->divisor();
1355 Register result =
ToRegister(instr->result());
1356 DCHECK(!dividend.is(result));
// Minus-zero check (deopt target elided).
1364 HDiv* hdiv = instr->hydrogen();
1366 __ cmp(dividend, Operand::Zero());
1370 __ TruncatingDiv(result, dividend,
Abs(divisor));
1371 if (divisor < 0)
__ rsb(result, result, Operand::Zero());
1374 __ mov(
ip, Operand(divisor));
// Division with a register divisor. Visible checks: divide-by-zero,
// minus-zero (dividend==0 with negative divisor), and kMinInt / -1 overflow
// (the conditional cmp against -1 runs only when dividend==kMinInt, flags
// from an elided compare). Then either hardware sdiv or the VFP vdiv path,
// followed by an Mls-based remainder check when the result must be exact.
// NOTE(review): deopt calls, path selection, and register setup fall in
// elided lines (e.g. 1387->1391, 1421->1429).
1383 void LCodeGen::DoDivI(LDivI* instr) {
1384 HBinaryOperation* hdiv = instr->hydrogen();
1385 Register dividend =
ToRegister(instr->dividend());
1386 Register divisor =
ToRegister(instr->divisor());
1387 Register result =
ToRegister(instr->result());
1391 __ cmp(divisor, Operand::Zero());
1400 __ cmp(divisor, Operand::Zero());
1403 __ cmp(dividend, Operand::Zero());
1415 __ cmp(divisor, Operand(-1),
eq);
1421 __ sdiv(result, dividend, divisor);
1429 __ vdiv(vleft, vleft, vright);
// Exactness: remainder = dividend - result * divisor must be zero.
1437 __ Mls(remainder, result, divisor, dividend);
1438 __ cmp(remainder, Operand::Zero());
// Fused multiply-add on doubles: addend += multiplier * multiplicand via
// vmla (operand extraction elided from this view).
1444 void LCodeGen::DoMultiplyAddD(LMultiplyAddD* instr) {
1452 __ vmla(addend, multiplier, multiplicand);
// Fused multiply-subtract on doubles: minuend -= multiplier * multiplicand
// via vmls (operand extraction elided from this view).
1456 void LCodeGen::DoMultiplySubD(LMultiplySubD* instr) {
1464 __ vmls(minuend, multiplier, multiplicand);
// Flooring (round-toward-negative-infinity) division by a power-of-two
// constant: divisor 1 -> move, positive power of two -> plain ASR (already
// floors), negative divisor -> negate with SetCC (overflow/minus-zero deopt
// presumably in elided lines), divisor -1 handled specially.
1468 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
1469 Register dividend =
ToRegister(instr->dividend());
1470 Register result =
ToRegister(instr->result());
1471 int32_t divisor = instr->divisor();
1475 __ Move(result, dividend);
// ASR floors for free on positive power-of-two divisors.
1483 __ mov(result, Operand(dividend,
ASR,
shift));
1488 __ rsb(result, dividend, Operand::Zero(),
SetCC);
1494 if (divisor == -1) {
// Flooring division by an arbitrary constant. Easy case: truncating divide
// (negated for negative divisors) when flooring equals truncation. General
// case: if dividend and divisor signs differ (b with lt/gt chosen by the
// divisor's sign), bias the dividend toward zero by sign(divisor), divide,
// then subtract 1 — the standard floor-from-truncate adjustment.
// NOTE(review): minus-zero check and 'done' label/branches are elided.
1512 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
1513 Register dividend =
ToRegister(instr->dividend());
1514 int32_t divisor = instr->divisor();
1515 Register result =
ToRegister(instr->result());
1516 DCHECK(!dividend.is(result));
1524 HMathFloorOfDiv* hdiv = instr->hydrogen();
1526 __ cmp(dividend, Operand::Zero());
1534 __ TruncatingDiv(result, dividend,
Abs(divisor));
1535 if (divisor < 0)
__ rsb(result, result, Operand::Zero());
1542 DCHECK(!temp.is(dividend) && !temp.is(result));
1543 Label needs_adjustment, done;
1544 __ cmp(dividend, Operand::Zero());
// Signs differ -> truncation rounded the wrong way; adjust below.
1545 __ b(divisor > 0 ?
lt :
gt, &needs_adjustment);
1546 __ TruncatingDiv(result, dividend,
Abs(divisor));
1547 if (divisor < 0)
__ rsb(result, result, Operand::Zero());
1549 __ bind(&needs_adjustment);
1550 __ add(temp, dividend, Operand(divisor > 0 ? 1 : -1));
1551 __ TruncatingDiv(result, temp,
Abs(divisor));
1552 if (divisor < 0)
__ rsb(result, result, Operand::Zero());
1553 __ sub(result, result, Operand(1));
// Flooring division with a register divisor: same checks as DoDivI
// (divide-by-zero, minus-zero, kMinInt / -1), sdiv or vdiv, then the floor
// fix-up: if the remainder is nonzero and its sign (xor'd with the divisor,
// tested via ASR 31) shows the truncated quotient rounded up, subtract 1 by
// adding the -1 sign extension.
// NOTE(review): deopt calls and branch structure fall in elided lines.
1559 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
1560 HBinaryOperation* hdiv = instr->hydrogen();
1561 Register left =
ToRegister(instr->dividend());
1562 Register right =
ToRegister(instr->divisor());
1563 Register result =
ToRegister(instr->result());
1567 __ cmp(right, Operand::Zero());
1576 __ cmp(right, Operand::Zero());
1579 __ cmp(left, Operand::Zero());
1591 __ cmp(right, Operand(-1),
eq);
1597 __ sdiv(result, left, right);
1605 __ vdiv(vleft, vleft, vright);
// Floor correction from the truncating quotient.
1612 __ Mls(remainder, result, right, left);
1613 __ cmp(remainder, Operand::Zero());
1615 __ eor(remainder, remainder, Operand(right));
1616 __ add(result, result, Operand(remainder,
ASR, 31));
// Integer/Smi multiply. Constant right operand is strength-reduced:
// -1 -> rsb (SetCC when overflow can deopt); 0 -> minus-zero check + zero;
// 1 -> move; power of two -> shift; 2^k +/- 1 -> add/rsb with LSL; otherwise
// a real mul through ip. Register right operand uses smull when overflow
// must be detected (high word compared against the sign-extended low word),
// plain mul otherwise; Smi inputs are untagged first so the result stays a
// Smi. Final minus-zero check: zero result deopts when left^right < 0.
// NOTE(review): many interior lines (deopt calls, 'shift' computation,
// else/brace structure) are elided from this view.
1621 void LCodeGen::DoMulI(LMulI* instr) {
1622 Register result =
ToRegister(instr->result());
1625 LOperand* right_op = instr->right();
1627 bool bailout_on_minus_zero =
1631 if (right_op->IsConstantOperand()) {
// Negative constant with -0 bailout: a zero left would produce -0.
1634 if (bailout_on_minus_zero && (constant < 0)) {
1637 __ cmp(left, Operand::Zero());
1644 __ rsb(result, left, Operand::Zero(),
SetCC);
1647 __ rsb(result, left, Operand::Zero());
1651 if (bailout_on_minus_zero) {
1654 __ cmp(left, Operand::Zero());
1657 __ mov(result, Operand::Zero());
1660 __ Move(result, left);
// Branch-free abs of the constant: (x + (x >> 31)) ^ (x >> 31).
1666 int32_t mask = constant >> 31;
1667 uint32_t constant_abs = (constant + mask) ^ mask;
1673 if (constant < 0)
__ rsb(result, result, Operand::Zero());
1676 __ add(result, left, Operand(left,
LSL,
shift));
1678 if (constant < 0)
__ rsb(result, result, Operand::Zero());
1681 __ rsb(result, left, Operand(left,
LSL,
shift));
1683 if (constant < 0)
__ rsb(result, result, Operand::Zero());
1686 __ mov(
ip, Operand(constant));
1687 __ mul(result, left,
ip);
1692 DCHECK(right_op->IsRegister());
// Overflow-checked path: smull, then verify high word == sign of low word.
1698 if (instr->hydrogen()->representation().IsSmi()) {
1699 __ SmiUntag(result, left);
1700 __ smull(result, scratch, result, right);
1702 __ smull(result, scratch, left, right);
1704 __ cmp(scratch, Operand(result,
ASR, 31));
1707 if (instr->hydrogen()->representation().IsSmi()) {
1708 __ SmiUntag(result, left);
1709 __ mul(result, result, right);
1711 __ mul(result, left, right);
// -0 bailout: zero result with operands of opposite sign deopts.
1715 if (bailout_on_minus_zero) {
1717 __ teq(left, Operand(right));
1720 __ cmp(result, Operand::Zero());
// Bitwise AND/OR/XOR. The right operand may come from a stack slot (loaded
// in elided lines) or be a register/constant. XOR with an all-ones constant
// is emitted as mvn (bitwise NOT) instead of eor.
1728 void LCodeGen::DoBitI(LBitI* instr) {
1729 LOperand* left_op = instr->left();
1730 LOperand* right_op = instr->right();
1731 DCHECK(left_op->IsRegister());
1733 Register result =
ToRegister(instr->result());
1736 if (right_op->IsStackSlot()) {
1739 DCHECK(right_op->IsRegister() || right_op->IsConstantOperand());
1743 switch (instr->op()) {
1744 case Token::BIT_AND:
1745 __ and_(result, left, right);
1748 __ orr(result, left, right);
1750 case Token::BIT_XOR:
// x ^ ~0 == ~x: use mvn for the NOT idiom.
1751 if (right_op->IsConstantOperand() && right.immediate() ==
int32_t(~0)) {
1752 __ mvn(result, Operand(left));
1754 __ eor(result, left, right);
// Shifts (ROR/SAR/SHR/SHL). Register shift amounts use the scratch register
// (masking to 0x1F presumably happens in elided lines). Constant amounts
// are masked to 5 bits here. SHR by 0 with can_deopt must reject negative
// inputs (result would not fit an int32) — SetCC / tst feed that deopt.
// SHL into a Smi with can_deopt tags via SmiTag(SetCC) so overflow deopts.
// NOTE(review): case labels, deopt calls and default branches are elided.
1764 void LCodeGen::DoShiftI(LShiftI* instr) {
1767 LOperand* right_op = instr->right();
1769 Register result =
ToRegister(instr->result());
1771 if (right_op->IsRegister()) {
1774 switch (instr->op()) {
1776 __ mov(result, Operand(left,
ROR, scratch));
1779 __ mov(result, Operand(left,
ASR, scratch));
1782 if (instr->can_deopt()) {
// SetCC: a zero shift of a negative value sets N, triggering deopt.
1783 __ mov(result, Operand(left,
LSR, scratch),
SetCC);
1786 __ mov(result, Operand(left,
LSR, scratch));
1790 __ mov(result, Operand(left,
LSL, scratch));
// ---- constant shift amount ----
1798 int value =
ToInteger32(LConstantOperand::cast(right_op));
1799 uint8_t shift_count =
static_cast<uint8_t
>(value & 0x1F);
1800 switch (instr->op()) {
1802 if (shift_count != 0) {
1803 __ mov(result, Operand(left,
ROR, shift_count));
1805 __ Move(result, left);
1809 if (shift_count != 0) {
1810 __ mov(result, Operand(left,
ASR, shift_count));
1812 __ Move(result, left);
1816 if (shift_count != 0) {
1817 __ mov(result, Operand(left,
LSR, shift_count));
// SHR by 0: negative input cannot be represented; test the sign bit.
1819 if (instr->can_deopt()) {
1820 __ tst(left, Operand(0x80000000));
1823 __ Move(result, left);
1827 if (shift_count != 0) {
// Smi SHL with deopt: shift by count-1, then SmiTag(SetCC) so the
// final doubling reports overflow in the flags.
1828 if (instr->hydrogen_value()->representation().IsSmi() &&
1829 instr->can_deopt()) {
1830 if (shift_count != 1) {
1831 __ mov(result, Operand(left,
LSL, shift_count - 1));
1832 __ SmiTag(result, result,
SetCC);
1834 __ SmiTag(result, left,
SetCC);
1838 __ mov(result, Operand(left,
LSL, shift_count));
1841 __ Move(result, left);
// Integer subtraction; right operand may live in a stack slot (load elided)
// or be a register/constant. The sub itself and overflow deopt are elided.
1852 void LCodeGen::DoSubI(LSubI* instr) {
1853 LOperand* left = instr->left();
1854 LOperand* right = instr->right();
1855 LOperand* result = instr->result();
1859 if (right->IsStackSlot()) {
1863 DCHECK(right->IsRegister() || right->IsConstantOperand());
// Reverse subtraction (right - left, ARM rsb); mirrors DoSubI's operand
// handling — the rsb emission and overflow handling are elided.
1873 void LCodeGen::DoRSubI(LRSubI* instr) {
1874 LOperand* left = instr->left();
1875 LOperand* right = instr->right();
1876 LOperand* result = instr->result();
1880 if (right->IsStackSlot()) {
1884 DCHECK(right->IsRegister() || right->IsConstantOperand());
// Materializes an int32 constant into the result register.
1894 void LCodeGen::DoConstantI(LConstantI* instr) {
1895 __ mov(
ToRegister(instr->result()), Operand(instr->value()));
// Materializes a Smi constant into the result register.
1899 void LCodeGen::DoConstantS(LConstantS* instr) {
1900 __ mov(
ToRegister(instr->result()), Operand(instr->value()));
// Materializes a double constant; the Vmov into the double result register
// is elided from this view.
1904 void LCodeGen::DoConstantD(LConstantD* instr) {
1905 DCHECK(instr->result()->IsDoubleRegister());
1907 double v = instr->value();
// Materializes an external-reference constant into the result register.
1912 void LCodeGen::DoConstantE(LConstantE* instr) {
1913 __ mov(
ToRegister(instr->result()), Operand(instr->value()));
// Materializes a tagged heap-object constant (Move elided from this view).
1917 void LCodeGen::DoConstantT(LConstantT* instr) {
1918 Handle<Object>
object = instr->value(isolate());
// Extracts the enum-cache length from a map into the result register.
1924 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
1925 Register result =
ToRegister(instr->result());
1927 __ EnumLength(result,
map);
// Loads a field from a JSDate. Field 0 (the date value) is read directly.
// Cached fields are used only while the date-cache stamp matches (stamp
// comparison elided); otherwise falls through to a C call to
// get_date_field_function with (object, field index) as arguments.
// NOTE(review): the JSDate type check and field loads are elided.
1931 void LCodeGen::DoDateField(LDateField* instr) {
1933 Register result =
ToRegister(instr->result());
1934 Register scratch =
ToRegister(instr->temp());
1935 Smi* index = instr->index();
1936 Label runtime, done;
1937 DCHECK(
object.is(result));
1940 DCHECK(!scratch.is(
object));
1947 if (index->value() == 0) {
// Cache-stamp guard for the remaining fields.
1951 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
1952 __ mov(scratch, Operand(stamp));
// Slow path: C call computes the field.
1962 __ PrepareCallCFunction(2, scratch);
1963 __ mov(
r1, Operand(index));
1964 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
1973 if (index->IsConstantOperand()) {
1974 int offset =
ToInteger32(LConstantOperand::cast(index));
// Loads one character from a sequential string: byte load for one-byte
// strings, halfword load for two-byte. Debug builds verify the instance
// type's encoding matches the static expectation (Check(eq) aborts on
// mismatch). Operand construction and encoding selection are elided.
1994 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
1999 if (FLAG_debug_code) {
2004 __ and_(scratch, scratch,
2009 ? one_byte_seq_type : two_byte_seq_type));
2010 __ Check(
eq, kUnexpectedStringType);
2015 __ ldrb(result, operand);
2017 __ ldrh(result, operand);
// Stores one character into a sequential string: byte store for one-byte
// strings, halfword for two-byte. Debug builds run
// EmitSeqStringSetCharCheck on (string, index, value, encoding mask).
2022 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
2024 Register
string =
ToRegister(instr->string());
2027 if (FLAG_debug_code) {
2033 ? one_byte_seq_type : two_byte_seq_type;
2034 __ EmitSeqStringSetCharCheck(
string, index, value, encoding_mask);
2039 __ strb(value, operand);
2041 __ strh(value, operand);
// Integer addition; right operand may live in a stack slot (load elided)
// or be a register/constant. The add itself and overflow deopt are elided.
2046 void LCodeGen::DoAddI(LAddI* instr) {
2047 LOperand* left = instr->left();
2048 LOperand* right = instr->right();
2049 LOperand* result = instr->result();
2053 if (right->IsStackSlot()) {
2057 DCHECK(right->IsRegister() || right->IsConstantOperand());
// Math.min/Math.max. Integer/Smi path: compare and conditionally move —
// le keeps left for min, ge keeps left for max (the conditional move of the
// right operand is elided). Double path follows IEEE semantics: NaN inputs
// (vs) produce NaN via vadd; when both compare equal the zeros case is
// disambiguated — min(-0, 0) is computed as -(-l - r), max uses vadd so
// +0 wins over -0.
// NOTE(review): several branches, binds and the left_reg load are elided.
2067 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
2068 LOperand* left = instr->left();
2069 LOperand* right = instr->right();
2070 HMathMinMax::Operation operation = instr->hydrogen()->operation();
2071 if (instr->hydrogen()->representation().IsSmiOrInteger32()) {
2072 Condition condition = (operation == HMathMinMax::kMathMin) ?
le :
ge;
2074 Operand right_op = (right->IsRegister() || right->IsConstantOperand())
2077 Register result_reg =
ToRegister(instr->result());
2078 __ cmp(left_reg, right_op);
2079 __ Move(result_reg, left_reg, condition);
2082 DCHECK(instr->hydrogen()->representation().IsDouble());
2086 Label result_is_nan, return_left, return_right, check_zero, done;
2087 __ VFPCompareAndSetFlags(left_reg, right_reg);
2088 if (operation == HMathMinMax::kMathMin) {
2089 __ b(
mi, &return_left);
2090 __ b(
gt, &return_right);
2092 __ b(
mi, &return_right);
2093 __ b(
gt, &return_left);
// Unordered (NaN) comparison.
2095 __ b(
vs, &result_is_nan);
// Equal operands: distinguish +0 from -0.
2097 __ VFPCompareAndSetFlags(left_reg, 0.0);
2098 if (left_reg.is(result_reg) || right_reg.is(result_reg)) {
2101 __ b(
ne, &return_left);
2104 if (operation == HMathMinMax::kMathMin) {
// min(-0, 0) = -0: computed as -((-left) - right).
2106 __ vneg(left_reg, left_reg);
2107 __ vsub(result_reg, left_reg, right_reg);
2108 __ vneg(result_reg, result_reg);
// max(-0, 0) = 0: vadd makes +0 dominate.
2112 __ vadd(result_reg, left_reg, right_reg);
// NaN propagation: adding the operands yields a NaN result.
2116 __ bind(&result_is_nan);
2117 __ vadd(result_reg, left_reg, right_reg);
2120 __ bind(&return_right);
2121 __ Move(result_reg, right_reg);
2122 if (!left_reg.is(result_reg)) {
2126 __ bind(&return_left);
2127 __ Move(result_reg, left_reg);
// Double arithmetic: add/sub/mul/div map to single VFP instructions;
// modulus calls the C helper mod_two_doubles_operation via the float
// parameter-passing helpers (case labels elided from this view).
2134 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
2138 switch (instr->op()) {
2140 __ vadd(result, left, right);
2143 __ vsub(result, left, right);
2146 __ vmul(result, left, right);
2149 __ vdiv(result, left, right);
2153 __ MovToFloatParameters(left, right);
2155 ExternalReference::mod_two_doubles_operation(isolate()),
2158 __ MovFromFloatResult(result);
// Tagged (generic) arithmetic: delegates to the BinaryOpIC stub for the
// operator; the call emission is elided from this view.
2168 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2175 CodeFactory::BinaryOpIC(isolate(), instr->op(),
NO_OVERWRITE).code();
// Emits a conditional two-way branch, skipping jumps to the fall-through
// block: unconditional (or same-target) branches collapse; if the true
// block is next, branch on the negated condition to the false block
// (elided); if the false block is next, a single conditional branch
// suffices; otherwise emit both branches.
// NOTE(review): the function signature line (instr, condition params) is
// not visible in this view.
2183 template<
class InstrType>
2185 int left_block = instr->TrueDestination(chunk_);
2186 int right_block = instr->FalseDestination(chunk_);
2188 int next_block = GetNextEmittedBlock();
2190 if (right_block == left_block || condition ==
al) {
2192 }
else if (left_block == next_block) {
2194 }
else if (right_block == next_block) {
2195 __ b(condition, chunk_->GetAssemblyLabel(left_block));
2197 __ b(condition, chunk_->GetAssemblyLabel(left_block));
2198 __ b(chunk_->GetAssemblyLabel(right_block));
// Emits a single conditional jump to the instruction's false destination.
// NOTE(review): the function signature line is not visible in this view.
2203 template<
class InstrType>
2205 int false_block = instr->FalseDestination(chunk_);
2206 __ b(condition, chunk_->GetAssemblyLabel(false_block));
2210 void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
2215 void LCodeGen::DoBranch(LBranch* instr) {
2216 Representation r = instr->hydrogen()->value()->representation();
2217 if (r.IsInteger32() || r.IsSmi()) {
2218 DCHECK(!info()->IsStub());
2220 __ cmp(reg, Operand::Zero());
2222 }
else if (r.IsDouble()) {
2223 DCHECK(!info()->IsStub());
2226 __ VFPCompareAndSetFlags(reg, 0.0);
2232 HType type = instr->hydrogen()->value()->type();
2233 if (type.IsBoolean()) {
2234 DCHECK(!info()->IsStub());
2235 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
2237 }
else if (type.IsSmi()) {
2238 DCHECK(!info()->IsStub());
2239 __ cmp(reg, Operand::Zero());
2241 }
else if (type.IsJSArray()) {
2242 DCHECK(!info()->IsStub());
2244 }
else if (type.IsHeapNumber()) {
2245 DCHECK(!info()->IsStub());
2249 __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
2252 }
else if (type.IsString()) {
2253 DCHECK(!info()->IsStub());
2255 __ cmp(
ip, Operand::Zero());
2258 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
2264 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
2265 __ b(
eq, instr->FalseLabel(chunk_));
2269 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
2270 __ b(
eq, instr->TrueLabel(chunk_));
2271 __ CompareRoot(reg, Heap::kFalseValueRootIndex);
2272 __ b(
eq, instr->FalseLabel(chunk_));
2276 __ CompareRoot(reg, Heap::kNullValueRootIndex);
2277 __ b(
eq, instr->FalseLabel(chunk_));
2282 __ cmp(reg, Operand::Zero());
2283 __ b(
eq, instr->FalseLabel(chunk_));
2284 __ JumpIfSmi(reg, instr->TrueLabel(chunk_));
2285 }
else if (expected.NeedsMap()) {
2292 if (expected.NeedsMap()) {
2295 if (expected.CanBeUndetectable()) {
2299 __ b(
ne, instr->FalseLabel(chunk_));
2306 __ b(
ge, instr->TrueLabel(chunk_));
2313 __ b(
ge, ¬_string);
2315 __ cmp(
ip, Operand::Zero());
2316 __ b(
ne, instr->TrueLabel(chunk_));
2317 __ b(instr->FalseLabel(chunk_));
2318 __ bind(¬_string);
2324 __ b(
eq, instr->TrueLabel(chunk_));
2330 Label not_heap_number;
2331 __ CompareRoot(
map, Heap::kHeapNumberMapRootIndex);
2332 __ b(
ne, ¬_heap_number);
2334 __ VFPCompareAndSetFlags(dbl_scratch, 0.0);
2336 __ b(
eq, instr->FalseLabel(chunk_));
2337 __ b(instr->TrueLabel(chunk_));
2338 __ bind(¬_heap_number);
2341 if (!expected.IsGeneric()) {
2358 void LCodeGen::DoGoto(LGoto* instr) {
2367 case Token::EQ_STRICT:
2371 case Token::NE_STRICT:
2375 cond = is_unsigned ?
lo :
lt;
2378 cond = is_unsigned ?
hi :
gt;
2381 cond = is_unsigned ?
ls :
le;
2384 cond = is_unsigned ?
hs :
ge;
2387 case Token::INSTANCEOF:
2395 void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
2403 if (left->IsConstantOperand() && right->IsConstantOperand()) {
2405 double left_val =
ToDouble(LConstantOperand::cast(left));
2406 double right_val =
ToDouble(LConstantOperand::cast(right));
2407 int next_block =
EvalComparison(instr->op(), left_val, right_val) ?
2408 instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_);
2411 if (instr->is_double()) {
2417 __ b(
vs, instr->FalseLabel(chunk_));
2419 if (right->IsConstantOperand()) {
2421 if (instr->hydrogen_value()->representation().IsSmi()) {
2426 }
else if (left->IsConstantOperand()) {
2428 if (instr->hydrogen_value()->representation().IsSmi()) {
2444 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
2448 __ cmp(left, Operand(right));
2453 void LCodeGen::DoCmpHoleAndBranch(LCmpHoleAndBranch* instr) {
2454 if (instr->hydrogen()->representation().IsTagged()) {
2455 Register input_reg =
ToRegister(instr->object());
2456 __ mov(
ip, Operand(factory()->the_hole_value()));
2457 __ cmp(input_reg,
ip);
2463 __ VFPCompareAndSetFlags(input_reg, input_reg);
2467 __ VmovHigh(scratch, input_reg);
2473 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
2474 Representation rep = instr->hydrogen()->value()->representation();
2475 DCHECK(!rep.IsInteger32());
2476 Register scratch =
ToRegister(instr->temp());
2478 if (rep.IsDouble()) {
2480 __ VFPCompareAndSetFlags(value, 0.0);
2482 __ VmovHigh(scratch, value);
2483 __ cmp(scratch, Operand(0x80000000));
2488 Heap::kHeapNumberMapRootIndex,
2489 instr->FalseLabel(
chunk()),
2493 __ cmp(scratch, Operand(0x80000000));
2494 __ cmp(
ip, Operand(0x00000000),
eq);
2502 Label* is_not_object,
2505 __ JumpIfSmi(input, is_not_object);
2507 __ LoadRoot(temp2, Heap::kNullValueRootIndex);
2508 __ cmp(input, temp2);
2509 __ b(
eq, is_object);
2516 __ b(
ne, is_not_object);
2521 __ b(
lt, is_not_object);
2527 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
2533 instr->FalseLabel(chunk_), instr->TrueLabel(chunk_));
2541 Label* is_not_string,
2544 __ JumpIfSmi(input, is_not_string);
2552 void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
2557 instr->hydrogen()->value()->type().IsHeapObject()
2560 EmitIsString(reg, temp1, instr->FalseLabel(chunk_), check_needed);
2566 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
2568 __ SmiTst(input_reg);
2573 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
2577 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2578 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2589 case Token::EQ_STRICT:
2607 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
2611 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2614 __ cmp(
r0, Operand::Zero());
2634 if (from ==
to)
return eq;
2642 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
2646 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2647 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
2650 __ CompareObjectType(input, scratch, scratch,
TestType(instr->hydrogen()));
2655 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
2657 Register result =
ToRegister(instr->result());
2659 __ AssertString(input);
2662 __ IndexFromHash(result, result);
2666 void LCodeGen::DoHasCachedArrayIndexAndBranch(
2667 LHasCachedArrayIndexAndBranch* instr) {
2690 __ JumpIfSmi(input, is_false);
2692 if (
String::Equals(isolate()->factory()->Function_string(), class_name)) {
2745 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
2752 class_name, input, temp, temp2);
2758 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
2763 __ cmp(temp, Operand(instr->map()));
2768 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
2776 __ cmp(
r0, Operand::Zero());
2782 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
2785 DeferredInstanceOfKnownGlobal(
LCodeGen* codegen,
2786 LInstanceOfKnownGlobal* instr)
2789 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_,
2792 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
2793 Label* map_check() {
return &map_check_; }
2794 Label* load_bool() {
return &load_bool_; }
2797 LInstanceOfKnownGlobal* instr_;
2802 DeferredInstanceOfKnownGlobal* deferred;
2803 deferred =
new(zone()) DeferredInstanceOfKnownGlobal(
this, instr);
2805 Label done, false_result;
2806 Register
object =
ToRegister(instr->value());
2808 Register result =
ToRegister(instr->result());
2811 __ JumpIfSmi(
object, &false_result);
2817 Register
map = temp;
2823 __ bind(deferred->map_check());
2827 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
2828 __ mov(
ip, Operand(Handle<Object>(cell)));
2831 __ b(
ne, &cache_miss);
2832 __ bind(deferred->load_bool());
2836 __ mov(result, Operand(factory()->the_hole_value()));
2842 __ bind(&cache_miss);
2844 __ LoadRoot(
ip, Heap::kNullValueRootIndex);
2845 __ cmp(
object, Operand(
ip));
2846 __ b(
eq, &false_result);
2849 Condition is_string = masm_->IsObjectStringType(
object, temp);
2850 __ b(is_string, &false_result);
2853 __ b(deferred->entry());
2855 __ bind(&false_result);
2856 __ LoadRoot(result, Heap::kFalseValueRootIndex);
2860 __ bind(deferred->exit());
2877 PushSafepointRegistersScope
scope(
this);
2893 int map_check_delta =
2894 masm_->InstructionsGeneratedSince(map_check) + additional_delta;
2895 int bool_load_delta =
2896 masm_->InstructionsGeneratedSince(bool_load) + additional_delta;
2897 Label before_push_delta;
2898 __ bind(&before_push_delta);
2899 __ BlockConstPoolFor(additional_delta);
2907 while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) {
2915 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
2916 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2919 __ StoreToSafepointRegisterSlot(
r0,
ToRegister(instr->result()));
2923 void LCodeGen::DoCmpT(LCmpT* instr) {
2927 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2930 __ cmp(
r0, Operand::Zero());
2934 Heap::kTrueValueRootIndex,
2937 Heap::kFalseValueRootIndex,
2942 void LCodeGen::DoReturn(LReturn* instr) {
2943 if (FLAG_trace && info()->IsOptimizing()) {
2952 if (info()->saves_caller_doubles()) {
2955 int no_frame_start = -1;
2957 no_frame_start = masm_->LeaveFrame(StackFrame::JAVA_SCRIPT);
2959 { ConstantPoolUnavailableScope constant_pool_unavailable(masm());
2960 if (instr->has_constant_parameter_count()) {
2961 int parameter_count =
ToInteger32(instr->constant_parameter_count());
2963 if (sp_delta != 0) {
2964 __ add(
sp,
sp, Operand(sp_delta));
2967 Register reg =
ToRegister(instr->parameter_count());
2975 if (no_frame_start != -1) {
2976 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
2982 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
2983 Register result =
ToRegister(instr->result());
2984 __ mov(
ip, Operand(Handle<Object>(instr->hydrogen()->cell().handle())));
2986 if (instr->hydrogen()->RequiresHoleCheck()) {
2987 __ LoadRoot(
ip, Heap::kTheHoleValueRootIndex);
2999 __ Move(vector, instr->hydrogen()->feedback_vector());
3007 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
3014 if (FLAG_vector_ics) {
3015 EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
3023 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
3028 __ mov(cell, Operand(instr->hydrogen()->cell().handle()));
3034 if (instr->hydrogen()->RequiresHoleCheck()) {
3036 Register payload =
ToRegister(instr->temp());
3038 __ CompareRoot(payload, Heap::kTheHoleValueRootIndex);
3048 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
3049 Register context =
ToRegister(instr->context());
3050 Register result =
ToRegister(instr->result());
3052 if (instr->hydrogen()->RequiresHoleCheck()) {
3053 __ LoadRoot(
ip, Heap::kTheHoleValueRootIndex);
3055 if (instr->hydrogen()->DeoptimizesOnHole()) {
3058 __ mov(result, Operand(factory()->undefined_value()),
LeaveCC,
eq);
3064 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
3065 Register context =
ToRegister(instr->context());
3070 Label skip_assignment;
3072 if (instr->hydrogen()->RequiresHoleCheck()) {
3073 __ ldr(scratch, target);
3074 __ LoadRoot(
ip, Heap::kTheHoleValueRootIndex);
3075 __ cmp(scratch,
ip);
3076 if (instr->hydrogen()->DeoptimizesOnHole()) {
3079 __ b(
ne, &skip_assignment);
3083 __ str(value, target);
3084 if (instr->hydrogen()->NeedsWriteBarrier()) {
3086 instr->hydrogen()->value()->type().IsHeapObject()
3088 __ RecordWriteContextSlot(context,
3098 __ bind(&skip_assignment);
3102 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
3103 HObjectAccess access = instr->hydrogen()->access();
3104 int offset = access.offset();
3105 Register
object =
ToRegister(instr->object());
3107 if (access.IsExternalMemory()) {
3108 Register result =
ToRegister(instr->result());
3110 __ Load(result, operand, access.representation());
3114 if (instr->hydrogen()->representation().IsDouble()) {
3120 Register result =
ToRegister(instr->result());
3121 if (!access.IsInobject()) {
3126 __ Load(result, operand, access.representation());
3130 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3137 if (FLAG_vector_ics) {
3138 EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
3140 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
NOT_CONTEXTUAL).code();
3145 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3147 Register
function =
ToRegister(instr->function());
3148 Register result =
ToRegister(instr->result());
3155 __ LoadRoot(
ip, Heap::kTheHoleValueRootIndex);
3161 __ CompareObjectType(result, scratch, scratch,
MAP_TYPE);
3172 void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
3173 Register result =
ToRegister(instr->result());
3174 __ LoadRoot(result, instr->index());
3178 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
3179 Register arguments =
ToRegister(instr->arguments());
3180 Register result =
ToRegister(instr->result());
3183 if (instr->length()->IsConstantOperand()) {
3184 int const_length =
ToInteger32(LConstantOperand::cast(instr->length()));
3185 if (instr->index()->IsConstantOperand()) {
3186 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
3187 int index = (const_length - const_index) + 1;
3191 __ rsb(result, index, Operand(const_length + 1));
3194 }
else if (instr->index()->IsConstantOperand()) {
3195 Register length =
ToRegister(instr->length());
3196 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
3197 int loc = const_index - 1;
3199 __ sub(result, length, Operand(loc));
3205 Register length =
ToRegister(instr->length());
3207 __ sub(result, length, index);
3208 __ add(result, result, Operand(1));
3218 bool key_is_constant = instr->
key()->IsConstantOperand();
3219 int constant_key = 0;
3220 if (key_is_constant) {
3221 constant_key =
ToInteger32(LConstantOperand::cast(instr->
key()));
3222 if (constant_key & 0xF0000000) {
3223 Abort(kArrayIndexConstantValueTooBig);
3229 int shift_size = (instr->hydrogen()->
key()->representation().IsSmi())
3230 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
3239 Operand operand = key_is_constant
3240 ?
Operand(constant_key << element_size_shift)
3242 __ add(
scratch0(), external_pointer, operand);
3253 key, external_pointer, key_is_constant, constant_key,
3254 element_size_shift, shift_size, base_offset);
3255 switch (elements_kind) {
3258 __ ldrsb(result, mem_operand);
3264 __ ldrb(result, mem_operand);
3268 __ ldrsh(result, mem_operand);
3272 __ ldrh(result, mem_operand);
3276 __ ldr(result, mem_operand);
3280 __ ldr(result, mem_operand);
3307 bool key_is_constant = instr->
key()->IsConstantOperand();
3315 if (key_is_constant) {
3316 int constant_key =
ToInteger32(LConstantOperand::cast(instr->
key()));
3317 if (constant_key & 0xF0000000) {
3318 Abort(kArrayIndexConstantValueTooBig);
3322 __ add(scratch, elements,
Operand(base_offset));
3324 if (!key_is_constant) {
3326 int shift_size = (instr->hydrogen()->
key()->representation().IsSmi())
3327 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
3328 __ add(scratch, scratch,
Operand(key,
LSL, shift_size));
3331 __ vldr(result, scratch, 0);
3333 if (instr->hydrogen()->RequiresHoleCheck()) {
3348 if (instr->
key()->IsConstantOperand()) {
3349 LConstantOperand* const_operand = LConstantOperand::cast(instr->
key());
3351 store_base = elements;
3358 if (instr->hydrogen()->
key()->representation().IsSmi()) {
3359 __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
3367 if (instr->hydrogen()->RequiresHoleCheck()) {
3372 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
3373 __ cmp(result, scratch);
3380 void LCodeGen::DoLoadKeyed(
LLoadKeyed* instr) {
3383 }
else if (instr->hydrogen()->representation().IsDouble()) {
3393 bool key_is_constant,
3398 if (key_is_constant) {
3399 return MemOperand(base, (constant_key << element_size) + base_offset);
3402 if (base_offset == 0) {
3403 if (shift_size >= 0) {
3411 if (shift_size >= 0) {
3422 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3427 if (FLAG_vector_ics) {
3428 EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
3431 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
3436 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
3438 Register result =
ToRegister(instr->result());
3440 if (instr->hydrogen()->from_inlined()) {
3444 Label done, adapted;
3457 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
3458 Register elem =
ToRegister(instr->elements());
3459 Register result =
ToRegister(instr->result());
3465 __ mov(result, Operand(
scope()->num_parameters()));
3472 __ SmiUntag(result);
3479 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
3480 Register receiver =
ToRegister(instr->receiver());
3481 Register
function =
ToRegister(instr->function());
3482 Register result =
ToRegister(instr->result());
3488 Label global_object, result_in_receiver;
3490 if (!instr->hydrogen()->known_function()) {
3498 __ tst(scratch, Operand(mask));
3499 __ b(
ne, &result_in_receiver);
3503 __ b(
ne, &result_in_receiver);
3507 __ LoadRoot(scratch, Heap::kNullValueRootIndex);
3508 __ cmp(receiver, scratch);
3509 __ b(
eq, &global_object);
3510 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
3511 __ cmp(receiver, scratch);
3512 __ b(
eq, &global_object);
3515 __ SmiTst(receiver);
3520 __ b(&result_in_receiver);
3521 __ bind(&global_object);
3527 if (result.is(receiver)) {
3528 __ bind(&result_in_receiver);
3532 __ bind(&result_in_receiver);
3533 __ mov(result, receiver);
3534 __ bind(&result_ok);
3539 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
3540 Register receiver =
ToRegister(instr->receiver());
3541 Register
function =
ToRegister(instr->function());
3542 Register length =
ToRegister(instr->length());
3543 Register elements =
ToRegister(instr->elements());
3552 __ cmp(length, Operand(kArgumentsLimit));
3558 __ mov(receiver, length);
3566 __ cmp(length, Operand::Zero());
3571 __ sub(length, length, Operand(1),
SetCC);
3575 DCHECK(instr->HasPointerMap());
3576 LPointerMap* pointers = instr->pointer_map();
3578 this, pointers, Safepoint::kLazyDeopt);
3581 ParameterCount actual(receiver);
3582 __ InvokeFunction(
function, actual,
CALL_FUNCTION, safepoint_generator);
3586 void LCodeGen::DoPushArgument(LPushArgument* instr) {
3587 LOperand* argument = instr->value();
3588 if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
3589 Abort(kDoPushArgumentNotImplementedForDoubleType);
3592 __ push(argument_reg);
3597 void LCodeGen::DoDrop(LDrop* instr) {
3598 __ Drop(instr->count());
3602 void LCodeGen::DoThisFunction(LThisFunction* instr) {
3603 Register result =
ToRegister(instr->result());
3608 void LCodeGen::DoContext(LContext* instr) {
3610 Register result =
ToRegister(instr->result());
3611 if (info()->IsOptimizing()) {
3620 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
3623 __ Move(
scratch0(), instr->hydrogen()->pairs());
3632 int formal_parameter_count,
3636 bool dont_adapt_arguments =
3638 bool can_invoke_directly =
3639 dont_adapt_arguments || formal_parameter_count == arity;
3643 if (can_invoke_directly) {
3645 __ Move(
r1,
function);
3653 if (dont_adapt_arguments) {
3665 ParameterCount count(arity);
3666 ParameterCount expected(formal_parameter_count);
3667 __ InvokeFunction(
function, expected, count,
CALL_FUNCTION, generator);
3681 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
3693 __ Move(result, input);
3699 PushSafepointRegistersScope
scope(
this);
3710 Label allocated, slow;
3711 __ LoadRoot(tmp4, Heap::kHeapNumberMapRootIndex);
3712 __ AllocateHeapNumber(tmp1, tmp2, tmp3, tmp4, &slow);
3723 __ LoadFromSafepointRegisterSlot(input, input);
3726 __ bind(&allocated);
3734 __ StoreToSafepointRegisterSlot(tmp1, result);
3744 __ cmp(input, Operand::Zero());
3745 __ Move(result, input,
pl);
3749 __ rsb(result, input, Operand::Zero(),
SetCC,
mi);
3755 void LCodeGen::DoMathAbs(LMathAbs* instr) {
3759 DeferredMathAbsTaggedHeapNumber(
LCodeGen* codegen, LMathAbs* instr)
3762 codegen()->DoDeferredMathAbsTaggedHeapNumber(instr_);
3764 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
3769 Representation r = instr->hydrogen()->value()->representation();
3773 __ vabs(result, input);
3774 }
else if (r.IsSmiOrInteger32()) {
3778 DeferredMathAbsTaggedHeapNumber* deferred =
3779 new(zone()) DeferredMathAbsTaggedHeapNumber(
this, instr);
3782 __ JumpIfNotSmi(input, deferred->entry());
3785 __ bind(deferred->exit());
3790 void LCodeGen::DoMathFloor(LMathFloor* instr) {
3792 Register result =
ToRegister(instr->result());
3802 __ cmp(result, Operand::Zero());
3804 __ cmp(input_high, Operand::Zero());
3811 void LCodeGen::DoMathRound(LMathRound* instr) {
3813 Register result =
ToRegister(instr->result());
3815 DwVfpRegister input_plus_dot_five = double_scratch1;
3818 Label convert, done;
3821 __ vabs(double_scratch1, input);
3822 __ VFPCompareAndSetFlags(double_scratch1, dot_five);
3828 __ VmovHigh(input_high, input);
3829 __ cmp(input_high, Operand::Zero());
3833 __ VFPCompareAndSetFlags(input, dot_five);
3841 __ vadd(input_plus_dot_five, input, dot_five);
3850 void LCodeGen::DoMathFround(LMathFround* instr) {
3854 __ vcvt_f32_f64(scratch.low(), input_reg);
3855 __ vcvt_f64_f32(output_reg, scratch.low());
3859 void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
3862 __ vsqrt(result, input);
3866 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
3876 __ VFPCompareAndSetFlags(input, temp);
3877 __ vneg(result, temp,
eq);
3882 __ vsqrt(result, result);
3887 void LCodeGen::DoPower(LPower* instr) {
3888 Representation exponent_type = instr->hydrogen()->right()->representation();
3892 DCHECK(!instr->right()->IsDoubleRegister() ||
3894 DCHECK(!instr->right()->IsRegister() ||
3899 if (exponent_type.IsSmi()) {
3902 }
else if (exponent_type.IsTagged()) {
3904 __ JumpIfSmi(tagged_exponent, &no_deopt);
3907 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
3913 }
else if (exponent_type.IsInteger32()) {
3917 DCHECK(exponent_type.IsDouble());
3924 void LCodeGen::DoMathExp(LMathExp* instr) {
3933 masm(), input, result, double_scratch1, double_scratch2,
3938 void LCodeGen::DoMathLog(LMathLog* instr) {
3941 __ CallCFunction(ExternalReference::math_log_double_function(isolate()),
3947 void LCodeGen::DoMathClz32(LMathClz32* instr) {
3949 Register result =
ToRegister(instr->result());
3950 __ clz(result, input);
3954 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3957 DCHECK(instr->HasPointerMap());
3959 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
3960 if (known_function.is_null()) {
3961 LPointerMap* pointers = instr->pointer_map();
3963 ParameterCount count(instr->arity());
3967 instr->hydrogen()->formal_parameter_count(),
3975 void LCodeGen::DoTailCallThroughMegamorphicCache(
3976 LTailCallThroughMegamorphicCache* instr) {
3977 Register receiver =
ToRegister(instr->receiver());
3984 Register scratch =
r3;
3985 Register extra =
r4;
3986 Register extra2 =
r5;
3987 Register extra3 =
r6;
3993 isolate()->stub_cache()->GenerateProbe(masm(), instr->hydrogen()->flags(),
3994 must_teardown_frame, receiver,
name,
3995 scratch, extra, extra2, extra3);
4003 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
4006 LPointerMap* pointers = instr->pointer_map();
4009 if (instr->target()->IsConstantOperand()) {
4010 LConstantOperand* target = LConstantOperand::cast(instr->target());
4013 PlatformInterfaceDescriptor* call_descriptor =
4014 instr->descriptor().platform_specific_descriptor();
4016 call_descriptor->storage_mode());
4018 DCHECK(instr->target()->IsRegister());
4019 Register target =
ToRegister(instr->target());
4020 generator.BeforeCall(
__ CallSize(target));
4025 ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
4030 generator.AfterCall();
4034 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
4038 if (instr->hydrogen()->pass_argument_count()) {
4039 __ mov(
r0, Operand(instr->arity()));
4053 void LCodeGen::DoCallFunction(LCallFunction* instr) {
4058 int arity = instr->arity();
4059 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
4064 void LCodeGen::DoCallNew(LCallNew* instr) {
4069 __ mov(
r0, Operand(instr->arity()));
4071 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
4077 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
4082 __ mov(
r0, Operand(instr->arity()));
4083 __ LoadRoot(
r2, Heap::kUndefinedValueRootIndex);
4084 ElementsKind kind = instr->hydrogen()->elements_kind();
4090 if (instr->arity() == 0) {
4091 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
4093 }
else if (instr->arity() == 1) {
4100 __ cmp(
r5, Operand::Zero());
4101 __ b(
eq, &packed_case);
4104 ArraySingleArgumentConstructorStub stub(isolate(),
4109 __ bind(&packed_case);
4112 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
4116 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
4122 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
4123 CallRuntime(instr->function(), instr->arity(), instr);
4127 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
4128 Register
function =
ToRegister(instr->function());
4129 Register code_object =
ToRegister(instr->code_object());
4136 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
4137 Register result =
ToRegister(instr->result());
4138 Register base =
ToRegister(instr->base_object());
4139 if (instr->offset()->IsConstantOperand()) {
4140 LConstantOperand* offset = LConstantOperand::cast(instr->offset());
4143 Register offset =
ToRegister(instr->offset());
4144 __ add(result, base, offset);
4149 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
4150 Representation representation = instr->representation();
4152 Register
object =
ToRegister(instr->object());
4154 HObjectAccess access = instr->hydrogen()->access();
4155 int offset = access.offset();
4157 if (access.IsExternalMemory()) {
4160 __ Store(value, operand, representation);
4164 __ AssertNotSmi(
object);
4166 DCHECK(!representation.IsSmi() ||
4167 !instr->value()->IsConstantOperand() ||
4168 IsSmi(LConstantOperand::cast(instr->value())));
4169 if (representation.IsDouble()) {
4170 DCHECK(access.IsInobject());
4171 DCHECK(!instr->hydrogen()->has_transition());
4172 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4178 if (instr->hydrogen()->has_transition()) {
4179 Handle<Map> transition = instr->hydrogen()->transition_map();
4180 AddDeprecationDependency(transition);
4181 __ mov(scratch, Operand(transition));
4183 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
4186 __ RecordWriteForMap(
object,
4196 if (access.IsInobject()) {
4198 __ Store(value, operand, representation);
4199 if (instr->hydrogen()->NeedsWriteBarrier()) {
4201 __ RecordWriteField(
object,
4208 instr->hydrogen()->SmiCheckForWriteBarrier(),
4209 instr->hydrogen()->PointersToHereCheckForValue());
4214 __ Store(value, operand, representation);
4215 if (instr->hydrogen()->NeedsWriteBarrier()) {
4218 __ RecordWriteField(scratch,
4225 instr->hydrogen()->SmiCheckForWriteBarrier(),
4226 instr->hydrogen()->PointersToHereCheckForValue());
4232 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
4243 void LCodeGen::DoBoundsCheck(LBoundsCheck* instr) {
4245 if (instr->index()->IsConstantOperand()) {
4246 Operand index =
ToOperand(instr->index());
4247 Register length =
ToRegister(instr->length());
4248 __ cmp(length, index);
4252 Operand length =
ToOperand(instr->length());
4253 __ cmp(index, length);
4255 if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
4258 __ stop(
"eliminated bounds check failed");
4270 bool key_is_constant = instr->
key()->IsConstantOperand();
4271 int constant_key = 0;
4272 if (key_is_constant) {
4273 constant_key =
ToInteger32(LConstantOperand::cast(instr->
key()));
4274 if (constant_key & 0xF0000000) {
4275 Abort(kArrayIndexConstantValueTooBig);
4281 int shift_size = (instr->hydrogen()->
key()->representation().IsSmi())
4282 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
4291 if (key_is_constant) {
4292 if (constant_key != 0) {
4293 __ add(address, external_pointer,
4294 Operand(constant_key << element_size_shift));
4296 address = external_pointer;
4299 __ add(address, external_pointer,
Operand(key,
LSL, shift_size));
4306 __ vstr(value, address, base_offset);
4311 key, external_pointer, key_is_constant, constant_key,
4312 element_size_shift, shift_size,
4314 switch (elements_kind) {
4321 __ strb(value, mem_operand);
4327 __ strh(value, mem_operand);
4333 __ str(value, mem_operand);
4359 bool key_is_constant = instr->
key()->IsConstantOperand();
4365 if (key_is_constant) {
4366 int constant_key =
ToInteger32(LConstantOperand::cast(instr->
key()));
4367 if (constant_key & 0xF0000000) {
4368 Abort(kArrayIndexConstantValueTooBig);
4370 __ add(scratch, elements,
4371 Operand((constant_key << element_size_shift) + base_offset));
4373 int shift_size = (instr->hydrogen()->
key()->representation().IsSmi())
4374 ? (element_size_shift -
kSmiTagSize) : element_size_shift;
4375 __ add(scratch, elements,
Operand(base_offset));
4376 __ add(scratch, scratch,
4382 if (masm()->emit_debug_code()) {
4385 __ Assert(
ne, kDefaultNaNModeNotSet);
4390 __ vstr(value, scratch, 0);
4405 if (instr->
key()->IsConstantOperand()) {
4406 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
4407 LConstantOperand* const_operand = LConstantOperand::cast(instr->
key());
4409 store_base = elements;
4415 if (instr->hydrogen()->
key()->representation().IsSmi()) {
4416 __ add(scratch, elements, Operand::PointerOffsetFromSmiKey(key));
4423 if (instr->hydrogen()->NeedsWriteBarrier()) {
4425 instr->hydrogen()->
value()->type().IsHeapObject()
4428 __ add(key, store_base,
Operand(offset));
4429 __ RecordWrite(elements,
4436 instr->hydrogen()->PointersToHereCheckForValue());
4445 }
else if (instr->hydrogen()->
value()->representation().IsDouble()) {
4453 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
4460 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code();
4465 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
4466 Register object_reg =
ToRegister(instr->object());
4469 Handle<Map> from_map = instr->original_map();
4470 Handle<Map> to_map = instr->transitioned_map();
4474 Label not_applicable;
4476 __ cmp(scratch, Operand(from_map));
4477 __ b(
ne, ¬_applicable);
4480 Register new_map_reg =
ToRegister(instr->new_map_temp());
4481 __ mov(new_map_reg, Operand(to_map));
4484 __ RecordWriteForMap(object_reg,
4492 PushSafepointRegistersScope
scope(
this);
4493 __ Move(
r1, to_map);
4494 bool is_js_array = from_map->instance_type() ==
JS_ARRAY_TYPE;
4495 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
4498 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
4500 __ bind(¬_applicable);
4504 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
4505 Register
object =
ToRegister(instr->object());
4507 Label no_memento_found;
4508 __ TestJSArrayForAllocationMemento(
object, temp, &no_memento_found);
4510 __ bind(&no_memento_found);
4514 void LCodeGen::DoStringAdd(LStringAdd* instr) {
4518 StringAddStub stub(isolate(),
4519 instr->hydrogen()->flags(),
4520 instr->hydrogen()->pretenure_flag());
4525 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
4528 DeferredStringCharCodeAt(
LCodeGen* codegen, LStringCharCodeAt* instr)
4531 codegen()->DoDeferredStringCharCodeAt(instr_);
4533 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4535 LStringCharCodeAt* instr_;
4538 DeferredStringCharCodeAt* deferred =
4539 new(zone()) DeferredStringCharCodeAt(
this, instr);
4546 __ bind(deferred->exit());
4558 __ mov(result, Operand::Zero());
4560 PushSafepointRegistersScope
scope(
this);
4564 if (instr->index()->IsConstantOperand()) {
4565 int const_index =
ToInteger32(LConstantOperand::cast(instr->index()));
4577 __ StoreToSafepointRegisterSlot(
r0, result);
4581 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
4584 DeferredStringCharFromCode(
LCodeGen* codegen, LStringCharFromCode* instr)
4587 codegen()->DoDeferredStringCharFromCode(instr_);
4589 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4591 LStringCharFromCode* instr_;
4594 DeferredStringCharFromCode* deferred =
4595 new(zone()) DeferredStringCharFromCode(
this, instr);
4597 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
4598 Register char_code =
ToRegister(instr->char_code());
4599 Register result =
ToRegister(instr->result());
4600 DCHECK(!char_code.is(result));
4603 __ b(
hi, deferred->entry());
4604 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
4607 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
4609 __ b(
eq, deferred->entry());
4610 __ bind(deferred->exit());
4621 __ mov(result, Operand::Zero());
4623 PushSafepointRegistersScope
scope(
this);
4624 __ SmiTag(char_code);
4627 __ StoreToSafepointRegisterSlot(
r0, result);
4631 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
4633 DCHECK(input->IsRegister() || input->IsStackSlot());
4634 LOperand* output = instr->result();
4635 DCHECK(output->IsDoubleRegister());
4637 if (input->IsStackSlot()) {
4640 __ vmov(single_scratch, scratch);
4648 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
4649 LOperand* input = instr->value();
4650 LOperand* output = instr->result();
4658 void LCodeGen::DoNumberTagI(LNumberTagI* instr) {
4661 DeferredNumberTagI(
LCodeGen* codegen, LNumberTagI* instr)
4664 codegen()->DoDeferredNumberTagIU(instr_,
4670 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4672 LNumberTagI* instr_;
4678 DeferredNumberTagI* deferred =
new(zone()) DeferredNumberTagI(
this, instr);
4680 __ b(
vs, deferred->entry());
4681 __ bind(deferred->exit());
4685 void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4688 DeferredNumberTagU(
LCodeGen* codegen, LNumberTagU* instr)
4691 codegen()->DoDeferredNumberTagIU(instr_,
4697 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4699 LNumberTagU* instr_;
4703 Register result =
ToRegister(instr->result());
4705 DeferredNumberTagU* deferred =
new(zone()) DeferredNumberTagU(
this, instr);
4707 __ b(
hi, deferred->entry());
4708 __ SmiTag(result, input);
4709 __ bind(deferred->exit());
4731 __ SmiUntag(src, dst);
4734 __ vmov(dbl_scratch.
low(), src);
4735 __ vcvt_f64_s32(dbl_scratch, dbl_scratch.
low());
4737 __ vmov(dbl_scratch.
low(), src);
4738 __ vcvt_f64_u32(dbl_scratch, dbl_scratch.
low());
4741 if (FLAG_inline_new) {
4742 __ LoadRoot(tmp3, Heap::kHeapNumberMapRootIndex);
4753 __ mov(dst, Operand::Zero());
4756 PushSafepointRegistersScope
scope(
this);
4764 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4766 instr->
pointer_map(), 0, Safepoint::kNoLazyDeopt);
4768 __ StoreToSafepointRegisterSlot(
r0, dst);
4779 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
4782 DeferredNumberTagD(
LCodeGen* codegen, LNumberTagD* instr)
4785 codegen()->DoDeferredNumberTagD(instr_);
4787 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
4789 LNumberTagD* instr_;
4798 DeferredNumberTagD* deferred =
new(zone()) DeferredNumberTagD(
this, instr);
4799 if (FLAG_inline_new) {
4800 __ LoadRoot(scratch, Heap::kHeapNumberMapRootIndex);
4802 __ AllocateHeapNumber(reg, temp1, temp2, scratch, deferred->entry(),
4805 __ jmp(deferred->entry());
4807 __ bind(deferred->exit());
4819 __ mov(reg, Operand::Zero());
4821 PushSafepointRegistersScope
scope(
this);
4828 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4830 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4832 __ StoreToSafepointRegisterSlot(
r0, reg);
4836 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4837 HChange* hchange = instr->hydrogen();
4847 __ SmiTag(output, input,
SetCC);
4850 __ SmiTag(output, input);
4855 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4857 Register result =
ToRegister(instr->result());
4858 if (instr->needs_check()) {
4861 __ SmiUntag(result, input,
SetCC);
4864 __ SmiUntag(result, input);
4872 bool can_convert_undefined_to_nan =
4873 instr->hydrogen()->can_convert_undefined_to_nan();
4874 bool deoptimize_on_minus_zero = instr->hydrogen()->deoptimize_on_minus_zero();
4879 Label convert, load_smi, done;
4882 __ UntagAndJumpIfSmi(scratch, input_reg, &load_smi);
4885 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
4887 if (can_convert_undefined_to_nan) {
4894 if (deoptimize_on_minus_zero) {
4895 __ VmovLow(scratch, result_reg);
4896 __ cmp(scratch, Operand::Zero());
4898 __ VmovHigh(scratch, result_reg);
4903 if (can_convert_undefined_to_nan) {
4906 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
4909 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4914 __ SmiUntag(scratch, input_reg);
4920 __ vmov(flt_scratch, scratch);
4921 __ vcvt_f64_s32(result_reg, flt_scratch);
4942 __ adc(scratch2, input_reg,
Operand(input_reg));
4946 __ LoadRoot(
ip, Heap::kHeapNumberMapRootIndex);
4949 if (instr->truncating()) {
4952 Label no_heap_number, check_bools, check_false;
4953 __ b(
ne, &no_heap_number);
4954 __ TruncateHeapNumberToI(input_reg, scratch2);
4959 __ bind(&no_heap_number);
4960 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
4962 __ b(
ne, &check_bools);
4963 __ mov(input_reg, Operand::Zero());
4966 __ bind(&check_bools);
4967 __ LoadRoot(
ip, Heap::kTrueValueRootIndex);
4969 __ b(
ne, &check_false);
4973 __ bind(&check_false);
4974 __ LoadRoot(
ip, Heap::kFalseValueRootIndex);
4976 DeoptimizeIf(
ne, instr,
"not a heap number/undefined/true/false");
4977 __ mov(input_reg, Operand::Zero());
4987 __ cmp(input_reg, Operand::Zero());
4998 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
5001 DeferredTaggedToI(
LCodeGen* codegen, LTaggedToI* instr)
5004 codegen()->DoDeferredTaggedToI(instr_);
5006 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5011 LOperand* input = instr->value();
5012 DCHECK(input->IsRegister());
5013 DCHECK(input->Equals(instr->result()));
5017 if (instr->hydrogen()->value()->representation().IsSmi()) {
5018 __ SmiUntag(input_reg);
5020 DeferredTaggedToI* deferred =
new(zone()) DeferredTaggedToI(
this, instr);
5024 __ SmiUntag(input_reg,
SetCC);
5027 __ b(
cs, deferred->entry());
5028 __ bind(deferred->exit());
5033 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
5034 LOperand* input = instr->value();
5035 DCHECK(input->IsRegister());
5036 LOperand* result = instr->result();
5037 DCHECK(result->IsDoubleRegister());
5042 HValue* value = instr->hydrogen()->value();
5050 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
5051 Register result_reg =
ToRegister(instr->result());
5056 if (instr->truncating()) {
5057 __ TruncateDoubleToI(result_reg, double_input);
5064 __ cmp(result_reg, Operand::Zero());
5075 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) {
5076 Register result_reg =
ToRegister(instr->result());
5081 if (instr->truncating()) {
5082 __ TruncateDoubleToI(result_reg, double_input);
5089 __ cmp(result_reg, Operand::Zero());
5102 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
5103 LOperand* input = instr->value();
5109 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
5110 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
5111 LOperand* input = instr->value();
5118 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
5125 if (instr->hydrogen()->is_interval_check()) {
5128 instr->hydrogen()->GetCheckInterval(&first, &last);
5130 __ cmp(scratch, Operand(first));
5133 if (first == last) {
5139 __ cmp(scratch, Operand(last));
5146 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
5150 __ tst(scratch, Operand(mask));
5153 __ and_(scratch, scratch, Operand(mask));
5154 __ cmp(scratch, Operand(tag));
5161 void LCodeGen::DoCheckValue(LCheckValue* instr) {
5163 Handle<HeapObject>
object = instr->hydrogen()->object().handle();
5165 if (isolate()->heap()->InNewSpace(*
object)) {
5167 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
5168 __ mov(
ip, Operand(Handle<Object>(cell)));
5172 __ cmp(reg, Operand(
object));
5180 PushSafepointRegistersScope
scope(
this);
5182 __ mov(
cp, Operand::Zero());
5183 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
5185 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
5193 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
5198 SetExit(check_maps());
5203 Label* check_maps() {
return &check_maps_; }
5204 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5211 if (instr->hydrogen()->IsStabilityCheck()) {
5212 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5213 for (
int i = 0;
i < maps->size(); ++
i) {
5214 AddStabilityDependency(maps->at(
i).handle());
5221 LOperand* input = instr->value();
5222 DCHECK(input->IsRegister());
5227 DeferredCheckMaps* deferred =
NULL;
5228 if (instr->hydrogen()->HasMigrationTarget()) {
5229 deferred =
new(zone()) DeferredCheckMaps(
this, instr, reg);
5230 __ bind(deferred->check_maps());
5233 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
5235 for (
int i = 0;
i < maps->size() - 1;
i++) {
5236 Handle<Map>
map = maps->at(
i).handle();
5237 __ CompareMap(map_reg,
map, &success);
5241 Handle<Map>
map = maps->at(maps->size() - 1).handle();
5242 __ CompareMap(map_reg,
map, &success);
5243 if (instr->hydrogen()->HasMigrationTarget()) {
5244 __ b(
ne, deferred->entry());
5253 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
5255 Register result_reg =
ToRegister(instr->result());
5260 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
5261 Register unclamped_reg =
ToRegister(instr->unclamped());
5262 Register result_reg =
ToRegister(instr->result());
5263 __ ClampUint8(result_reg, unclamped_reg);
5267 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
5269 Register input_reg =
ToRegister(instr->unclamped());
5270 Register result_reg =
ToRegister(instr->result());
5272 Label is_smi, done, heap_number;
5275 __ UntagAndJumpIfSmi(result_reg, input_reg, &is_smi);
5279 __ cmp(scratch, Operand(factory()->heap_number_map()));
5280 __ b(
eq, &heap_number);
5284 __ cmp(input_reg, Operand(factory()->undefined_value()));
5286 __ mov(result_reg, Operand::Zero());
5290 __ bind(&heap_number);
5297 __ ClampUint8(result_reg, result_reg);
5303 void LCodeGen::DoDoubleBits(LDoubleBits* instr) {
5305 Register result_reg =
ToRegister(instr->result());
5306 if (instr->hydrogen()->bits() == HDoubleBits::HIGH) {
5307 __ VmovHigh(result_reg, value_reg);
5309 __ VmovLow(result_reg, value_reg);
5314 void LCodeGen::DoConstructDouble(LConstructDouble* instr) {
5318 __ VmovHigh(result_reg, hi_reg);
5319 __ VmovLow(result_reg, lo_reg);
5323 void LCodeGen::DoAllocate(LAllocate* instr) {
5326 DeferredAllocate(
LCodeGen* codegen, LAllocate* instr)
5331 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5336 DeferredAllocate* deferred =
5337 new(zone()) DeferredAllocate(
this, instr);
5339 Register result =
ToRegister(instr->result());
5340 Register scratch =
ToRegister(instr->temp1());
5341 Register scratch2 =
ToRegister(instr->temp2());
5345 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
5348 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5349 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5350 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5352 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5353 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5357 if (instr->size()->IsConstantOperand()) {
5360 __ Allocate(
size, result, scratch, scratch2, deferred->entry(),
flags);
5362 __ jmp(deferred->entry());
5366 __ Allocate(
size, result, scratch, scratch2, deferred->entry(),
flags);
5369 __ bind(deferred->exit());
5371 if (instr->hydrogen()->MustPrefillWithFiller()) {
5373 if (instr->size()->IsConstantOperand()) {
5379 __ mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
5397 PushSafepointRegistersScope
scope(
this);
5398 if (instr->size()->IsRegister()) {
5409 __ stop(
"invalid allocation size");
5415 instr->hydrogen()->MustAllocateDoubleAligned());
5416 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
5417 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
5418 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5420 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
5421 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
5429 Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
5430 __ StoreToSafepointRegisterSlot(
r0, result);
5434 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5437 CallRuntime(Runtime::kToFastProperties, 1, instr);
5441 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5449 int literal_offset =
5451 __ Move(
r6, instr->hydrogen()->literals());
5453 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
5455 __ b(
ne, &materialized);
5460 __ mov(
r4, Operand(instr->hydrogen()->pattern()));
5461 __ mov(
r3, Operand(instr->hydrogen()->flags()));
5463 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5466 __ bind(&materialized);
5468 Label allocated, runtime_allocate;
5473 __ bind(&runtime_allocate);
5476 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5479 __ bind(&allocated);
5485 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
5489 bool pretenure = instr->hydrogen()->pretenure();
5490 if (!pretenure && instr->hydrogen()->has_no_literals()) {
5491 FastNewClosureStub stub(isolate(), instr->hydrogen()->strict_mode(),
5492 instr->hydrogen()->kind());
5493 __ mov(
r2, Operand(instr->hydrogen()->shared_info()));
5496 __ mov(
r2, Operand(instr->hydrogen()->shared_info()));
5497 __ mov(
r1, Operand(pretenure ? factory()->true_value()
5498 : factory()->false_value()));
5505 void LCodeGen::DoTypeof(LTypeof* instr) {
5512 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5516 instr->FalseLabel(chunk_),
5518 instr->type_literal());
5531 Factory* factory = isolate()->factory();
5533 __ JumpIfSmi(input, true_label);
5535 __ CompareRoot(scratch, Heap::kHeapNumberMapRootIndex);
5536 final_branch_condition =
eq;
5538 }
else if (
String::Equals(type_name, factory->string_string())) {
5539 __ JumpIfSmi(input, false_label);
5541 __ b(
ge, false_label);
5544 final_branch_condition =
eq;
5546 }
else if (
String::Equals(type_name, factory->symbol_string())) {
5547 __ JumpIfSmi(input, false_label);
5549 final_branch_condition =
eq;
5551 }
else if (
String::Equals(type_name, factory->boolean_string())) {
5552 __ CompareRoot(input, Heap::kTrueValueRootIndex);
5553 __ b(
eq, true_label);
5554 __ CompareRoot(input, Heap::kFalseValueRootIndex);
5555 final_branch_condition =
eq;
5557 }
else if (
String::Equals(type_name, factory->undefined_string())) {
5558 __ CompareRoot(input, Heap::kUndefinedValueRootIndex);
5559 __ b(
eq, true_label);
5560 __ JumpIfSmi(input, false_label);
5565 final_branch_condition =
ne;
5567 }
else if (
String::Equals(type_name, factory->function_string())) {
5570 __ JumpIfSmi(input, false_label);
5572 __ b(
eq, true_label);
5574 final_branch_condition =
eq;
5576 }
else if (
String::Equals(type_name, factory->object_string())) {
5578 __ JumpIfSmi(input, false_label);
5579 __ CompareRoot(input, Heap::kNullValueRootIndex);
5580 __ b(
eq, true_label);
5581 __ CheckObjectTypeRange(input,
5589 final_branch_condition =
eq;
5595 return final_branch_condition;
5599 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
5624 if (!info()->IsStub()) {
5627 int current_pc = masm()->pc_offset();
5628 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
5631 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
5633 while (padding_size > 0) {
5639 last_lazy_deopt_pc_ = masm()->pc_offset();
5643 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
5644 last_lazy_deopt_pc_ = masm()->pc_offset();
5645 DCHECK(instr->HasEnvironment());
5648 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5652 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
5666 void LCodeGen::DoDummy(LDummy* instr) {
5671 void LCodeGen::DoDummyUse(LDummyUse* instr) {
5677 PushSafepointRegistersScope
scope(
this);
5679 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5682 DCHECK(instr->HasEnvironment());
5684 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5688 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5691 DeferredStackCheck(
LCodeGen* codegen, LStackCheck* instr)
5696 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5698 LStackCheck* instr_;
5701 DCHECK(instr->HasEnvironment());
5705 if (instr->hydrogen()->is_function_entry()) {
5708 __ LoadRoot(
ip, Heap::kStackLimitRootIndex);
5711 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
5712 PredictableCodeSizeScope predictable(masm(),
5714 DCHECK(instr->context()->IsRegister());
5719 DCHECK(instr->hydrogen()->is_backwards_branch());
5721 DeferredStackCheck* deferred_stack_check =
5722 new(zone()) DeferredStackCheck(
this, instr);
5723 __ LoadRoot(
ip, Heap::kStackLimitRootIndex);
5725 __ b(
lo, deferred_stack_check->entry());
5727 __ bind(instr->done_label());
5728 deferred_stack_check->SetExit(instr->done_label());
5737 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
5745 DCHECK(!environment->HasBeenRegistered());
5752 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
5753 __ LoadRoot(
ip, Heap::kUndefinedValueRootIndex);
5757 Register null_value =
r5;
5758 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
5759 __ cmp(
r0, null_value);
5769 Label use_cache, call_runtime;
5770 __ CheckEnumCache(null_value, &call_runtime);
5776 __ bind(&call_runtime);
5778 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
5781 __ LoadRoot(
ip, Heap::kMetaMapRootIndex);
5784 __ bind(&use_cache);
5788 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
5790 Register result =
ToRegister(instr->result());
5791 Label load_cache, done;
5792 __ EnumLength(result,
map);
5794 __ b(
ne, &load_cache);
5795 __ mov(result, Operand(isolate()->factory()->empty_fixed_array()));
5798 __ bind(&load_cache);
5799 __ LoadInstanceDescriptors(
map, result);
5804 __ cmp(result, Operand::Zero());
5811 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5812 Register
object =
ToRegister(instr->value());
5824 PushSafepointRegistersScope
scope(
this);
5827 __ mov(
cp, Operand::Zero());
5828 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5830 instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
5831 __ StoreToSafepointRegisterSlot(
r0, result);
5835 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5838 DeferredLoadMutableDouble(
LCodeGen* codegen,
5839 LLoadFieldByIndex* instr,
5852 virtual LInstruction* instr()
OVERRIDE {
return instr_; }
5854 LLoadFieldByIndex* instr_;
5860 Register
object =
ToRegister(instr->object());
5862 Register result =
ToRegister(instr->result());
5865 DeferredLoadMutableDouble* deferred;
5866 deferred =
new(zone()) DeferredLoadMutableDouble(
5867 this, instr, result,
object, index);
5869 Label out_of_object, done;
5872 __ b(
ne, deferred->entry());
5873 __ mov(index, Operand(index,
ASR, 1));
5875 __ cmp(index, Operand::Zero());
5876 __ b(
lt, &out_of_object);
5878 __ add(scratch,
object, Operand::PointerOffsetFromSmiKey(index));
5883 __ bind(&out_of_object);
5887 __ sub(scratch, result, Operand::PointerOffsetFromSmiKey(index));
5890 __ bind(deferred->exit());
5895 void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
5896 Register context =
ToRegister(instr->context());
5901 void LCodeGen::DoAllocateBlockContext(LAllocateBlockContext* instr) {
5902 Handle<ScopeInfo> scope_info = instr->scope_info();
5905 CallRuntime(Runtime::kPushBlockContext, 2, instr);
An object reference managed by the v8 garbage collector.
static AllocationSiteMode GetMode(ElementsKind boilerplate_elements_kind)
static const int kLengthOffset
static const int kInstrSize
friend class BlockConstPoolScope
static U update(U previous, T value)
static const int kValueOffset
static const int kHeaderSize
static bool IsSupported(CpuFeature f)
static Address GetDeoptimizationEntry(Isolate *isolate, int id, BailoutType type, GetEntryMode mode=ENSURE_ENTRY_CODE)
static const int kEnumCacheOffset
Source to read snapshot and builtins files from.
Safepoint::DeoptMode deopt_mode_
virtual void BeforeCall(int call_size) const OVERRIDE
virtual ~SafepointGenerator()
SafepointGenerator(LCodeGen *codegen, LPointerMap *pointers, Safepoint::DeoptMode mode)
virtual void AfterCall() const OVERRIDE
static const int kHeaderSize
static int OffsetOfElementAt(int index)
static int SizeFor(int length)
static const int kGlobalProxyOffset
@ kAllUsesTruncatingToInt32
virtual HSourcePosition position() const
static Handle< T > cast(Handle< S > that)
static const uint32_t kSignMask
static const int kValueOffset
static const int kMapOffset
static const int kValueOffset
static const int kCacheStampOffset
static const int kSharedFunctionInfoOffset
static const int kContextOffset
static const int kCodeEntryOffset
static const int kPrototypeOrInitialMapOffset
static const int kHeaderSize
static const int kPropertiesOffset
static const int kInObjectFieldCount
static const int kFunctionOffset
bool IsNextEmittedBlock(int block_id) const
void RestoreCallerDoubles()
void DoStoreKeyedFixedArray(LStoreKeyed *instr)
DwVfpRegister ToDoubleRegister(LOperand *op) const
void RecordSafepointWithRegisters(LPointerMap *pointers, int arguments, Safepoint::DeoptMode mode)
@ RECORD_SIMPLE_SAFEPOINT
@ RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
int inlined_function_count_
bool IsSmi(LConstantOperand *op) const
friend class SafepointGenerator
LinkRegisterStatus GetLinkRegisterState() const
TranslationBuffer translations_
MemOperand BuildSeqStringOperand(Register string, LOperand *index, String::Encoding encoding)
Condition EmitIsString(Register input, Register temp1, Label *is_not_string, SmiCheck check_needed)
DwVfpRegister EmitLoadDoubleRegister(LOperand *op, SwVfpRegister flt_scratch, DwVfpRegister dbl_scratch)
void DoDeferredStackCheck(LStackCheck *instr)
SafepointTableBuilder safepoints_
void EmitVectorLoadICRegisters(T *instr)
static Condition TokenToCondition(Token::Value op, bool is_unsigned)
ZoneList< Handle< Object > > deoptimization_literals_
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal *instr, Label *map_check, Label *bool_load)
MemOperand PrepareKeyedOperand(Register key, Register base, bool key_is_constant, int constant_key, int element_size, int shift_size, int base_offset)
void PopulateDeoptimizationLiteralsWithInlinedFunctions()
void AddToTranslation(LEnvironment *environment, Translation *translation, LOperand *op, bool is_tagged, bool is_uint32, int *object_index_pointer, int *dematerialized_index_pointer)
ZoneList< LEnvironment * > deoptimizations_
void EmitIntegerMathAbs(LMathAbs *instr)
int32_t ToRepresentation(LConstantOperand *op, const Representation &r) const
void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc, LInstruction *instr, LOperand *context)
void EmitIsConstructCall(Register temp1, Register temp2)
int32_t ToInteger32(LConstantOperand *op) const
LPlatformChunk * chunk() const
void FinishCode(Handle< Code > code)
int LookupDestination(int block_id) const
Condition EmitTypeofIs(Label *true_label, Label *false_label, Register input, Handle< String > type_name)
void DoDeferredAllocate(LAllocate *instr)
void RecordSafepoint(LPointerMap *pointers, Safepoint::Kind kind, int arguments, Safepoint::DeoptMode mode)
void DoDeferredTaggedToI(LTaggedToI *instr)
LowDwVfpRegister double_scratch0()
void CallCodeGeneric(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, SafepointMode safepoint_mode, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
void DoDeferredStringCharCodeAt(LStringCharCodeAt *instr)
void CallCode(Handle< Code > code, RelocInfo::Mode mode, LInstruction *instr, TargetAddressStorageMode storage_mode=CAN_INLINE_TARGET_ADDRESS)
Safepoint::Kind expected_safepoint_kind_
ZoneList< LDeferredCode * > deferred_
bool GenerateDeferredCode()
void DoDeferredNumberTagIU(LInstruction *instr, LOperand *value, LOperand *temp1, LOperand *temp2, IntegerSignedness signedness)
Handle< Object > ToHandle(LConstantOperand *op) const
bool NeedsEagerFrame() const
int CallCodeSize(Handle< Code > code, RelocInfo::Mode mode)
DoubleRegister double_scratch()
void RegisterEnvironmentForDeoptimization(LEnvironment *environment, Safepoint::DeoptMode mode)
friend class LDeferredCode
void LoadContextFromDeferred(LOperand *context)
void GenerateOsrPrologue()
bool NeedsDeferredFrame() const
void DoDeferredInstanceMigration(LCheckMaps *instr, Register object)
void DoDeferredLoadMutableDouble(LLoadFieldByIndex *instr, Register result, Register object, Register index)
int DefineDeoptimizationLiteral(Handle< Object > literal)
void DeoptimizeIf(Condition condition, LInstruction *instr, const char *detail, Deoptimizer::BailoutType bailout_type)
int GetStackSlotCount() const
void CallKnownFunction(Handle< JSFunction > function, int formal_parameter_count, int arity, LInstruction *instr, R1State r1_state)
void WriteTranslation(LEnvironment *environment, Translation *translation)
void DoDeferredMathAbsTaggedHeapNumber(LMathAbs *instr)
void DoLoadKeyedFixedDoubleArray(LLoadKeyed *instr)
bool GenerateSafepointTable()
Operand ToOperand(LOperand *op)
Register EmitLoadRegister(LOperand *op, Register scratch)
void EmitClassOfTest(Label *if_true, Label *if_false, Handle< String > class_name, Register input, Register temporary, Register temporary2)
void DoLoadKeyedExternalArray(LLoadKeyed *instr)
double ToDouble(LConstantOperand *op) const
Register ToRegister(LOperand *op) const
void DoStoreKeyedExternalArray(LStoreKeyed *instr)
void RecordAndWritePosition(int position) OVERRIDE
bool IsInteger32(LConstantOperand *op) const
void PopulateDeoptimizationData(Handle< Code > code)
void DoParallelMove(LParallelMove *move)
Smi * ToSmi(LConstantOperand *op) const
void CallRuntime(const Runtime::Function *function, int num_arguments, LInstruction *instr, SaveFPRegsMode save_doubles=kDontSaveFPRegs)
void DoDeferredStringCharFromCode(LStringCharFromCode *instr)
ZoneList< Deoptimizer::JumpTableEntry > jump_table_
Condition EmitIsObject(Register input, Register temp1, Label *is_not_object, Label *is_object)
void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE
void EmitNumberUntagD(LNumberUntagD *instr, Register input, DwVfpRegister result, NumberUntagDMode mode)
MemOperand ToMemOperand(LOperand *op) const
void GenerateBodyInstructionPre(LInstruction *instr) OVERRIDE
MemOperand ToHighMemOperand(LOperand *op) const
void RecordSafepointWithLazyDeopt(LInstruction *instr, SafepointMode safepoint_mode)
void EmitFalseBranch(InstrType instr, Condition condition)
void DoLoadKeyedFixedArray(LLoadKeyed *instr)
LCodeGen(LChunk *chunk, MacroAssembler *assembler, CompilationInfo *info)
void EmitBranch(InstrType instr, Condition condition)
void DoDeferredNumberTagD(LNumberTagD *instr)
void DoStoreKeyedFixedDoubleArray(LStoreKeyed *instr)
friend class LEnvironment
virtual void Generate()=0
int instruction_index() const
virtual LInstruction * instr()=0
LParallelMove * GetParallelMove(InnerPosition pos)
virtual const char * Mnemonic() const =0
LEnvironment * environment() const
virtual LOperand * result() const =0
HValue * hydrogen_value() const
LPointerMap * pointer_map() const
virtual bool IsGap() const
bool is_typed_elements() const
ElementsKind elements_kind() const
uint32_t base_offset() const
bool is_typed_elements() const
bool NeedsCanonicalization()
ElementsKind elements_kind() const
uint32_t base_offset() const
LOperand * result() const
void Add(const T &element, AllocationPolicy allocator=AllocationPolicy())
static const Register ReceiverRegister()
static const Register NameRegister()
static void GenerateMiss(MacroAssembler *masm)
static const int kIsUndetectable
static const int kBitFieldOffset
static const int kInstanceTypeOffset
static const int kConstructorOffset
static const int kPrototypeOffset
static void EmitMathExp(MacroAssembler *masm, DwVfpRegister input, DwVfpRegister result, DwVfpRegister double_scratch1, DwVfpRegister double_scratch2, Register temp1, Register temp2, Register temp3)
static const Register exponent()
static const int kHashFieldOffset
static const int kMaxRegularHeapObjectSize
static void MaybeCallEntryHook(MacroAssembler *masm)
static const int kNoPosition
bool IsSmiOrTagged() const
static Representation Integer32()
int num_parameters() const
Variable * parameter(int index) const
static const int kHeaderSize
static const int kDontAdaptArgumentsSentinel
static const int kInstanceClassNameOffset
static const int kCompilerHintsOffset
static const int kMaxValue
static Smi * FromInt(int value)
static const int kFixedFrameSizeFromFp
static const int kContextOffset
static const int kCallerSPOffset
static const int kMarkerOffset
static const int kCallerFPOffset
static const Register ReceiverRegister()
static const Register NameRegister()
static const Register ValueRegister()
static Handle< Code > initialize_stub(Isolate *isolate, StrictMode strict_mode)
static void Generate(MacroAssembler *masm, Register string, Register index, Register result, Label *call_runtime)
static const unsigned int kContainsCachedArrayIndexMask
static const int32_t kMaxOneByteCharCode
static const int kLengthOffset
bool Equals(String *other)
static TypeFeedbackId None()
bool IsContextSlot() const
static const Register VectorRegister()
static const Register SlotRegister()
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf map
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in only print modified registers Trace simulator debug messages Implied by trace sim abort randomize hashes to avoid predictable hash Fixed seed to use to hash property Print the time it takes to deserialize the snapshot A filename with extra code to be included in the A file to write the raw snapshot bytes to(mksnapshot only)") DEFINE_STRING(raw_context_file
enable harmony numeric enable harmony object literal extensions Optimize object size
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be expose gc extension under the specified name show built in functions in stack traces use random jit cookie to mask large constants minimum length for automatic enable preparsing CPU profiler sampling interval in microseconds trace out of bounds accesses to external arrays default size of stack region v8 is allowed to maximum length of function source code printed in a stack trace min size of a semi the new space consists of two semi spaces print one trace line following each garbage collection do not print 
trace line after scavenger collection print cumulative GC statistics in name
enable harmony numeric literals(0o77, 0b11)") DEFINE_BOOL(harmony_object_literals
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long mode(MIPS only)") DEFINE_BOOL(enable_always_align_csp
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
enable harmony numeric enable harmony object literal extensions Optimize object Array shift
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
@ PRETENURE_OLD_POINTER_SPACE
@ PRETENURE_OLD_DATA_SPACE
bool IsPowerOfTwo32(uint32_t value)
static int Push(SpecialRPOStackFrame *stack, int depth, BasicBlock *child, int unvisited)
int WhichPowerOf2(uint32_t x)
Vector< const char > CStrVector(const char *data)
const LowDwVfpRegister d2
const uint32_t kStringEncodingMask
MemOperand ContextOperand(Register context, int index)
Condition CommuteCondition(Condition cond)
bool EvalComparison(Token::Value op, double op1, double op2)
const uint32_t kVFPDefaultNaNModeControlBit
const LowDwVfpRegister d1
const uint32_t kTwoByteStringTag
const LowDwVfpRegister d0
DwVfpRegister DoubleRegister
const int kPointerSizeLog2
@ LAST_NONCALLABLE_SPEC_OBJECT_TYPE
@ NUM_OF_CALLABLE_SPEC_OBJECT_TYPES
@ FIRST_NONCALLABLE_SPEC_OBJECT_TYPE
@ EXTERNAL_UINT16_ELEMENTS
@ EXTERNAL_INT16_ELEMENTS
@ EXTERNAL_UINT8_ELEMENTS
@ EXTERNAL_INT32_ELEMENTS
@ FAST_HOLEY_DOUBLE_ELEMENTS
@ SLOPPY_ARGUMENTS_ELEMENTS
@ EXTERNAL_FLOAT32_ELEMENTS
@ EXTERNAL_FLOAT64_ELEMENTS
@ FAST_HOLEY_SMI_ELEMENTS
@ EXTERNAL_UINT32_ELEMENTS
@ EXTERNAL_UINT8_CLAMPED_ELEMENTS
bool IsSimpleMapChangeTransition(ElementsKind from_kind, ElementsKind to_kind)
const uint32_t kOneByteStringTag
int ElementsKindToShiftSize(ElementsKind elements_kind)
MemOperand FieldMemOperand(Register object, int offset)
int32_t WhichPowerOf2Abs(int32_t x)
int StackSlotOffset(int index)
bool IsFastPackedElementsKind(ElementsKind kind)
@ NUMBER_CANDIDATE_IS_SMI
@ NUMBER_CANDIDATE_IS_ANY_TAGGED
ElementsKind GetHoleyElementsKind(ElementsKind packed_kind)
AllocationSiteOverrideMode
@ DISABLE_ALLOCATION_SITES
Condition NegateCondition(Condition cond)
static InstanceType TestType(HHasInstanceTypeAndBranch *instr)
const uint32_t kStringRepresentationMask
@ NEVER_INLINE_TARGET_ADDRESS
static Condition BranchCondition(HHasInstanceTypeAndBranch *instr)
static int ArgumentsOffsetWithoutFrame(int index)
static Condition ComputeCompareCondition(Token::Value op)
static const char * LabelType(LLabel *label)
MemOperand GlobalObjectOperand()
STATIC_ASSERT(sizeof(CPURegister)==sizeof(Register))
const intptr_t kSmiTagMask
@ NO_CALL_CONSTRUCTOR_FLAGS
bool IsFastSmiElementsKind(ElementsKind kind)
const uint32_t kHoleNanLower32
const uint32_t kSlotsZapValue
const uint32_t kHoleNanUpper32
PerThreadAssertScopeDebugOnly< DEFERRED_HANDLE_DEREFERENCE_ASSERT, true > AllowDeferredHandleDereference
Debugger support for the V8 JavaScript engine.
bool IsEquivalentTo(const JumpTableEntry &other) const
Deoptimizer::BailoutType bailout_type
bool is(DwVfpRegister reg) const
static DwVfpRegister FromAllocationIndex(int index)
SwVfpRegister low() const
static Register FromAllocationIndex(int index)
bool is(Register reg) const
#define T(name, string, precedence)
#define STATIC_CHAR_VECTOR(x)