20 class SafepointGenerator
FINAL :
public CallWrapper {
23 LPointerMap* pointers,
24 Safepoint::DeoptMode
mode)
33 codegen_->RecordSafepoint(pointers_, deopt_mode_);
38 LPointerMap* pointers_;
39 Safepoint::DeoptMode deopt_mode_;
56 virtual void Emit(Label* label)
const {
87 virtual void Emit(Label* label)
const {
117 virtual void Emit(Label* label)
const {
160 virtual void Emit(Label* label)
const {
186 virtual void Emit(Label* label)
const {
191 __ JumpIfNotHeapNumber(
value_, label);
206 virtual void Emit(Label* label)
const {
221 Translation* translation) {
222 if (environment ==
NULL)
return;
225 int translation_size = environment->translation_size();
227 int height = translation_size - environment->parameter_count();
230 bool has_closure_id = !info()->closure().is_null() &&
231 !info()->closure().is_identical_to(environment->closure());
232 int closure_id = has_closure_id
234 : Translation::kSelfLiteralId;
236 switch (environment->frame_type()) {
238 translation->BeginJSFrame(environment->ast_id(), closure_id, height);
241 translation->BeginConstructStubFrame(closure_id, translation_size);
244 DCHECK(translation_size == 1);
246 translation->BeginGetterStubFrame(closure_id);
249 DCHECK(translation_size == 2);
251 translation->BeginSetterStubFrame(closure_id);
254 translation->BeginCompiledStubFrame();
257 translation->BeginArgumentsAdaptorFrame(closure_id, translation_size);
263 int object_index = 0;
264 int dematerialized_index = 0;
265 for (
int i = 0;
i < translation_size; ++
i) {
266 LOperand* value = environment->values()->at(
i);
271 environment->HasTaggedValueAt(
i),
272 environment->HasUint32ValueAt(
i),
274 &dematerialized_index);
280 Translation* translation,
284 int* object_index_pointer,
285 int* dematerialized_index_pointer) {
286 if (op == LEnvironment::materialization_marker()) {
287 int object_index = (*object_index_pointer)++;
288 if (environment->ObjectIsDuplicateAt(object_index)) {
289 int dupe_of = environment->ObjectDuplicateOfAt(object_index);
290 translation->DuplicateObject(dupe_of);
293 int object_length = environment->ObjectLengthAt(object_index);
294 if (environment->ObjectIsArgumentsAt(object_index)) {
295 translation->BeginArgumentsObject(object_length);
297 translation->BeginCapturedObject(object_length);
299 int dematerialized_index = *dematerialized_index_pointer;
300 int env_offset = environment->translation_size() + dematerialized_index;
301 *dematerialized_index_pointer += object_length;
302 for (
int i = 0;
i < object_length; ++
i) {
303 LOperand* value = environment->values()->at(env_offset +
i);
307 environment->HasTaggedValueAt(env_offset +
i),
308 environment->HasUint32ValueAt(env_offset +
i),
309 object_index_pointer,
310 dematerialized_index_pointer);
315 if (op->IsStackSlot()) {
317 translation->StoreStackSlot(op->index());
318 }
else if (is_uint32) {
319 translation->StoreUint32StackSlot(op->index());
321 translation->StoreInt32StackSlot(op->index());
323 }
else if (op->IsDoubleStackSlot()) {
324 translation->StoreDoubleStackSlot(op->index());
325 }
else if (op->IsRegister()) {
328 translation->StoreRegister(reg);
329 }
else if (is_uint32) {
330 translation->StoreUint32Register(reg);
332 translation->StoreInt32Register(reg);
334 }
else if (op->IsDoubleRegister()) {
336 translation->StoreDoubleRegister(reg);
337 }
else if (op->IsConstantOperand()) {
338 HConstant* constant =
chunk()->LookupConstant(LConstantOperand::cast(op));
340 translation->StoreLiteral(src_index);
358 Safepoint::DeoptMode
mode) {
359 environment->set_has_been_used();
360 if (!environment->HasBeenRegistered()) {
362 int jsframe_count = 0;
369 Translation translation(&
translations_, frame_count, jsframe_count, zone());
372 int pc_offset = masm()->pc_offset();
373 environment->Register(deoptimization_index,
375 (
mode == Safepoint::kLazyDeopt) ? pc_offset : -1);
398 if ((code->kind() == Code::BINARY_OP_IC) ||
399 (code->kind() == Code::COMPARE_IC)) {
407 void LCodeGen::DoCallFunction(LCallFunction* instr) {
412 int arity = instr->arity();
413 CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
419 void LCodeGen::DoCallNew(LCallNew* instr) {
421 DCHECK(instr->IsMarkedAsCall());
424 __ Mov(x0, instr->arity());
426 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
436 void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
437 DCHECK(instr->IsMarkedAsCall());
441 __ Mov(x0, Operand(instr->arity()));
442 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
450 if (instr->arity() == 0) {
451 ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
453 }
else if (instr->arity() == 1) {
460 __ Cbz(x10, &packed_case);
463 ArraySingleArgumentConstructorStub stub(isolate(),
468 __ Bind(&packed_case);
471 ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
475 ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
497 if (context->IsRegister()) {
499 }
else if (context->IsStackSlot()) {
501 }
else if (context->IsConstantOperand()) {
502 HConstant* constant =
503 chunk_->LookupConstant(LConstantOperand::cast(context));
504 __ LoadHeapObject(
cp,
517 __ CallRuntimeSaveDoubles(
id);
519 instr->pointer_map(), argc, Safepoint::kNoLazyDeopt);
525 masm()->positions_recorder()->RecordPosition(position);
526 masm()->positions_recorder()->WriteRecordedPositions();
531 SafepointMode safepoint_mode) {
537 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
543 Safepoint::Kind kind,
545 Safepoint::DeoptMode deopt_mode) {
548 const ZoneList<LOperand*>* operands = pointers->GetNormalizedOperands();
550 masm(), kind, arguments, deopt_mode);
552 for (
int i = 0;
i < operands->length();
i++) {
553 LOperand* pointer = operands->at(
i);
554 if (pointer->IsStackSlot()) {
555 safepoint.DefinePointerSlot(pointer->index(), zone());
556 }
else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
557 safepoint.DefinePointerRegister(
ToRegister(pointer), zone());
561 if (kind & Safepoint::kWithRegisters) {
563 safepoint.DefinePointerRegister(
cp, zone());
568 Safepoint::DeoptMode deopt_mode) {
574 LPointerMap empty_pointers(zone());
581 Safepoint::DeoptMode deopt_mode) {
582 RecordSafepoint(pointers, Safepoint::kWithRegisters, arguments, deopt_mode);
587 LPhase phase(
"Z_Code generation",
chunk());
589 status_ = GENERATING;
602 DCHECK(info()->saves_caller_doubles());
604 Comment(
";;; Save clobbered callee double registers");
605 BitVector* doubles =
chunk()->allocated_double_registers();
606 BitVector::Iterator iterator(doubles);
608 while (!iterator.Done()) {
620 DCHECK(info()->saves_caller_doubles());
622 Comment(
";;; Restore clobbered callee double registers");
623 BitVector* doubles =
chunk()->allocated_double_registers();
624 BitVector::Iterator iterator(doubles);
626 while (!iterator.Done()) {
640 if (info()->IsOptimizing()) {
648 if (info_->this_has_uses() &&
649 info_->strict_mode() ==
SLOPPY &&
650 !info_->is_native()) {
652 int receiver_offset = info_->scope()->num_parameters() *
kXRegSize;
653 __ Peek(x10, receiver_offset);
654 __ JumpIfNotRoot(x10, Heap::kUndefinedValueRootIndex, &ok);
658 __ Poke(x10, receiver_offset);
665 info()->set_prologue_offset(masm_->pc_offset());
667 if (info()->IsStub()) {
670 __ Prologue(info()->IsCodePreAgingActive());
673 info_->AddNoFrameRange(0, masm_->pc_offset());
682 if (info()->saves_caller_doubles()) {
688 if (heap_slots > 0) {
689 Comment(
";;; Allocate local context");
690 bool need_write_barrier =
true;
692 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
693 FastNewContextStub stub(isolate(), heap_slots);
696 need_write_barrier =
false;
708 for (
int i = 0;
i < num_parameters;
i++) {
710 if (var->IsContextSlot()) {
712 Register scratch = x3;
720 __ Str(value, target);
722 if (need_write_barrier) {
723 __ RecordWriteContextSlot(
cp, target.offset(), value, scratch,
725 }
else if (FLAG_debug_code) {
727 __ JumpIfInNewSpace(
cp, &done);
728 __ Abort(kExpectedNewSpaceObject);
733 Comment(
";;; End allocate local context");
737 if (FLAG_trace && info()->IsOptimizing()) {
743 return !is_aborted();
763 if (instr->IsCall()) {
766 if (!instr->IsLazyBailout() && !instr->IsGap()) {
775 for (
int i = 0; !is_aborted() && (
i <
deferred_.length());
i++) {
779 instructions_->at(code->instruction_index())->hydrogen_value();
781 chunk()->
graph()->SourcePositionToScriptPosition(value->position()));
783 Comment(
";;; <@%d,#%d> "
784 "-------------------- Deferred %s --------------------",
785 code->instruction_index(),
786 code->instr()->hydrogen_value()->id(),
787 code->instr()->Mnemonic());
789 __ Bind(code->entry());
792 Comment(
";;; Build frame");
799 __ Add(
fp,
__ StackPointer(),
801 Comment(
";;; Deferred code");
807 Comment(
";;; Destroy frame");
821 masm()->CheckConstPool(
true,
false);
823 return !is_aborted();
828 Label needs_frame, restore_caller_doubles, call_deopt_entry;
831 Comment(
";;; -------------------- Jump table --------------------");
834 UseScratchRegisterScope temps(masm());
835 Register entry_offset = temps.AcquireX();
838 for (
int i = 0;
i < length;
i++) {
839 Deoptimizer::JumpTableEntry* table_entry =
jump_table_[
i];
840 __ Bind(&table_entry->label);
842 Address entry = table_entry->address;
843 DeoptComment(table_entry->reason);
848 __ Mov(entry_offset, entry - base);
852 bool last_entry = (
i + 1) == length;
854 if (table_entry->needs_frame) {
855 DCHECK(!info()->saves_caller_doubles());
856 if (!needs_frame.is_bound()) {
862 UseScratchRegisterScope temps(masm());
863 Register stub_marker = temps.AcquireX();
864 __ Bind(&needs_frame);
868 if (!last_entry)
__ B(&call_deopt_entry);
873 }
else if (info()->saves_caller_doubles()) {
875 if (!restore_caller_doubles.is_bound()) {
876 __ Bind(&restore_caller_doubles);
878 if (!last_entry)
__ B(&call_deopt_entry);
881 __ B(&restore_caller_doubles);
886 if (!last_entry)
__ B(&call_deopt_entry);
889 masm()->CheckConstPool(
false, last_entry);
893 Register deopt_entry = temps.AcquireX();
894 __ Bind(&call_deopt_entry);
895 __ Mov(deopt_entry, Operand(
reinterpret_cast<uint64_t
>(base),
897 __ Add(deopt_entry, deopt_entry, entry_offset);
898 __ Call(deopt_entry);
903 masm()->CheckConstPool(
true,
false);
907 if (!is_aborted()) status_ =
DONE;
908 return !is_aborted();
916 masm()->CheckVeneerPool(
true,
true);
918 return !is_aborted();
925 code->set_safepoint_table_offset(
safepoints_.GetCodeOffset());
926 if (code->is_optimized_code()) RegisterWeakObjectsInOptimizedCode(code);
933 if (length == 0)
return;
935 Handle<DeoptimizationInputData> data =
938 Handle<ByteArray> translations =
940 data->SetTranslationByteArray(*translations);
942 data->SetOptimizationId(
Smi::FromInt(info_->optimization_id()));
943 if (info_->IsOptimizing()) {
946 data->SetSharedFunctionInfo(*info_->shared_info());
960 data->SetOsrAstId(
Smi::FromInt(info_->osr_ast_id().ToInt()));
964 for (
int i = 0;
i < length;
i++) {
966 data->SetAstId(
i, env->ast_id());
967 data->SetTranslationIndex(
i,
Smi::FromInt(env->translation_index()));
968 data->SetArgumentsStackHeight(
i,
973 code->set_deoptimization_data(*data);
980 const ZoneList<Handle<JSFunction> >* inlined_closures =
981 chunk()->inlined_closures();
983 for (
int i = 0, length = inlined_closures->length();
i < length;
i++) {
999 if (override_bailout_type !=
NULL) {
1000 bailout_type = *override_bailout_type;
1003 DCHECK(environment->HasBeenRegistered());
1004 DCHECK(info()->IsOptimizing() || info()->IsStub());
1005 int id = environment->deoptimization_index();
1009 if (entry ==
NULL) {
1010 Abort(kBailoutWasNotPrepared);
1013 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
1015 ExternalReference count = ExternalReference::stress_deopt_count(isolate());
1023 __ Mov(w1, FLAG_deopt_every_n_times);
1036 if (info()->ShouldTrapOnDeopt()) {
1040 __ Bind(&dont_trap);
1047 if (branch_type ==
always &&
1049 DeoptComment(reason);
1073 const char* detail) {
1079 const char* detail) {
1085 const char* detail) {
1091 const char* detail) {
1098 const char* detail) {
1104 const char* detail) {
1111 __ CompareRoot(rt, index);
1118 __ CompareRoot(rt, index);
1124 const char* detail) {
1125 __ TestForMinusZero(input);
1131 __ CompareObjectMap(
object, Heap::kHeapNumberMapRootIndex);
1137 const char* detail) {
1143 const char* detail) {
1149 if (!info()->IsStub()) {
1152 intptr_t current_pc = masm()->pc_offset();
1154 if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
1155 ptrdiff_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
1157 InstructionAccurateScope instruction_accurate(
1160 while (padding_size > 0) {
1166 last_lazy_deopt_pc_ = masm()->pc_offset();
1179 if (op->IsConstantOperand()) {
1190 HConstant* constant = chunk_->LookupConstant(op);
1196 DCHECK((op !=
NULL) && op->IsDoubleRegister());
1203 if (op->IsConstantOperand()) {
1204 LConstantOperand* const_op = LConstantOperand::cast(op);
1205 HConstant* constant =
chunk()->LookupConstant(const_op);
1206 Representation r = chunk_->LookupLiteralRepresentation(const_op);
1208 DCHECK(constant->HasSmiValue());
1209 return Operand(
Smi::FromInt(constant->Integer32Value()));
1210 }
else if (r.IsInteger32()) {
1211 DCHECK(constant->HasInteger32Value());
1212 return Operand(constant->Integer32Value());
1213 }
else if (r.IsDouble()) {
1214 Abort(kToOperandUnsupportedDoubleImmediate);
1217 return Operand(constant->handle(isolate()));
1218 }
else if (op->IsRegister()) {
1220 }
else if (op->IsDoubleRegister()) {
1221 Abort(kToOperandIsDoubleRegisterUnimplemented);
1232 if (op->IsRegister()) {
1234 }
else if (op->IsConstantOperand()) {
1235 LConstantOperand* const_op = LConstantOperand::cast(op);
1236 HConstant* constant =
chunk()->LookupConstant(const_op);
1237 Representation r = chunk_->LookupLiteralRepresentation(const_op);
1239 return Operand(constant->Integer32Value());
1242 Abort(kToOperand32UnsupportedImmediate);
1259 DCHECK(!op->IsRegister());
1260 DCHECK(!op->IsDoubleRegister());
1261 DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
1264 if (op->
index() >= 0) {
1282 !info()->saves_caller_doubles()) {
1285 return MemOperand(masm()->StackPointer(), jssp_offset);
1299 HConstant* constant = chunk_->LookupConstant(op);
1300 DCHECK(chunk_->LookupLiteralRepresentation(op).IsSmiOrTagged());
1301 return constant->handle(isolate());
1307 if (shift_info->shift() ==
NO_SHIFT) {
1312 shift_info->shift(),
1319 return chunk_->LookupLiteralRepresentation(op).IsSmi();
1324 return chunk_->LookupLiteralRepresentation(op).IsSmiOrInteger32();
1329 HConstant* constant = chunk_->LookupConstant(op);
1330 return constant->Integer32Value();
1335 HConstant* constant = chunk_->LookupConstant(op);
1336 DCHECK(constant->HasDoubleValue());
1337 return constant->DoubleValue();
1345 case Token::EQ_STRICT:
1349 case Token::NE_STRICT:
1353 cond = is_unsigned ?
lo :
lt;
1356 cond = is_unsigned ?
hi :
gt;
1359 cond = is_unsigned ?
ls :
le;
1362 cond = is_unsigned ?
hs :
ge;
1365 case Token::INSTANCEOF:
1373 template<
class InstrType>
1376 int left_block = instr->TrueDestination(chunk_);
1377 int right_block = instr->FalseDestination(chunk_);
1379 int next_block = GetNextEmittedBlock();
1381 if (right_block == left_block) {
1383 }
else if (left_block == next_block) {
1384 branch.EmitInverted(chunk_->GetAssemblyLabel(right_block));
1386 branch.Emit(chunk_->GetAssemblyLabel(left_block));
1387 if (right_block != next_block) {
1388 __ B(chunk_->GetAssemblyLabel(right_block));
1394 template<
class InstrType>
1396 DCHECK((condition !=
al) && (condition !=
nv));
1402 template<
class InstrType>
1407 DCHECK((condition !=
al) && (condition !=
nv));
1413 template<
class InstrType>
1418 DCHECK((condition !=
al) && (condition !=
nv));
1424 template<
class InstrType>
1433 template<
class InstrType>
1441 template<
class InstrType>
1463 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) {
1464 Register arguments =
ToRegister(instr->arguments());
1465 Register result =
ToRegister(instr->result());
1472 if (instr->length()->IsConstantOperand() &&
1473 instr->index()->IsConstantOperand()) {
1474 int index =
ToInteger32(LConstantOperand::cast(instr->index()));
1475 int length =
ToInteger32(LConstantOperand::cast(instr->length()));
1478 }
else if (instr->index()->IsConstantOperand()) {
1480 int index =
ToInteger32(LConstantOperand::cast(instr->index()));
1481 int loc = index - 1;
1483 __ Sub(result.W(), length, loc);
1491 __ Sub(result.W(), length, index);
1492 __ Add(result.W(), result.W(), 1);
1498 void LCodeGen::DoAddE(LAddE* instr) {
1499 Register result =
ToRegister(instr->result());
1501 Operand right = (instr->right()->IsConstantOperand())
1502 ?
ToInteger32(LConstantOperand::cast(instr->right()))
1506 __ Add(result, left, right);
1510 void LCodeGen::DoAddI(LAddI* instr) {
1517 __ Adds(result, left, right);
1520 __ Add(result, left, right);
1525 void LCodeGen::DoAddS(LAddS* instr) {
1527 Register result =
ToRegister(instr->result());
1529 Operand right =
ToOperand(instr->right());
1531 __ Adds(result, left, right);
1534 __ Add(result, left, right);
1539 void LCodeGen::DoAllocate(LAllocate* instr) {
1542 DeferredAllocate(
LCodeGen* codegen, LAllocate* instr)
1544 virtual void Generate() { codegen()->DoDeferredAllocate(instr_); }
1545 virtual LInstruction* instr() {
return instr_; }
1550 DeferredAllocate* deferred =
new(zone()) DeferredAllocate(
this, instr);
1552 Register result =
ToRegister(instr->result());
1558 if (instr->hydrogen()->MustAllocateDoubleAligned()) {
1562 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
1563 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
1564 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
1566 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
1567 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
1571 if (instr->size()->IsConstantOperand()) {
1574 __ Allocate(
size, result, temp1, temp2, deferred->entry(),
flags);
1576 __ B(deferred->entry());
1581 __ Allocate(
size.X(), result, temp1, temp2, deferred->entry(),
flags);
1584 __ Bind(deferred->exit());
1586 if (instr->hydrogen()->MustPrefillWithFiller()) {
1587 Register filler_count = temp1;
1588 Register filler = temp2;
1589 Register untagged_result =
ToRegister(instr->temp3());
1591 if (instr->size()->IsConstantOperand()) {
1599 __ Mov(filler, Operand(isolate()->factory()->one_pointer_filler_map()));
1600 __ FillFields(untagged_result, filler_count, filler);
1613 PushSafepointRegistersScope
scope(
this);
1616 if (instr->size()->IsConstantOperand()) {
1617 __ Mov(
size,
ToSmi(LConstantOperand::cast(instr->size())));
1622 instr->hydrogen()->MustAllocateDoubleAligned());
1623 if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
1624 DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
1625 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
1627 }
else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
1628 DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
1637 Runtime::kAllocateInTargetSpace, 2, instr, instr->context());
1638 __ StoreToSafepointRegisterSlot(x0,
ToRegister(instr->result()));
1642 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
1643 Register receiver =
ToRegister(instr->receiver());
1644 Register
function =
ToRegister(instr->function());
1647 Register elements =
ToRegister(instr->elements());
1648 Register scratch = x5;
1652 DCHECK(instr->IsMarkedAsCall());
1657 __ Cmp(length, kArgumentsLimit);
1663 Register argc = receiver;
1665 __ Sxtw(argc, length);
1673 __ Cbz(length, &invoke);
1677 __ Subs(length, length, 1);
1681 DCHECK(instr->HasPointerMap());
1682 LPointerMap* pointers = instr->pointer_map();
1686 ParameterCount actual(argc);
1687 __ InvokeFunction(
function, actual,
CALL_FUNCTION, safepoint_generator);
1691 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
1696 Register result =
ToRegister(instr->result());
1698 if (instr->hydrogen()->from_inlined()) {
1707 DCHECK(masm()->StackPointer().
Is(jssp));
1711 Register previous_fp =
ToRegister(instr->temp());
1718 __ Csel(result,
fp, previous_fp,
ne);
1723 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
1724 Register elements =
ToRegister(instr->elements());
1729 __ Cmp(
fp, elements);
1730 __ Mov(result,
scope()->num_parameters());
1744 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1749 switch (instr->op()) {
1767 ExternalReference::mod_two_doubles_operation(isolate()),
1779 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
1786 CodeFactory::BinaryOpIC(isolate(), instr->op(),
NO_OVERWRITE).code();
1791 void LCodeGen::DoBitI(LBitI* instr) {
1796 switch (instr->op()) {
1797 case Token::BIT_AND:
__ And(result, left, right);
break;
1798 case Token::BIT_OR:
__ Orr(result, left, right);
break;
1799 case Token::BIT_XOR:
__ Eor(result, left, right);
break;
1807 void LCodeGen::DoBitS(LBitS* instr) {
1808 Register result =
ToRegister(instr->result());
1810 Operand right =
ToOperand(instr->right());
1812 switch (instr->op()) {
1813 case Token::BIT_AND:
__ And(result, left, right);
break;
1814 case Token::BIT_OR:
__ Orr(result, left, right);
break;
1815 case Token::BIT_XOR:
__ Eor(result, left, right);
break;
1823 void LCodeGen::DoBoundsCheck(LBoundsCheck *instr) {
1824 Condition cond = instr->hydrogen()->allow_equality() ?
hi :
hs;
1825 DCHECK(instr->hydrogen()->index()->representation().IsInteger32());
1826 DCHECK(instr->hydrogen()->length()->representation().IsInteger32());
1827 if (instr->index()->IsConstantOperand()) {
1830 __ Cmp(length, index);
1835 __ Cmp(index, length);
1837 if (FLAG_debug_code && instr->hydrogen()->skip_check()) {
1845 void LCodeGen::DoBranch(LBranch* instr) {
1846 Representation r = instr->hydrogen()->value()->representation();
1847 Label* true_label = instr->TrueLabel(chunk_);
1848 Label* false_label = instr->FalseLabel(chunk_);
1850 if (r.IsInteger32()) {
1851 DCHECK(!info()->IsStub());
1853 }
else if (r.IsSmi()) {
1854 DCHECK(!info()->IsStub());
1857 }
else if (r.IsDouble()) {
1864 HType type = instr->hydrogen()->value()->type();
1866 if (type.IsBoolean()) {
1867 DCHECK(!info()->IsStub());
1868 __ CompareRoot(value, Heap::kTrueValueRootIndex);
1870 }
else if (type.IsSmi()) {
1871 DCHECK(!info()->IsStub());
1873 }
else if (type.IsJSArray()) {
1874 DCHECK(!info()->IsStub());
1876 }
else if (type.IsHeapNumber()) {
1877 DCHECK(!info()->IsStub());
1882 }
else if (type.IsString()) {
1883 DCHECK(!info()->IsStub());
1888 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
1895 value, Heap::kUndefinedValueRootIndex, false_label);
1901 value, Heap::kTrueValueRootIndex, true_label);
1903 value, Heap::kFalseValueRootIndex, false_label);
1909 value, Heap::kNullValueRootIndex, false_label);
1915 __ Cbz(value, false_label);
1916 __ JumpIfSmi(value, true_label);
1917 }
else if (expected.NeedsMap()) {
1922 Register
map = NoReg;
1923 Register scratch = NoReg;
1925 if (expected.NeedsMap()) {
1932 if (expected.CanBeUndetectable()) {
1935 __ TestAndBranchIfAnySet(
1943 __ B(
ge, true_label);
1950 __ B(
ge, ¬_string);
1952 __ Cbz(scratch, false_label);
1954 __ Bind(¬_string);
1960 __ B(
eq, true_label);
1964 Label not_heap_number;
1965 __ JumpIfNotRoot(
map, Heap::kHeapNumberMapRootIndex, ¬_heap_number);
1971 __ B(
vs, false_label);
1972 __ B(
eq, false_label);
1974 __ Bind(¬_heap_number);
1977 if (!expected.IsGeneric()) {
1988 int formal_parameter_count,
1992 bool dont_adapt_arguments =
1994 bool can_invoke_directly =
1995 dont_adapt_arguments || formal_parameter_count == arity;
2004 if (function_reg.
IsNone()) {
2006 __ LoadObject(function_reg,
function);
2009 if (FLAG_debug_code) {
2012 __ JumpIfNotSmi(function_reg, &is_not_smi);
2013 __ Abort(kExpectedFunctionObject);
2014 __ Bind(&is_not_smi);
2017 if (can_invoke_directly) {
2023 if (dont_adapt_arguments) {
2024 __ Mov(arity_reg, arity);
2035 ParameterCount count(arity);
2036 ParameterCount expected(formal_parameter_count);
2037 __ InvokeFunction(function_reg, expected, count,
CALL_FUNCTION, generator);
2042 void LCodeGen::DoTailCallThroughMegamorphicCache(
2043 LTailCallThroughMegamorphicCache* instr) {
2060 isolate()->stub_cache()->GenerateProbe(masm(), instr->hydrogen()->flags(),
2061 must_teardown_frame, receiver,
name,
2062 scratch, extra, extra2, extra3);
2070 void LCodeGen::DoCallWithDescriptor(LCallWithDescriptor* instr) {
2071 DCHECK(instr->IsMarkedAsCall());
2074 LPointerMap* pointers = instr->pointer_map();
2077 if (instr->target()->IsConstantOperand()) {
2078 LConstantOperand* target = LConstantOperand::cast(instr->target());
2086 DCHECK(instr->target()->IsRegister());
2087 Register target =
ToRegister(instr->target());
2088 generator.BeforeCall(
__ CallSize(target));
2092 generator.AfterCall();
2097 void LCodeGen::DoCallJSFunction(LCallJSFunction* instr) {
2098 DCHECK(instr->IsMarkedAsCall());
2101 if (instr->hydrogen()->pass_argument_count()) {
2102 __ Mov(x0, Operand(instr->arity()));
2117 void LCodeGen::DoCallRuntime(LCallRuntime* instr) {
2118 CallRuntime(instr->function(), instr->arity(), instr);
2123 void LCodeGen::DoCallStub(LCallStub* instr) {
2126 switch (instr->hydrogen()->major_key()) {
2127 case CodeStub::RegExpExec: {
2128 RegExpExecStub stub(isolate());
2132 case CodeStub::SubString: {
2133 SubStringStub stub(isolate());
2137 case CodeStub::StringCompare: {
2138 StringCompareStub stub(isolate());
2149 void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) {
2157 PushSafepointRegistersScope
scope(
this);
2160 __ CallRuntimeSaveDoubles(Runtime::kTryMigrateInstance);
2162 instr->pointer_map(), 1, Safepoint::kNoLazyDeopt);
2163 __ StoreToSafepointRegisterSlot(x0, temp);
2169 void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
2172 DeferredCheckMaps(
LCodeGen* codegen, LCheckMaps* instr, Register
object)
2174 SetExit(check_maps());
2176 virtual void Generate() {
2177 codegen()->DoDeferredInstanceMigration(instr_, object_);
2179 Label* check_maps() {
return &check_maps_; }
2180 virtual LInstruction* instr() {
return instr_; }
2187 if (instr->hydrogen()->IsStabilityCheck()) {
2188 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
2189 for (
int i = 0;
i < maps->size(); ++
i) {
2190 AddStabilityDependency(maps->at(
i).handle());
2195 Register
object =
ToRegister(instr->value());
2196 Register map_reg =
ToRegister(instr->temp());
2200 DeferredCheckMaps* deferred =
NULL;
2201 if (instr->hydrogen()->HasMigrationTarget()) {
2202 deferred =
new(zone()) DeferredCheckMaps(
this, instr,
object);
2203 __ Bind(deferred->check_maps());
2206 const UniqueSet<Map>* maps = instr->hydrogen()->maps();
2208 for (
int i = 0;
i < maps->size() - 1;
i++) {
2209 Handle<Map>
map = maps->at(
i).handle();
2210 __ CompareMap(map_reg,
map);
2213 Handle<Map>
map = maps->at(maps->size() - 1).handle();
2214 __ CompareMap(map_reg,
map);
2217 if (instr->hydrogen()->HasMigrationTarget()) {
2218 __ B(
ne, deferred->entry());
2227 void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) {
2228 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
2234 void LCodeGen::DoCheckSmi(LCheckSmi* instr) {
2241 void LCodeGen::DoCheckInstanceType(LCheckInstanceType* instr) {
2243 Register scratch =
ToRegister(instr->temp());
2248 if (instr->hydrogen()->is_interval_check()) {
2250 instr->hydrogen()->GetCheckInterval(&first, &last);
2252 __ Cmp(scratch, first);
2253 if (first == last) {
2268 instr->hydrogen()->GetCheckMaskAndTag(&mask, &tag);
2271 DCHECK((tag == 0) || (tag == mask));
2274 "wrong instance type");
2277 "wrong instance type");
2281 __ Tst(scratch, mask);
2283 __ And(scratch, scratch, mask);
2284 __ Cmp(scratch, tag);
2292 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) {
2299 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) {
2302 __ ClampInt32ToUint8(result, input);
2306 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
2307 Register input =
ToRegister(instr->unclamped());
2313 __ JumpIfNotSmi(input, &is_not_smi);
2314 __ SmiUntag(result.X(), input);
2315 __ ClampInt32ToUint8(result);
2318 __ Bind(&is_not_smi);
2321 Label is_heap_number;
2322 __ JumpIfHeapNumber(input, &is_heap_number);
2326 "not a heap number/undefined");
2331 __ Bind(&is_heap_number);
2335 __ ClampDoubleToUint8(result, dbl_scratch, dbl_scratch2);
2341 void LCodeGen::DoDoubleBits(LDoubleBits* instr) {
2343 Register result_reg =
ToRegister(instr->result());
2344 if (instr->hydrogen()->bits() == HDoubleBits::HIGH) {
2345 __ Fmov(result_reg, value_reg);
2346 __ Lsr(result_reg, result_reg, 32);
2348 __ Fmov(result_reg.W(), value_reg.S());
2353 void LCodeGen::DoConstructDouble(LConstructDouble* instr) {
2360 __ Bfi(lo_reg, hi_reg, 32, 32);
2361 __ Fmov(result_reg, lo_reg);
2365 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
2366 Handle<String> class_name = instr->hydrogen()->class_name();
2367 Label* true_label = instr->TrueLabel(chunk_);
2368 Label* false_label = instr->FalseLabel(chunk_);
2371 Register scratch2 =
ToRegister(instr->temp2());
2373 __ JumpIfSmi(input, false_label);
2375 Register
map = scratch2;
2376 if (
String::Equals(isolate()->factory()->Function_string(), class_name)) {
2388 __ B(
lt, false_label);
2389 __ B(
eq, true_label);
2391 __ B(
eq, true_label);
2401 if (
String::Equals(class_name, isolate()->factory()->Object_string())) {
2402 __ JumpIfNotObjectType(
2405 __ JumpIfNotObjectType(
2426 void LCodeGen::DoCmpHoleAndBranchD(LCmpHoleAndBranchD* instr) {
2427 DCHECK(instr->hydrogen()->representation().IsDouble());
2433 __ Fcmp(
object,
object);
2434 __ B(
vc, instr->FalseLabel(chunk_));
2437 __ Fmov(temp,
object);
2442 void LCodeGen::DoCmpHoleAndBranchT(LCmpHoleAndBranchT* instr) {
2443 DCHECK(instr->hydrogen()->representation().IsTagged());
2444 Register
object =
ToRegister(instr->object());
2450 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
2459 void LCodeGen::DoCompareMinusZeroAndBranch(LCompareMinusZeroAndBranch* instr) {
2460 Representation rep = instr->hydrogen()->value()->representation();
2461 DCHECK(!rep.IsInteger32());
2462 Register scratch =
ToRegister(instr->temp());
2464 if (rep.IsDouble()) {
2466 instr->TrueLabel(
chunk()));
2471 __ JumpIfMinusZero(scratch, instr->TrueLabel(
chunk()));
2477 void LCodeGen::DoCompareNumericAndBranch(LCompareNumericAndBranch* instr) {
2478 LOperand* left = instr->left();
2479 LOperand* right = instr->right();
2485 if (left->IsConstantOperand() && right->IsConstantOperand()) {
2487 double left_val =
ToDouble(LConstantOperand::cast(left));
2488 double right_val =
ToDouble(LConstantOperand::cast(right));
2489 int next_block =
EvalComparison(instr->op(), left_val, right_val) ?
2490 instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_);
2493 if (instr->is_double()) {
2498 __ B(
vs, instr->FalseLabel(chunk_));
2501 if (instr->hydrogen_value()->representation().IsInteger32()) {
2502 if (right->IsConstantOperand()) {
2511 DCHECK(instr->hydrogen_value()->representation().IsSmi());
2512 if (right->IsConstantOperand()) {
2518 }
else if (left->IsConstantOperand()) {
2537 void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) {
2544 void LCodeGen::DoCmpT(LCmpT* instr) {
2551 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2558 DCHECK(instr->IsMarkedAsCall());
2559 __ LoadTrueFalseRoots(x1, x2);
2565 void LCodeGen::DoConstantD(LConstantD* instr) {
2566 DCHECK(instr->result()->IsDoubleRegister());
2568 if (instr->value() == 0) {
2569 if (copysign(1.0, instr->value()) == 1.0) {
2570 __ Fmov(result, fp_zero);
2572 __ Fneg(result, fp_zero);
2575 __ Fmov(result, instr->value());
2580 void LCodeGen::DoConstantE(LConstantE* instr) {
2581 __ Mov(
ToRegister(instr->result()), Operand(instr->value()));
2585 void LCodeGen::DoConstantI(LConstantI* instr) {
2586 DCHECK(is_int32(instr->value()));
2593 void LCodeGen::DoConstantS(LConstantS* instr) {
2594 __ Mov(
ToRegister(instr->result()), Operand(instr->value()));
2598 void LCodeGen::DoConstantT(LConstantT* instr) {
2599 Handle<Object>
object = instr->value(isolate());
2605 void LCodeGen::DoContext(LContext* instr) {
2607 Register result =
ToRegister(instr->result());
2608 if (info()->IsOptimizing()) {
2617 void LCodeGen::DoCheckValue(LCheckValue* instr) {
2619 Handle<HeapObject>
object = instr->hydrogen()->object().handle();
2621 if (isolate()->heap()->InNewSpace(*
object)) {
2622 UseScratchRegisterScope temps(masm());
2623 Register temp = temps.AcquireX();
2624 Handle<Cell> cell = isolate()->factory()->NewCell(
object);
2625 __ Mov(temp, Operand(Handle<Object>(cell)));
2629 __ Cmp(reg, Operand(
object));
2635 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
2636 last_lazy_deopt_pc_ = masm()->pc_offset();
2637 DCHECK(instr->HasEnvironment());
2640 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
2644 void LCodeGen::DoDateField(LDateField* instr) {
2646 Register result =
ToRegister(instr->result());
2647 Register temp1 = x10;
2648 Register temp2 = x11;
2649 Smi* index = instr->index();
2650 Label runtime, done;
2652 DCHECK(
object.is(result) &&
object.
Is(x0));
2653 DCHECK(instr->IsMarkedAsCall());
2659 if (index->value() == 0) {
2663 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
2664 __ Mov(temp1, Operand(stamp));
2667 __ Cmp(temp1, temp2);
2675 __ Mov(x1, Operand(index));
2676 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
2683 void LCodeGen::DoDeoptimize(LDeoptimize* instr) {
2693 Deoptimize(instr, instr->hydrogen()->reason(), &type);
2697 void LCodeGen::DoDivByPowerOf2I(LDivByPowerOf2I* instr) {
2699 int32_t divisor = instr->divisor();
2702 DCHECK(!result.is(dividend));
2705 HDiv* hdiv = instr->hydrogen();
2713 __ Cmp(dividend, 1);
2718 divisor != 1 && divisor != -1) {
2719 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
2720 __ Tst(dividend, mask);
2724 if (divisor == -1) {
2725 __ Neg(result, dividend);
2730 __ Mov(result, dividend);
2731 }
else if (
shift == 1) {
2732 __ Add(result, dividend, Operand(dividend,
LSR, 31));
2734 __ Mov(result, Operand(dividend,
ASR, 31));
2735 __ Add(result, dividend, Operand(result,
LSR, 32 -
shift));
2738 if (divisor < 0)
__ Neg(result, result);
2742 void LCodeGen::DoDivByConstI(LDivByConstI* instr) {
2744 int32_t divisor = instr->divisor();
2754 HDiv* hdiv = instr->hydrogen();
2759 __ TruncatingDiv(result, dividend,
Abs(divisor));
2760 if (divisor < 0)
__ Neg(result, result);
2765 __ Sxtw(dividend.X(), dividend);
2766 __ Mov(temp, divisor);
2767 __ Smsubl(temp.X(), result, temp, dividend.X());
2774 void LCodeGen::DoDivI(LDivI* instr) {
2775 HBinaryOperation* hdiv = instr->hydrogen();
2782 __ Sdiv(result, dividend, divisor);
2810 __ Cmp(dividend, 1);
2820 __ Msub(remainder, result, divisor, dividend);
2825 void LCodeGen::DoDoubleToIntOrSmi(LDoubleToIntOrSmi* instr) {
2836 if (instr->tag_result()) {
2837 __ SmiTag(result.X());
2842 void LCodeGen::DoDrop(LDrop* instr) {
2843 __ Drop(instr->count());
2847 void LCodeGen::DoDummy(LDummy* instr) {
2852 void LCodeGen::DoDummyUse(LDummyUse* instr) {
2857 void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
2860 DCHECK(instr->IsMarkedAsCall());
2864 bool pretenure = instr->hydrogen()->pretenure();
2865 if (!pretenure && instr->hydrogen()->has_no_literals()) {
2866 FastNewClosureStub stub(isolate(), instr->hydrogen()->strict_mode(),
2867 instr->hydrogen()->kind());
2868 __ Mov(x2, Operand(instr->hydrogen()->shared_info()));
2871 __ Mov(x2, Operand(instr->hydrogen()->shared_info()));
2872 __ Mov(x1, Operand(pretenure ? factory()->true_value()
2873 : factory()->false_value()));
2880 void LCodeGen::DoForInCacheArray(LForInCacheArray* instr) {
2882 Register result =
ToRegister(instr->result());
2883 Label load_cache, done;
2885 __ EnumLengthUntagged(result,
map);
2886 __ Cbnz(result, &load_cache);
2888 __ Mov(result, Operand(isolate()->factory()->empty_fixed_array()));
2891 __ Bind(&load_cache);
2892 __ LoadInstanceDescriptors(
map, result);
2901 void LCodeGen::DoForInPrepareMap(LForInPrepareMap* instr) {
2902 Register
object =
ToRegister(instr->object());
2903 Register null_value = x5;
2905 DCHECK(instr->IsMarkedAsCall());
2908 DeoptimizeIfRoot(
object, Heap::kUndefinedValueRootIndex, instr,
"undefined");
2910 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
2911 __ Cmp(
object, null_value);
2920 Label use_cache, call_runtime;
2921 __ CheckEnumCache(
object, null_value, x1, x2, x3, x4, &call_runtime);
2927 __ Bind(&call_runtime);
2929 CallRuntime(Runtime::kGetPropertyNamesFast, 1, instr);
2934 __ Bind(&use_cache);
2938 void LCodeGen::DoGetCachedArrayIndex(LGetCachedArrayIndex* instr) {
2940 Register result =
ToRegister(instr->result());
2942 __ AssertString(input);
2947 __ IndexFromHash(result, result);
2959 void LCodeGen::DoGoto(LGoto* instr) {
2964 void LCodeGen::DoHasCachedArrayIndexAndBranch(
2965 LHasCachedArrayIndexAndBranch* instr) {
3003 if (from ==
to)
return eq;
3011 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
3013 Register scratch =
ToRegister(instr->temp());
3015 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
3016 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
3018 __ CompareObjectType(input, scratch, scratch,
TestType(instr->hydrogen()));
3023 void LCodeGen::DoInnerAllocatedObject(LInnerAllocatedObject* instr) {
3024 Register result =
ToRegister(instr->result());
3025 Register base =
ToRegister(instr->base_object());
3026 if (instr->offset()->IsConstantOperand()) {
3034 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
3047 __ LoadTrueFalseRoots(x0, x1);
3048 __ Csel(x0, x0, x1,
eq);
3052 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
3055 DeferredInstanceOfKnownGlobal(
LCodeGen* codegen,
3056 LInstanceOfKnownGlobal* instr)
3058 virtual void Generate() {
3059 codegen()->DoDeferredInstanceOfKnownGlobal(instr_);
3061 virtual LInstruction* instr() {
return instr_; }
3063 LInstanceOfKnownGlobal* instr_;
3066 DeferredInstanceOfKnownGlobal* deferred =
3067 new(zone()) DeferredInstanceOfKnownGlobal(
this, instr);
3069 Label map_check, return_false, cache_miss, done;
3070 Register
object =
ToRegister(instr->value());
3071 Register result =
ToRegister(instr->result());
3073 Register map_check_site = x4;
3077 DCHECK(instr->IsMarkedAsCall());
3081 Register scratch = x10;
3084 __ JumpIfSmi(
object, &return_false);
3094 InstructionAccurateScope
scope(masm(), 5);
3095 __ bind(&map_check);
3097 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
3098 __ ldr(scratch, Immediate(Handle<Object>(cell)));
3100 __ cmp(
map, scratch);
3101 __ b(&cache_miss,
ne);
3104 DCHECK(masm()->InstructionsGeneratedSince(&map_check) == 4);
3106 __ ldr(result, Immediate(factory()->the_hole_value()));
3112 __ Bind(&cache_miss);
3115 __ Adr(map_check_site, &map_check);
3117 __ JumpIfRoot(
object, Heap::kNullValueRootIndex, &return_false);
3125 __ IsObjectJSStringType(
object, scratch,
NULL, &return_false);
3126 __ B(deferred->entry());
3128 __ Bind(&return_false);
3129 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3132 __ Bind(deferred->exit());
3148 PushSafepointRegistersScope
scope(
this);
3160 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
3161 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
3164 __ StoreToSafepointRegisterSlot(result, result);
3168 void LCodeGen::DoInstructionGap(LInstructionGap* instr) {
3173 void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
3176 __ Scvtf(result, value);
3180 void LCodeGen::DoInvokeFunction(LInvokeFunction* instr) {
3184 DCHECK(instr->HasPointerMap());
3186 Handle<JSFunction> known_function = instr->hydrogen()->known_function();
3187 if (known_function.is_null()) {
3188 LPointerMap* pointers = instr->pointer_map();
3190 ParameterCount count(instr->arity());
3194 instr->hydrogen()->formal_parameter_count(),
3203 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
3211 Label check_frame_marker;
3214 __ B(
ne, &check_frame_marker);
3218 __ Bind(&check_frame_marker);
3226 void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) {
3227 Label* is_object = instr->TrueLabel(chunk_);
3228 Label* is_not_object = instr->FalseLabel(chunk_);
3231 Register scratch =
ToRegister(instr->temp2());
3233 __ JumpIfSmi(value, is_not_object);
3234 __ JumpIfRoot(value, Heap::kNullValueRootIndex, is_object);
3243 __ IsInstanceJSObjectType(
map, scratch,
NULL);
3253 Label* is_not_string,
3256 __ JumpIfSmi(input, is_not_string);
3264 void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) {
3266 Register scratch =
ToRegister(instr->temp());
3269 instr->hydrogen()->value()->type().IsHeapObject()
3272 EmitIsString(val, scratch, instr->FalseLabel(chunk_), check_needed);
3278 void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) {
3285 void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) {
3289 if (!instr->hydrogen()->value()->type().IsHeapObject()) {
3290 __ JumpIfSmi(input, instr->FalseLabel(chunk_));
3300 if (label->is_loop_header())
return " (loop header)";
3301 if (label->is_osr_entry())
return " (OSR entry)";
3306 void LCodeGen::DoLabel(LLabel* label) {
3307 Comment(
";;; <@%d,#%d> -------------------- B%d%s --------------------",
3308 current_instruction_,
3309 label->hydrogen_value()->id(),
3313 __ Bind(label->label());
3314 current_block_ = label->block_id();
3319 void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
3320 Register context =
ToRegister(instr->context());
3321 Register result =
ToRegister(instr->result());
3323 if (instr->hydrogen()->RequiresHoleCheck()) {
3324 if (instr->hydrogen()->DeoptimizesOnHole()) {
3328 __ JumpIfNotRoot(result, Heap::kTheHoleValueRootIndex, ¬_the_hole);
3329 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3330 __ Bind(¬_the_hole);
3336 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
3337 Register
function =
ToRegister(instr->function());
3338 Register result =
ToRegister(instr->result());
3350 __ CompareObjectType(result, temp, temp,
MAP_TYPE);
3361 void LCodeGen::DoLoadGlobalCell(LLoadGlobalCell* instr) {
3362 Register result =
ToRegister(instr->result());
3363 __ Mov(result, Operand(Handle<Object>(instr->hydrogen()->cell().handle())));
3365 if (instr->hydrogen()->RequiresHoleCheck()) {
3374 Register vector =
ToRegister(instr->temp_vector());
3376 __ Mov(vector, instr->hydrogen()->feedback_vector());
3384 void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) {
3390 if (FLAG_vector_ics) {
3391 EmitVectorLoadICRegisters<LLoadGlobalGeneric>(instr);
3394 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
mode).code();
3404 bool key_is_constant,
3410 if (key_is_constant) {
3411 int key_offset = constant_key << element_size_shift;
3412 return MemOperand(base, key_offset + base_offset);
3420 if (base_offset == 0) {
3425 __ Add(scratch, base, base_offset);
3435 bool key_is_smi = instr->hydrogen()->
key()->representation().IsSmi();
3436 bool key_is_constant = instr->
key()->IsConstantOperand();
3438 int constant_key = 0;
3439 if (key_is_constant) {
3441 constant_key =
ToInteger32(LConstantOperand::cast(instr->
key()));
3442 if (constant_key & 0xf0000000) {
3443 Abort(kArrayIndexConstantValueTooBig);
3452 key_is_constant, constant_key,
3459 __ Ldr(result.S(), mem_op);
3460 __ Fcvt(result, result.S());
3464 __ Ldr(result, mem_op);
3468 switch (elements_kind) {
3471 __ Ldrsb(result, mem_op);
3477 __ Ldrb(result, mem_op);
3481 __ Ldrsh(result, mem_op);
3485 __ Ldrh(result, mem_op);
3489 __ Ldrsw(result, mem_op);
3493 __ Ldr(result.W(), mem_op);
3496 __ Tst(result, 0xFFFFFFFF80000000);
3534 if (key_is_tagged) {
3546 DCHECK((element_size_shift >= 0) && (element_size_shift <= 4));
3550 __ Add(base, elements,
Operand(key,
SXTW, element_size_shift));
3553 __ Add(base, elements, base_offset);
3565 if (instr->
key()->IsConstantOperand()) {
3566 DCHECK(instr->hydrogen()->RequiresHoleCheck() ||
3569 int constant_key =
ToInteger32(LConstantOperand::cast(instr->
key()));
3570 if (constant_key & 0xf0000000) {
3571 Abort(kArrayIndexConstantValueTooBig);
3578 bool key_is_tagged = instr->hydrogen()->
key()->representation().IsSmi();
3581 instr->hydrogen()->representation(),
3585 __ Ldr(result, mem_op);
3587 if (instr->hydrogen()->RequiresHoleCheck()) {
3592 __ Ldr(scratch, mem_op);
3599 void LCodeGen::DoLoadKeyedFixed(LLoadKeyedFixed* instr) {
3600 Register elements =
ToRegister(instr->elements());
3601 Register result =
ToRegister(instr->result());
3604 Representation representation = instr->hydrogen()->representation();
3605 if (instr->key()->IsConstantOperand()) {
3607 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
3608 int offset = instr->base_offset() +
3610 if (representation.IsInteger32()) {
3619 Register load_base =
ToRegister(instr->temp());
3621 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
3624 instr->hydrogen()->elements_kind(),
3625 representation, instr->base_offset());
3628 __ Load(result, mem_op, representation);
3630 if (instr->hydrogen()->RequiresHoleCheck()) {
3640 void LCodeGen::DoLoadKeyedGeneric(LLoadKeyedGeneric* instr) {
3644 if (FLAG_vector_ics) {
3645 EmitVectorLoadICRegisters<LLoadKeyedGeneric>(instr);
3648 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
3655 void LCodeGen::DoLoadNamedField(LLoadNamedField* instr) {
3656 HObjectAccess access = instr->hydrogen()->access();
3657 int offset = access.offset();
3658 Register
object =
ToRegister(instr->object());
3660 if (access.IsExternalMemory()) {
3661 Register result =
ToRegister(instr->result());
3662 __ Load(result,
MemOperand(
object, offset), access.representation());
3666 if (instr->hydrogen()->representation().IsDouble()) {
3672 Register result =
ToRegister(instr->result());
3674 if (access.IsInobject()) {
3682 if (access.representation().IsSmi() &&
3683 instr->hydrogen()->representation().IsInteger32()) {
3695 void LCodeGen::DoLoadNamedGeneric(LLoadNamedGeneric* instr) {
3700 if (FLAG_vector_ics) {
3701 EmitVectorLoadICRegisters<LLoadNamedGeneric>(instr);
3704 Handle<Code> ic = CodeFactory::LoadIC(isolate(),
NOT_CONTEXTUAL).code();
3711 void LCodeGen::DoLoadRoot(LLoadRoot* instr) {
3712 Register result =
ToRegister(instr->result());
3713 __ LoadRoot(result, instr->index());
3717 void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) {
3718 Register result =
ToRegister(instr->result());
3720 __ EnumLengthSmi(result,
map);
3724 void LCodeGen::DoMathAbs(LMathAbs* instr) {
3725 Representation r = instr->hydrogen()->value()->representation();
3729 __ Fabs(result, input);
3730 }
else if (r.IsSmi() || r.IsInteger32()) {
3731 Register input = r.IsSmi() ?
ToRegister(instr->value())
3733 Register result = r.IsSmi() ?
ToRegister(instr->result())
3735 __ Abs(result, input);
3743 Label* allocation_entry) {
3761 Label runtime_allocation;
3771 __ Mov(result, input);
3779 __ Bind(allocation_entry);
3780 __ AllocateHeapNumber(result, &runtime_allocation, temp1, temp2);
3784 __ Bind(&runtime_allocation);
3785 if (FLAG_debug_code) {
3792 __ JumpIfSmi(result, &result_ok);
3793 __ Cmp(input, result);
3794 __ Assert(
eq, kUnexpectedValue);
3795 __ Bind(&result_ok);
3798 { PushSafepointRegistersScope
scope(
this);
3801 __ StoreToSafepointRegisterSlot(x0, result);
3813 virtual void Generate() {
3814 codegen()->DoDeferredMathAbsTagged(instr_, exit(),
3815 allocation_entry());
3817 virtual LInstruction* instr() {
return instr_; }
3818 Label* allocation_entry() {
return &allocation; }
3820 LMathAbsTagged* instr_;
3828 DeferredMathAbsTagged* deferred =
3829 new(zone()) DeferredMathAbsTagged(
this, instr);
3831 DCHECK(instr->hydrogen()->
value()->representation().IsTagged() ||
3832 instr->hydrogen()->
value()->representation().IsSmi());
3844 __ JumpIfNotSmi(input, deferred->entry());
3851 __ B(deferred->allocation_entry());
3853 __ Bind(deferred->exit());
3860 void LCodeGen::DoMathExp(LMathExp* instr) {
3870 double_temp1, double_temp2,
3871 temp1, temp2, temp3);
3875 void LCodeGen::DoMathFloorD(LMathFloorD* instr) {
3879 __ Frintm(result, input);
3883 void LCodeGen::DoMathFloorI(LMathFloorI* instr) {
3885 Register result =
ToRegister(instr->result());
3891 __ Fcvtms(result, input);
3895 __ Cmp(result, Operand(result,
SXTW));
3902 void LCodeGen::DoFlooringDivByPowerOf2I(LFlooringDivByPowerOf2I* instr) {
3905 int32_t divisor = instr->divisor();
3917 __ Mov(result, Operand(dividend,
ASR,
shift));
3922 __ Negs(result, dividend);
3928 if (divisor == -1) {
3937 __ Mov(result, Operand(dividend,
ASR,
shift));
3941 __ Asr(result, result,
shift);
3946 void LCodeGen::DoFlooringDivByConstI(LFlooringDivByConstI* instr) {
3948 int32_t divisor = instr->divisor();
3958 HMathFloorOfDiv* hdiv = instr->hydrogen();
3967 __ TruncatingDiv(result, dividend,
Abs(divisor));
3968 if (divisor < 0)
__ Neg(result, result);
3976 Label needs_adjustment, done;
3977 __ Cmp(dividend, 0);
3978 __ B(divisor > 0 ?
lt :
gt, &needs_adjustment);
3979 __ TruncatingDiv(result, dividend,
Abs(divisor));
3980 if (divisor < 0)
__ Neg(result, result);
3982 __ Bind(&needs_adjustment);
3983 __ Add(temp, dividend, Operand(divisor > 0 ? 1 : -1));
3984 __ TruncatingDiv(result, temp,
Abs(divisor));
3985 if (divisor < 0)
__ Neg(result, result);
3986 __ Sub(result, result, Operand(1));
3992 void LCodeGen::DoFlooringDivI(LFlooringDivI* instr) {
4000 __ Sdiv(result, dividend, divisor);
4008 __ Cmp(dividend, 1);
4025 __ Eor(remainder, dividend, divisor);
4029 __ Msub(remainder, result, divisor, dividend);
4030 __ Cbz(remainder, &done);
4031 __ Sub(result, result, 1);
4037 void LCodeGen::DoMathLog(LMathLog* instr) {
4038 DCHECK(instr->IsMarkedAsCall());
4040 __ CallCFunction(ExternalReference::math_log_double_function(isolate()),
4046 void LCodeGen::DoMathClz32(LMathClz32* instr) {
4049 __ Clz(result, input);
4053 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) {
4066 __ Fabs(result, input);
4077 void LCodeGen::DoPower(LPower* instr) {
4078 Representation exponent_type = instr->hydrogen()->right()->representation();
4083 DCHECK(!instr->right()->IsDoubleRegister() ||
4085 DCHECK(exponent_type.IsInteger32() || !instr->right()->IsRegister() ||
4087 DCHECK(!exponent_type.IsInteger32() ||
4092 if (exponent_type.IsSmi()) {
4095 }
else if (exponent_type.IsTagged()) {
4097 __ JumpIfSmi(tagged_exponent, &no_deopt);
4102 }
else if (exponent_type.IsInteger32()) {
4105 __ Sxtw(integer_exponent, integer_exponent);
4109 DCHECK(exponent_type.IsDouble());
4116 void LCodeGen::DoMathRoundD(LMathRoundD* instr) {
4125 __ Frinta(result, input);
4126 __ Fcmp(input, 0.0);
4135 __ Fmov(scratch_d, 0.5);
4136 __ Fadd(result, input, scratch_d);
4137 __ Frintm(result, result);
4139 __ Fabs(result, result);
4140 __ Fneg(result, result);
4146 void LCodeGen::DoMathRoundI(LMathRoundI* instr) {
4150 Register result =
ToRegister(instr->result());
4162 __ Fmov(dot_five, 0.5);
4163 __ Fadd(temp, input, dot_five);
4164 __ Fcvtms(result, temp);
4170 __ Cmp(result, Operand(result.W(),
SXTW));
4184 __ Fmov(result, input);
4189 __ Fcmp(input, dot_five);
4195 __ Cset(result,
ge);
4200 void LCodeGen::DoMathFround(LMathFround* instr) {
4203 __ Fcvt(result.S(), input);
4204 __ Fcvt(result, result.S());
4208 void LCodeGen::DoMathSqrt(LMathSqrt* instr) {
4211 __ Fsqrt(result, input);
4215 void LCodeGen::DoMathMinMax(LMathMinMax* instr) {
4216 HMathMinMax::Operation op = instr->hydrogen()->operation();
4217 if (instr->hydrogen()->representation().IsInteger32()) {
4222 __ Cmp(left, right);
4223 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ?
ge :
le);
4224 }
else if (instr->hydrogen()->representation().IsSmi()) {
4225 Register result =
ToRegister(instr->result());
4227 Operand right =
ToOperand(instr->right());
4229 __ Cmp(left, right);
4230 __ Csel(result, left, right, (op == HMathMinMax::kMathMax) ?
ge :
le);
4232 DCHECK(instr->hydrogen()->representation().IsDouble());
4237 if (op == HMathMinMax::kMathMax) {
4238 __ Fmax(result, left, right);
4240 DCHECK(op == HMathMinMax::kMathMin);
4241 __ Fmin(result, left, right);
4247 void LCodeGen::DoModByPowerOf2I(LModByPowerOf2I* instr) {
4249 int32_t divisor = instr->divisor();
4258 HMod* hmod = instr->hydrogen();
4259 int32_t mask = divisor < 0 ? -(divisor + 1) : (divisor - 1);
4260 Label dividend_is_not_negative, done;
4262 __ Tbz(dividend,
kWSignBit, ÷nd_is_not_negative);
4264 __ Neg(dividend, dividend);
4265 __ And(dividend, dividend, mask);
4266 __ Negs(dividend, dividend);
4273 __ bind(÷nd_is_not_negative);
4274 __ And(dividend, dividend, mask);
4279 void LCodeGen::DoModByConstI(LModByConstI* instr) {
4281 int32_t divisor = instr->divisor();
4291 __ TruncatingDiv(result, dividend,
Abs(divisor));
4292 __ Sxtw(dividend.X(), dividend);
4293 __ Mov(temp,
Abs(divisor));
4294 __ Smsubl(result.X(), result, temp, dividend.X());
4297 HMod* hmod = instr->hydrogen();
4299 Label remainder_not_zero;
4300 __ Cbnz(result, &remainder_not_zero);
4302 __ bind(&remainder_not_zero);
4307 void LCodeGen::DoModI(LModI* instr) {
4314 __ Sdiv(result, dividend, divisor);
4318 __ Msub(result, result, divisor, dividend);
4320 __ Cbnz(result, &done);
4327 void LCodeGen::DoMulConstIS(LMulConstIS* instr) {
4328 DCHECK(instr->hydrogen()->representation().IsSmiOrInteger32());
4329 bool is_smi = instr->hydrogen()->representation().IsSmi();
4338 bool bailout_on_minus_zero =
4341 if (bailout_on_minus_zero) {
4345 }
else if (right == 0) {
4356 __ Negs(result, left);
4359 __ Neg(result, left);
4372 __ Adds(result, left, left);
4375 __ Add(result, left, left);
4388 Register scratch = result;
4390 __ Cls(scratch, left);
4391 __ Cmp(scratch, right_log2);
4397 __ Lsl(result, left, right_log2);
4401 __ Negs(result, Operand(left,
LSL, right_log2));
4404 __ Neg(result, Operand(left,
LSL, right_log2));
4423 __ Neg(result, result);
4434 __ Neg(result, result);
4443 void LCodeGen::DoMulI(LMulI* instr) {
4449 bool bailout_on_minus_zero =
4452 if (bailout_on_minus_zero && !left.Is(right)) {
4464 __ Smull(result.X(), left, right);
4465 __ Cmp(result.X(), Operand(result,
SXTW));
4468 __ Mul(result, left, right);
4473 void LCodeGen::DoMulS(LMulS* instr) {
4474 Register result =
ToRegister(instr->result());
4479 bool bailout_on_minus_zero =
4482 if (bailout_on_minus_zero && !left.Is(right)) {
4495 __ Smulh(result, left, right);
4496 __ Cmp(result, Operand(result.W(),
SXTW));
4505 __ Mul(result, result, result);
4506 }
else if (result.Is(left) && !left.Is(right)) {
4509 __ SmiUntag(result, left);
4510 __ Mul(result, result, right);
4512 DCHECK(!left.Is(result));
4516 __ SmiUntag(result, right);
4517 __ Mul(result, left, result);
4527 Register result =
ToRegister(instr->result());
4530 PushSafepointRegistersScope
scope(
this);
4537 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4539 instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
4540 __ StoreToSafepointRegisterSlot(x0, result);
4544 void LCodeGen::DoNumberTagD(LNumberTagD* instr) {
4547 DeferredNumberTagD(
LCodeGen* codegen, LNumberTagD* instr)
4549 virtual void Generate() { codegen()->DoDeferredNumberTagD(instr_); }
4550 virtual LInstruction* instr() {
return instr_; }
4552 LNumberTagD* instr_;
4556 Register result =
ToRegister(instr->result());
4560 DeferredNumberTagD* deferred =
new(zone()) DeferredNumberTagD(
this, instr);
4561 if (FLAG_inline_new) {
4562 __ AllocateHeapNumber(result, deferred->entry(), temp1, temp2);
4564 __ B(deferred->entry());
4567 __ Bind(deferred->exit());
4576 Label slow, convert_and_store;
4581 if (FLAG_inline_new) {
4583 __ AllocateHeapNumber(dst, &slow,
scratch1, scratch2);
4584 __ B(&convert_and_store);
4595 PushSafepointRegistersScope
scope(
this);
4603 __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
4605 instr->
pointer_map(), 0, Safepoint::kNoLazyDeopt);
4606 __ StoreToSafepointRegisterSlot(x0, dst);
4611 __ Bind(&convert_and_store);
4613 __ Ucvtf(dbl_scratch, src);
4618 void LCodeGen::DoNumberTagU(LNumberTagU* instr) {
4621 DeferredNumberTagU(
LCodeGen* codegen, LNumberTagU* instr)
4623 virtual void Generate() {
4624 codegen()->DoDeferredNumberTagU(instr_,
4629 virtual LInstruction* instr() {
return instr_; }
4631 LNumberTagU* instr_;
4635 Register result =
ToRegister(instr->result());
4637 DeferredNumberTagU* deferred =
new(zone()) DeferredNumberTagU(
this, instr);
4639 __ B(
hi, deferred->entry());
4640 __ SmiTag(result, value.X());
4641 __ Bind(deferred->exit());
4645 void LCodeGen::DoNumberUntagD(LNumberUntagD* instr) {
4647 Register scratch =
ToRegister(instr->temp());
4649 bool can_convert_undefined_to_nan =
4650 instr->hydrogen()->can_convert_undefined_to_nan();
4652 Label done, load_smi;
4655 HValue* value = instr->hydrogen()->value();
4660 __ JumpIfSmi(input, &load_smi);
4662 Label convert_undefined;
4665 if (can_convert_undefined_to_nan) {
4666 __ JumpIfNotHeapNumber(input, &convert_undefined);
4673 if (instr->hydrogen()->deoptimize_on_minus_zero()) {
4678 if (can_convert_undefined_to_nan) {
4679 __ Bind(&convert_undefined);
4681 "not a heap number/undefined");
4683 __ LoadRoot(scratch, Heap::kNanValueRootIndex);
4695 __ SmiUntagToDouble(result, input);
4701 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4709 DCHECK(!environment->HasBeenRegistered());
4716 void LCodeGen::DoParameter(LParameter* instr) {
4721 void LCodeGen::DoPreparePushArguments(LPreparePushArguments* instr) {
4726 void LCodeGen::DoPushArguments(LPushArguments* instr) {
4727 MacroAssembler::PushPopQueue args(masm());
4729 for (int i = 0; i < instr->ArgumentCount(); ++i) {
4730 LOperand* arg = instr->argument(i);
4731 if (arg->IsDoubleRegister() || arg->IsDoubleStackSlot()) {
4732 Abort(kDoPushArgumentNotImplementedForDoubleType);
4745 void LCodeGen::DoReturn(LReturn* instr) {
4746 if (FLAG_trace && info()->IsOptimizing()) {
4756 if (info()->saves_caller_doubles()) {
4760 int no_frame_start = -1;
4762 Register stack_pointer = masm()->StackPointer();
4763 __ Mov(stack_pointer, fp);
4764 no_frame_start = masm_->pc_offset();
4768 if (instr->has_constant_parameter_count()) {
4769 int parameter_count = ToInteger32(instr->constant_parameter_count());
4770 __ Drop(parameter_count + 1);
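// The '+ 1' drops the receiver as well, which sits beneath the parameters.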
4772 Register parameter_count = ToRegister(instr->parameter_count());
4773 __ DropBySMI(parameter_count);
4777 if (no_frame_start != -1) {
4778 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
4787 if (index->IsConstantOperand()) {
4788 int offset = ToInteger32(LConstantOperand::cast(index));
4806 void LCodeGen::DoSeqStringGetChar(LSeqStringGetChar* instr) {
4812 if (FLAG_debug_code) {
4817 Register dbg_temp = temps.AcquireX();
4822 __ And(dbg_temp, dbg_temp, String::kStringRepresentationMask | String::kStringEncodingMask);
4827 __ Cmp(dbg_temp, Operand(encoding == String::ONE_BYTE_ENCODING ? one_byte_seq_type : two_byte_seq_type));
4828 __ Check(eq, kUnexpectedStringType);
4834 __ Ldrb(result, operand);
4836 __ Ldrh(result, operand);
4841 void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
4843 Register string = ToRegister(instr->string());
4847 if (FLAG_debug_code) {
4854 ? one_byte_seq_type : two_byte_seq_type;
4861 __ Strb(value, operand);
4863 __ Strh(value, operand);
4868 void LCodeGen::DoSmiTag(LSmiTag* instr) {
4869 HChange* hchange = instr->hydrogen();
4871 Register output = ToRegister(instr->result());
4876 __ SmiTag(output, input);
4880 void LCodeGen::DoSmiUntag(LSmiUntag* instr) {
4882 Register result = ToRegister(instr->result());
4885 if (instr->needs_check()) {
4890 __ SmiUntag(result, input);
4895 void LCodeGen::DoShiftI(LShiftI* instr) {
4896 LOperand* right_op = instr->right();
4900 if (right_op->IsRegister()) {
4902 switch (instr->op()) {
4904 case Token::SAR: __ Asr(result, left, right); break;
4905 case Token::SHL: __ Lsl(result, left, right); break;
4907 __ Lsr(result, left, right);
4908 if (instr->can_deopt()) {
4917 DCHECK(right_op->IsConstantOperand());
4919 if (shift_count == 0) {
4920 if ((instr->op() == Token::SHR) && instr->can_deopt()) {
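// SHR is the only shift that can produce an unrepresentable result here: a
// zero shift leaves a negative input unchanged, and as a uint32 that value
// exceeds kMaxInt, so the (elided) check deoptimizes when the sign bit is
// set.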
4925 switch (instr->op()) {
4926 case Token::ROR: __ Ror(result, left, shift_count); break;
4927 case Token::SAR: __ Asr(result, left, shift_count); break;
4928 case Token::SHL: __ Lsl(result, left, shift_count); break;
4929 case Token::SHR: __ Lsr(result, left, shift_count); break;
4937 void LCodeGen::DoShiftS(LShiftS* instr) {
4938 LOperand* right_op = instr->right();
4940 Register result = ToRegister(instr->result());
4942 if (right_op->IsRegister()) {
4950 switch (instr->op()) {
4954 UseScratchRegisterScope temps(masm());
4955 Register temp = temps.AcquireW();
4956 __ SmiUntag(temp, left);
4957 __ Ror(result.W(), temp.W(), result.W());
4962 __ Asr(result, left, result);
4966 __ Lsl(result, left, result);
4969 __ Lsr(result, left, result);
4971 if (instr->can_deopt()) {
4980 DCHECK(right_op->IsConstantOperand());
4982 if (shift_count == 0) {
4983 if ((instr->op() == Token::SHR) && instr->can_deopt()) {
4986 __ Mov(result, left);
4988 switch (instr->op()) {
4990 __ SmiUntag(result, left);
4991 __ Ror(result.W(), result.W(), shift_count);
4995 __ Asr(result, left, shift_count);
4999 __ Lsl(result, left, shift_count);
5002 __ Lsr(result, left, shift_count);
5012 void LCodeGen::DoDebugBreak(LDebugBreak* instr) {
5013 __ Debug("LDebugBreak", 0, BREAK);
5017 void LCodeGen::DoDeclareGlobals(LDeclareGlobals* instr) {
5020 Register scratch2 = x6;
5021 DCHECK(instr->IsMarkedAsCall());
5026 __ LoadHeapObject(scratch1, instr->hydrogen()->pairs());
5034 PushSafepointRegistersScope scope(this);
5036 __ CallRuntimeSaveDoubles(Runtime::kStackGuard);
5039 DCHECK(instr->HasEnvironment());
5041 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
5045 void LCodeGen::DoStackCheck(LStackCheck* instr) {
5048 DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
5050 virtual void Generate() { codegen()->DoDeferredStackCheck(instr_); }
5051 virtual LInstruction* instr() { return instr_; }
5053 LStackCheck* instr_;
5056 DCHECK(instr->HasEnvironment());
5060 if (instr->hydrogen()->is_function_entry()) {
5063 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex);
5066 PredictableCodeSizeScope predictable(masm_,
5068 DCHECK(instr->context()->IsRegister());
5070 CallCode(isolate()->builtins()->StackCheck(),
5075 DCHECK(instr->hydrogen()->is_backwards_branch());
5077 DeferredStackCheck* deferred_stack_check =
5078 new(zone()) DeferredStackCheck(this, instr);
5079 __ CompareRoot(masm()->StackPointer(), Heap::kStackLimitRootIndex);
5080 __ B(lo, deferred_stack_check->entry());
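// 'lo' is an unsigned below: the stack grows down, so sp under the
// stack-limit root means the guard region is near, and the deferred code
// (DoDeferredStackCheck above) calls Runtime::kStackGuard with registers
// saved.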
5083 __ Bind(instr->done_label());
5084 deferred_stack_check->SetExit(instr->done_label());
5093 void LCodeGen::DoStoreCodeEntry(LStoreCodeEntry* instr) {
5094 Register function = ToRegister(instr->function());
5095 Register code_object = ToRegister(instr->code_object());
5102 void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) {
5103 Register context = ToRegister(instr->context());
5105 Register scratch = ToRegister(instr->temp());
5108 Label skip_assignment;
5110 if (instr->hydrogen()->RequiresHoleCheck()) {
5111 __ Ldr(scratch, target);
5112 if (instr->hydrogen()->DeoptimizesOnHole()) {
5115 __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, &skip_assignment);
5119 __ Str(value, target);
5120 if (instr->hydrogen()->NeedsWriteBarrier()) {
5122 instr->hydrogen()->value()->type().IsHeapObject()
5124 __ RecordWriteContextSlot(context,
5133 __ Bind(&skip_assignment);
5137 void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) {
5142 __ Mov(cell, Operand(instr->hydrogen()->cell().handle()));
5148 if (instr->hydrogen()->RequiresHoleCheck()) {
5149 Register payload = ToRegister(instr->temp2());
5160 void LCodeGen::DoStoreKeyedExternal(LStoreKeyedExternal* instr) {
5161 Register ext_ptr = ToRegister(instr->elements());
5166 bool key_is_smi = instr->hydrogen()->key()->representation().IsSmi();
5167 bool key_is_constant = instr->key()->IsConstantOperand();
5168 int constant_key = 0;
5169 if (key_is_constant) {
5171 constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
5172 if (constant_key & 0xf0000000) {
5173 Abort(kArrayIndexConstantValueTooBig);
5182 key_is_constant, constant_key,
5184 instr->base_offset());
5190 __ Fcvt(dbl_scratch.S(), value);
5191 __ Str(dbl_scratch.S(), dst);
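// Float32 external arrays: the double value is narrowed to single
// precision (Fcvt) in a scratch register before the 32-bit store.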
5199 switch (elements_kind) {
5206 __ Strb(value, dst);
5212 __ Strh(value, dst);
5218 __ Str(value.W(), dst);
5239 void LCodeGen::DoStoreKeyedFixedDouble(LStoreKeyedFixedDouble* instr) {
5240 Register elements = ToRegister(instr->elements());
5244 if (instr->key()->IsConstantOperand()) {
5245 int constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
5246 if (constant_key & 0xf0000000) {
5247 Abort(kArrayIndexConstantValueTooBig);
5249 int offset = instr->base_offset() + constant_key * kDoubleSize;
5252 Register store_base = ToRegister(instr->temp());
5254 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
5256 instr->hydrogen()->elements_kind(),
5257 instr->hydrogen()->representation(),
5258 instr->base_offset());
5261 if (instr->NeedsCanonicalization()) {
5265 __ Str(value, mem_op);
5270 void LCodeGen::DoStoreKeyedFixed(LStoreKeyedFixed* instr) {
5272 Register elements = ToRegister(instr->elements());
5273 Register scratch = no_reg;
5274 Register store_base = no_reg;
5278 if (!instr->key()->IsConstantOperand() ||
5279 instr->hydrogen()->NeedsWriteBarrier()) {
5283 Representation representation = instr->hydrogen()->value()->representation();
5284 if (instr->key()->IsConstantOperand()) {
5285 LConstantOperand* const_operand = LConstantOperand::cast(instr->key());
5286 int offset = instr->base_offset() +
5288 store_base = elements;
5289 if (representation.IsInteger32()) {
5299 store_base = scratch;
5301 bool key_is_tagged = instr->hydrogen()->key()->representation().IsSmi();
5304 instr->hydrogen()->elements_kind(),
5305 representation, instr->base_offset());
5308 __ Store(value, mem_op, representation);
5310 if (instr->hydrogen()->NeedsWriteBarrier()) {
5311 DCHECK(representation.IsTagged());
5313 Register element_addr = scratch;
5315 instr->hydrogen()->value()->type().IsHeapObject()
5318 __ Add(element_addr, mem_op.base(), mem_op.OffsetAsOperand());
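// The write barrier needs the absolute address of the stored slot, so the
// base + offset that formed the MemOperand is rematerialized into a
// register for RecordWrite.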
5321 instr->hydrogen()->PointersToHereCheckForValue());
5326 void LCodeGen::DoStoreKeyedGeneric(LStoreKeyedGeneric* instr) {
5333 CodeFactory::KeyedStoreIC(isolate(), instr->strict_mode()).code();
5338 void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) {
5339 Representation representation = instr->representation();
5341 Register object = ToRegister(instr->object());
5342 HObjectAccess access = instr->hydrogen()->access();
5343 int offset = access.offset();
5345 if (access.IsExternalMemory()) {
5346 DCHECK(!instr->hydrogen()->has_transition());
5347 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
5349 __ Store(value, MemOperand(object, offset), representation);
5353 __ AssertNotSmi(object);
5355 if (representation.IsDouble()) {
5356 DCHECK(access.IsInobject());
5357 DCHECK(!instr->hydrogen()->has_transition());
5358 DCHECK(!instr->hydrogen()->NeedsWriteBarrier());
5366 DCHECK(!representation.IsSmi() ||
5367 !instr->value()->IsConstantOperand() ||
5370 if (instr->hydrogen()->has_transition()) {
5371 Handle<Map> transition = instr->hydrogen()->transition_map();
5372 AddDeprecationDependency(transition);
5374 Register new_map_value = ToRegister(instr->temp0());
5375 __ Mov(new_map_value, Operand(transition));
5377 if (instr->hydrogen()->NeedsWriteBarrierForMap()) {
5379 __ RecordWriteForMap(object,
5388 Register destination;
5389 if (access.IsInobject()) {
5390 destination = object;
5394 destination = temp0;
5397 if (representation.IsSmi() &&
5398 instr->hydrogen()->value()->representation().IsInteger32()) {
5403 __ AssertSmi(temp0);
5406 if (destination.Is(temp0)) {
5407 DCHECK(!access.IsInobject());
5418 if (instr->hydrogen()->NeedsWriteBarrier()) {
5419 __ RecordWriteField(destination,
5426 instr->hydrogen()->SmiCheckForWriteBarrier(),
5427 instr->hydrogen()->PointersToHereCheckForValue());
5432 void LCodeGen::DoStoreNamedGeneric(LStoreNamedGeneric* instr) {
5443 void LCodeGen::DoStringAdd(LStringAdd* instr) {
5447 StringAddStub stub(isolate(),
5448 instr->hydrogen()->flags(),
5449 instr->hydrogen()->pretenure_flag());
5454 void LCodeGen::DoStringCharCodeAt(LStringCharCodeAt* instr) {
5457 DeferredStringCharCodeAt(LCodeGen* codegen, LStringCharCodeAt* instr)
5459 virtual void Generate() { codegen()->DoDeferredStringCharCodeAt(instr_); }
5460 virtual LInstruction* instr() { return instr_; }
5462 LStringCharCodeAt* instr_;
5465 DeferredStringCharCodeAt* deferred =
5466 new(zone()) DeferredStringCharCodeAt(this, instr);
5473 __ Bind(deferred->exit());
5478 Register string = ToRegister(instr->string());
5479 Register result = ToRegister(instr->result());
5486 PushSafepointRegistersScope scope(this);
5491 __ SmiTagAndPush(index);
5497 __ StoreToSafepointRegisterSlot(x0, result);
5501 void LCodeGen::DoStringCharFromCode(LStringCharFromCode* instr) {
5504 DeferredStringCharFromCode(LCodeGen* codegen, LStringCharFromCode* instr)
5506 virtual void Generate() { codegen()->DoDeferredStringCharFromCode(instr_); }
5507 virtual LInstruction* instr() { return instr_; }
5509 LStringCharFromCode* instr_;
5512 DeferredStringCharFromCode* deferred =
5513 new(zone()) DeferredStringCharFromCode(this, instr);
5515 DCHECK(instr->hydrogen()->value()->representation().IsInteger32());
5517 Register result = ToRegister(instr->result());
5520 __ B(hi, deferred->entry());
5521 __ LoadRoot(result, Heap::kSingleCharacterStringCacheRootIndex);
5524 __ CompareRoot(result, Heap::kUndefinedValueRootIndex);
5525 __ B(eq, deferred->entry());
5526 __ Bind(deferred->exit());
5531 Register char_code = ToRegister(instr->char_code());
5532 Register result = ToRegister(instr->result());
5539 PushSafepointRegistersScope scope(this);
5540 __ SmiTagAndPush(char_code);
5542 __ StoreToSafepointRegisterSlot(x0, result);
5546 void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) {
5550 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
5560 void LCodeGen::DoSubI(LSubI* instr) {
5567 __ Subs(result, left, right);
5570 __ Sub(result, left, right);
5575 void LCodeGen::DoSubS(LSubS* instr) {
5577 Register result = ToRegister(instr->result());
5579 Operand right = ToOperand(instr->right());
5581 __ Subs(result, left, right);
5584 __ Sub(result, left, right);
5599 if (instr->truncating()) {
5604 __ JumpIfNotHeapNumber(input, &check_bools);
5607 __ TruncateHeapNumberToI(output, input);
5610 __ Bind(&check_bools);
5614 __ LoadTrueFalseRoots(true_root, false_root);
5615 __ Cmp(input, true_root);
5616 __ Cset(output, eq);
5623 "not a heap number/undefined/true/false");
5633 __ TryRepresentDoubleAsInt32(output, dbl_scratch1, dbl_scratch2);
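// Non-truncating conversion: the heap number must round-trip exactly
// through int32. TryRepresentDoubleAsInt32 leaves the flags set for the
// deoptimization check that follows in the full source.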
5647 void LCodeGen::DoTaggedToI(LTaggedToI* instr) {
5650 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
5652 virtual void Generate() {
5653 codegen()->DoDeferredTaggedToI(instr_, instr_->value(), instr_->temp1(),
5657 virtual LInstruction* instr() { return instr_; }
5663 Register output = ToRegister(instr->result());
5665 if (instr->hydrogen()->value()->representation().IsSmi()) {
5666 __ SmiUntag(output, input);
5668 DeferredTaggedToI* deferred = new(zone()) DeferredTaggedToI(this, instr);
5670 __ JumpIfNotSmi(input, deferred->entry());
5671 __ SmiUntag(output, input);
5672 __ Bind(deferred->exit());
5677 void LCodeGen::DoThisFunction(LThisFunction* instr) {
5678 Register result = ToRegister(instr->result());
5683 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
5687 CallRuntime(Runtime::kToFastProperties, 1, instr);
5691 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
5699 int literal_offset =
5701 __ LoadObject(x7, instr->hydrogen()->literals());
5703 __ JumpIfNotRoot(x1, Heap::kUndefinedValueRootIndex, &materialized);
5707 __ Mov(x12, Operand(Smi::FromInt(instr->hydrogen()->literal_index())));
5708 __ Mov(x11, Operand(instr->hydrogen()->pattern()));
5709 __ Mov(x10, Operand(instr->hydrogen()->flags()));
5710 __ Push(x7, x12, x11, x10);
5711 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
5714 __ Bind(&materialized);
5716 Label allocated, runtime_allocate;
5721 __ Bind(&runtime_allocate);
5724 CallRuntime(Runtime::kAllocateInNewSpace, 1, instr);
5727 __ Bind(&allocated);
5733 void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
5734 Register object = ToRegister(instr->object());
5736 Handle<Map> from_map = instr->original_map();
5737 Handle<Map> to_map = instr->transitioned_map();
5741 Label not_applicable;
5745 Register new_map = ToRegister(instr->temp2());
5747 __ Mov(new_map, Operand(to_map));
5754 UseScratchRegisterScope temps(masm());
5757 __ CheckMap(object, temps.AcquireX(), from_map, &not_applicable,
5762 PushSafepointRegistersScope scope(this);
5763 __ Mov(x1, Operand(to_map));
5764 bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
5765 TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
5768 instr->pointer_map(), 0, Safepoint::kLazyDeopt);
5770 __ Bind(&not_applicable);
5774 void LCodeGen::DoTrapAllocationMemento(LTrapAllocationMemento* instr) {
5775 Register object = ToRegister(instr->object());
5779 Label no_memento_found;
5780 __ TestJSArrayForAllocationMemento(object, temp1, temp2, &no_memento_found);
5782 __ Bind(&no_memento_found);
5786 void LCodeGen::DoTruncateDoubleToIntOrSmi(LTruncateDoubleToIntOrSmi* instr) {
5788 Register result = ToRegister(instr->result());
5789 __ TruncateDoubleToI(result, input);
5790 if (instr->tag_result()) {
5791 __ SmiTag(result, result);
5796 void LCodeGen::DoTypeof(LTypeof* instr) {
5803 void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) {
5804 Handle<String> type_name = instr->type_literal();
5805 Label* true_label = instr->TrueLabel(chunk_);
5806 Label* false_label = instr->FalseLabel(chunk_);
5809 Factory* factory = isolate()->factory();
5811 __ JumpIfSmi(value, true_label);
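// This appears to be the "number" branch of typeof (its selecting
// comparison is elided): any smi answers true immediately, and the
// heap-number case is settled below with map-based jumps.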
5813 int true_block = instr->TrueDestination(chunk_);
5814 int false_block = instr->FalseDestination(chunk_);
5815 int next_block = GetNextEmittedBlock();
5817 if (true_block == false_block) {
5819 } else if (true_block == next_block) {
5820 __ JumpIfNotHeapNumber(value, chunk_->GetAssemblyLabel(false_block));
5822 __ JumpIfHeapNumber(value, chunk_->GetAssemblyLabel(true_block));
5823 if (false_block != next_block) {
5824 __ B(chunk_->GetAssemblyLabel(false_block));
5828 } else if (String::Equals(type_name, factory->string_string())) {
5831 Register scratch = ToRegister(instr->temp2());
5833 __ JumpIfSmi(value, false_label);
5834 __ JumpIfObjectType(
5839 } else if (String::Equals(type_name, factory->symbol_string())) {
5842 Register scratch = ToRegister(instr->temp2());
5844 __ JumpIfSmi(value, false_label);
5848 } else if (String::Equals(type_name, factory->boolean_string())) {
5849 __ JumpIfRoot(value, Heap::kTrueValueRootIndex, true_label);
5850 __ CompareRoot(value, Heap::kFalseValueRootIndex);
5853 } else if (String::Equals(type_name, factory->undefined_string())) {
5855 Register scratch = ToRegister(instr->temp1());
5857 __ JumpIfRoot(value, Heap::kUndefinedValueRootIndex, true_label);
5858 __ JumpIfSmi(value, false_label);
5864 } else if (String::Equals(type_name, factory->function_string())) {
5869 __ JumpIfSmi(value, false_label);
5874 } else if (String::Equals(type_name, factory->object_string())) {
5877 Register scratch = ToRegister(instr->temp2());
5879 __ JumpIfSmi(value, false_label);
5880 __ JumpIfRoot(value, Heap::kNullValueRootIndex, true_label);
5881 __ JumpIfObjectType(value, map, scratch,
5884 __ B(gt, false_label);
5895 void LCodeGen::DoUint32ToDouble(LUint32ToDouble* instr) {
5900 void LCodeGen::DoCheckMapValue(LCheckMapValue* instr) {
5901 Register object = ToRegister(instr->value());
5910 void LCodeGen::DoWrapReceiver(LWrapReceiver* instr) {
5911 Register receiver = ToRegister(instr->receiver());
5912 Register function = ToRegister(instr->function());
5913 Register result = ToRegister(instr->result());
5918 Label global_object, done, copy_receiver;
5920 if (!instr->hydrogen()->known_function()) {
5936 __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, &global_object);
5937 __ JumpIfRoot(receiver, Heap::kUndefinedValueRootIndex, &global_object);
5942 __ B(ge, &copy_receiver);
5943 Deoptimize(instr, "not a JavaScript object");
5945 __ Bind(&global_object);
5951 __ Bind(&copy_receiver);
5952 __ Mov(result, receiver);
5961 PushSafepointRegistersScope scope(this);
5965 __ CallRuntimeSaveDoubles(Runtime::kLoadMutableDouble);
5967 instr->pointer_map(), 2, Safepoint::kNoLazyDeopt);
5968 __ StoreToSafepointRegisterSlot(x0, result);
5972 void LCodeGen::DoLoadFieldByIndex(LLoadFieldByIndex* instr) {
5975 DeferredLoadMutableDouble(LCodeGen* codegen,
5976 LLoadFieldByIndex* instr,
5987 codegen()->DoDeferredLoadMutableDouble(instr_, result_, object_, index_);
5989 virtual LInstruction* instr() OVERRIDE { return instr_; }
5991 LLoadFieldByIndex* instr_;
5996 Register object = ToRegister(instr->object());
5998 Register result = ToRegister(instr->result());
6000 __ AssertSmi(index);
6002 DeferredLoadMutableDouble* deferred;
6003 deferred = new(zone()) DeferredLoadMutableDouble(
6004 this, instr, result, object, index);
6006 Label out_of_object, done;
6008 __ TestAndBranchIfAnySet(
6009 index, reinterpret_cast<uint64_t>(Smi::FromInt(1)), deferred->entry());
6010 __ Mov(index, Operand(index, ASR, 1));
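// LoadFieldByIndex convention (as in the full source): bit 0 of the index's
// smi payload (Smi::FromInt(1), i.e. 1 << kSmiShift on arm64) flags a field
// holding a mutable HeapNumber, which must be read on the deferred path.
// The ASR strips that flag bit, and a negative index (branch below) selects
// the out-of-object properties store.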
6013 __ B(lt, &out_of_object);
6021 __ Bind(&out_of_object);
6027 __ Bind(deferred->exit());
6032 void LCodeGen::DoStoreFrameContext(LStoreFrameContext* instr) {
6033 Register context = ToRegister(instr->context());
6038 void LCodeGen::DoAllocateBlockContext(LAllocateBlockContext* instr) {
6039 Handle<ScopeInfo> scope_info = instr->scope_info();
6042 CallRuntime(Runtime::kPushBlockContext, 2, instr);