// Constructor: allocates the selector's zone from the sequence's isolate,
// keeps the source-position table, and sizes the defined_/used_ bit vectors
// to the graph's node count (all nodes start neither defined nor used).
// NOTE(review): several member initializers are missing from this extraction
// (original lines 18, 20, 22-23 absent) — verify against the upstream file.
16 InstructionSelector::InstructionSelector(InstructionSequence* sequence,
17 SourcePositionTable* source_positions,
19 : zone_(sequence->isolate()),
21 source_positions_(source_positions),
24 instructions_(zone()),
25 defined_(graph()->NodeCount(),
false, zone()),
26 used_(graph()->NodeCount(),
false, zone()) {}
// Entry point for instruction selection over the scheduled graph.
// Visible phases in this extraction:
//  1. For every loop-header block, walk its phis and mark phi inputs so
//     they are not skipped by the used/defined bookkeeping.
//  2. Copy each block's selected instruction range [code_end_, code_start_)
//     into the sequence; instructions_ holds them in reverse, hence the
//     backwards `while (start-- > end)` copy between StartBlock/EndBlock.
// NOTE(review): the per-block visitation pass between these phases is
// missing from this extraction (original lines ~46-55 absent).
29 void InstructionSelector::SelectInstructions() {
33 BasicBlock* block = *
i;
34 if (!block->IsLoopHeader())
continue;
37 for (BasicBlock::const_iterator j = block->begin(); j != block->end();
40 if (phi->opcode() != IrOpcode::kPhi)
continue;
43 Node::Inputs inputs = phi->inputs();
44 for (
InputIter k = inputs.begin(); k != inputs.end(); ++k) {
57 BasicBlock* block = *
i;
58 size_t end = block->code_end_;
59 size_t start = block->code_start_;
60 sequence()->StartBlock(block);
61 while (start-- > end) {
62 sequence()->AddInstruction(instructions_[start], block);
64 sequence()->EndBlock(block);
// Emit overload: zero inputs. A NULL |output| means the instruction has no
// outputs; otherwise exactly one. Delegates to the general Emit below.
// NOTE(review): the signature's first lines (original 69/71) are missing.
70 InstructionOperand* output,
72 InstructionOperand** temps) {
73 size_t output_count = output ==
NULL ? 0 : 1;
74 return Emit(opcode, output_count, &output, 0,
NULL, temp_count, temps);
// Emit overload: one input |a|. NULL output => zero outputs.
// NOTE(review): signature start (original line ~78) missing from extraction.
79 InstructionOperand* output,
80 InstructionOperand* a,
size_t temp_count,
81 InstructionOperand** temps) {
82 size_t output_count = output ==
NULL ? 0 : 1;
83 return Emit(opcode, output_count, &output, 1, &a, temp_count, temps);
// Emit overload: two inputs packed into a local array.
// NOTE(review): signature start and the input_count definition (original
// lines ~87 and 94, presumably arraysize(inputs)) are missing here.
88 InstructionOperand* output,
89 InstructionOperand* a,
90 InstructionOperand* b,
size_t temp_count,
91 InstructionOperand** temps) {
92 size_t output_count = output ==
NULL ? 0 : 1;
93 InstructionOperand* inputs[] = {a, b};
95 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
// Emit overload: three inputs packed into a local array.
// NOTE(review): signature start and input_count definition missing
// (original lines ~100 and 108).
101 InstructionOperand* output,
102 InstructionOperand* a,
103 InstructionOperand* b,
104 InstructionOperand* c,
size_t temp_count,
105 InstructionOperand** temps) {
106 size_t output_count = output ==
NULL ? 0 : 1;
107 InstructionOperand* inputs[] = {a, b, c};
109 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
// Emit overload: four inputs packed into a local array.
// NOTE(review): input_count definition (original line 120) and the trailing
// argument/closing brace are missing from this extraction.
114 Instruction* InstructionSelector::Emit(
115 InstructionCode opcode, InstructionOperand* output, InstructionOperand* a,
116 InstructionOperand* b, InstructionOperand* c, InstructionOperand* d,
117 size_t temp_count, InstructionOperand** temps) {
118 size_t output_count = output ==
NULL ? 0 : 1;
119 InstructionOperand* inputs[] = {a, b, c, d};
121 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
// General Emit: allocates a new Instruction in the instruction zone with the
// given outputs/inputs/temps. All other overloads funnel here.
// NOTE(review): the assignment target for Instruction::New (original line
// 130) and the call to Emit(instr)/return are missing from this extraction.
126 Instruction* InstructionSelector::Emit(
127 InstructionCode opcode,
size_t output_count, InstructionOperand** outputs,
128 size_t input_count, InstructionOperand** inputs,
size_t temp_count,
129 InstructionOperand** temps) {
131 Instruction::New(instruction_zone(), opcode, output_count, outputs,
132 input_count, inputs, temp_count, temps);
// Appends a fully-built instruction to the current block's buffer.
// NOTE(review): `return instr;` and the closing brace (original lines
// 139-140) are missing from this extraction.
137 Instruction* InstructionSelector::Emit(Instruction* instr) {
138 instructions_.push_back(instr);
143 bool InstructionSelector::IsNextInAssemblyOrder(
const BasicBlock* block)
const {
144 return block->rpo_number_ == (current_block_->rpo_number_ + 1) &&
145 block->deferred_ == current_block_->deferred_;
149 bool InstructionSelector::CanCover(Node* user, Node* node)
const {
150 return node->OwnedBy(user) &&
151 schedule()->block(node) == schedule()->block(user);
// Per-node "defined" / "used" bookkeeping over the defined_/used_ vectors
// initialized in the constructor. Visible invariant: any op without the
// kEliminatable property is unconditionally considered used.
// NOTE(review): the bodies of IsDefined/MarkAsDefined/MarkAsUsed and the
// remainder of IsUsed are missing from this extraction.
155 bool InstructionSelector::IsDefined(Node* node)
const {
164 void InstructionSelector::MarkAsDefined(Node* node) {
173 bool InstructionSelector::IsUsed(Node* node)
const {
174 if (!node->op()->HasProperty(Operator::kEliminatable))
return true;
182 void InstructionSelector::MarkAsUsed(Node* node) {
// Double/reference representation flags, stored per node id on the
// InstructionSequence. MarkAsDouble asserts a node cannot be both double and
// a tagged reference. MarkAsRepresentation maps a MachineType onto these
// flags; only the reference branch is visible in this extraction.
191 bool InstructionSelector::IsDouble(
const Node* node)
const {
193 return sequence()->IsDouble(node->id());
197 void InstructionSelector::MarkAsDouble(Node* node) {
199 DCHECK(!IsReference(node));
200 sequence()->MarkAsDouble(node->id());
204 bool InstructionSelector::IsReference(
const Node* node)
const {
206 return sequence()->IsReference(node->id());
210 void InstructionSelector::MarkAsReference(Node* node) {
213 sequence()->MarkAsReference(node->id());
217 void InstructionSelector::MarkAsRepresentation(
MachineType rep, Node* node) {
225 MarkAsReference(node);
// CallBuffer constructor: captures the call descriptor and frame-state
// descriptor, zone-allocates the operand vectors, and reserves space for one
// output per descriptor return value.
// NOTE(review): several initializers (descriptor, outputs, pushed_nodes) are
// missing from this extraction.
235 CallBuffer::CallBuffer(
Zone* zone, CallDescriptor* d,
238 frame_state_descriptor(frame_desc),
241 instruction_args(zone),
244 outputs.reserve(d->ReturnCount());
// Populates |buffer| for a call node:
//  - collects one output operand per descriptor return value, at the
//    linkage-prescribed location, marking its representation;
//  - chooses the callee operand by descriptor kind: code-object calls may use
//    a HeapConstant immediate (if |call_code_immediate|), address calls may
//    use an Int32/Int64 constant immediate (if |call_address_immediate|),
//    JS-function calls always use the descriptor's fixed input location;
//  - appends frame-state inputs (state id) when present;
//  - walks the remaining call inputs (skipping input 0, the callee already
//    handled above), placing each at its descriptor location and growing
//    pushed_nodes when a stack index exceeds the current vector size.
// NOTE(review): many interior lines are missing from this extraction; the
// comments above describe only the visible fragments.
252 void InstructionSelector::InitializeCallBuffer(Node* call,
CallBuffer* buffer,
253 bool call_code_immediate,
254 bool call_address_immediate) {
274 buffer->
descriptor->GetReturnType(
static_cast<int>(
i));
275 LinkageLocation location =
276 buffer->
descriptor->GetReturnLocation(
static_cast<int>(
i));
277 MarkAsRepresentation(type, output);
278 buffer->
outputs.push_back(g.DefineAsLocation(output, location, type));
284 Node* callee = call->InputAt(0);
286 case CallDescriptor::kCallCodeObject:
288 (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant)
289 ? g.UseImmediate(callee)
290 : g.UseRegister(callee));
292 case CallDescriptor::kCallAddress:
294 (call_address_immediate &&
295 (callee->opcode() == IrOpcode::kInt32Constant ||
296 callee->opcode() == IrOpcode::kInt64Constant))
297 ? g.UseImmediate(callee)
298 : g.UseRegister(callee));
300 case CallDescriptor::kCallJSFunction:
302 g.UseLocation(callee, buffer->
descriptor->GetInputLocation(0),
313 InstructionSequence::StateId state_id =
318 call->InputAt(
static_cast<int>(buffer->
descriptor->InputCount()));
325 size_t input_count =
static_cast<size_t>(buffer->
input_count());
332 int pushed_count = 0;
333 for (
size_t index = 0; index < input_count; ++iter, ++index) {
334 DCHECK(iter != call->inputs().end());
335 DCHECK(index ==
static_cast<size_t>(iter.index()));
336 DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState);
337 if (index == 0)
continue;
338 InstructionOperand* op =
339 g.UseLocation(*iter, buffer->
descriptor->GetInputLocation(index),
343 if (
static_cast<size_t>(stack_index) >= buffer->
pushed_nodes.size()) {
354 DCHECK(
static_cast<size_t>(input_count) ==
// Selects instructions for one basic block. Nodes are visited in reverse
// schedule order, appending to instructions_; each covered node's span and
// finally the whole block's span are std::reverse'd back into forward order.
// code_start_/code_end_ record the block's index range within instructions_
// (note: code_end_ is set to the index captured BEFORE visitation, since the
// block's instructions are copied out backwards in SelectInstructions).
// current_block_ is reset to NULL when the block is done.
// NOTE(review): the block-end/control visitation lines between the two
// reverse calls are missing from this extraction.
360 void InstructionSelector::VisitBlock(BasicBlock* block) {
362 current_block_ = block;
363 int current_block_end =
static_cast<int>(instructions_.size());
368 std::reverse(instructions_.begin() + current_block_end, instructions_.end());
372 for (BasicBlock::reverse_iterator
i = block->rbegin();
i != block->rend();
376 if (!IsUsed(node) || IsDefined(node))
continue;
379 size_t current_node_end = instructions_.size();
381 std::reverse(instructions_.begin() + current_node_end, instructions_.end());
386 block->code_end_ = current_block_end;
387 block->code_start_ =
static_cast<int>(instructions_.size());
389 current_block_ =
NULL;
// Debug helper body: asserts a block contains no phi nodes. Presumably the
// body of CheckNoPhis(const BasicBlock*) — its signature line is missing
// from this extraction; TODO confirm against upstream.
396 for (BasicBlock::const_iterator
i = block->begin();
i != block->end(); ++
i) {
397 const Node* node = *
i;
398 CHECK_NE(IrOpcode::kPhi, node->opcode());
// Lowers a block's control-flow exit. Visible cases: goto (single
// successor), branch (two successors; a branch whose targets coincide
// degenerates to a goto), return (unwraps the kReturn node's value input
// when present), and throw.
404 void InstructionSelector::VisitControl(BasicBlock* block) {
405 Node* input = block->control_input_;
406 switch (block->control_) {
408 return VisitGoto(block->SuccessorAt(0));
410 DCHECK_EQ(IrOpcode::kBranch, input->opcode());
411 BasicBlock* tbranch = block->SuccessorAt(0);
412 BasicBlock* fbranch = block->SuccessorAt(1);
417 if (tbranch == fbranch)
return VisitGoto(tbranch);
418 return VisitBranch(input, tbranch, fbranch);
422 Node* value = (input !=
NULL && input->opcode() == IrOpcode::kReturn)
425 return VisitReturn(value);
428 return VisitThrow(input);
// Central dispatch: lowers a single node by opcode. Before visiting, emits a
// SourcePositionInstruction when positions are enabled (or for calls).
// Representation bookkeeping happens here: results are marked double /
// reference / by MachineType before the per-opcode Visit* is called.
// Control-flow opcodes (Start/Loop/Branch/IfTrue/IfFalse/EffectPhi/Merge)
// produce no code here — they are handled by VisitControl/block structure.
// An unexpected opcode is a fatal error.
441 void InstructionSelector::VisitNode(Node* node) {
443 SourcePosition source_position = source_positions_->GetSourcePosition(node);
444 if (!source_position.IsUnknown()) {
445 DCHECK(!source_position.IsInvalid());
446 if (FLAG_turbo_source_positions || node->opcode() == IrOpcode::kCall) {
447 Emit(SourcePositionInstruction::New(instruction_zone(), source_position));
450 switch (node->opcode()) {
451 case IrOpcode::kStart:
452 case IrOpcode::kLoop:
454 case IrOpcode::kBranch:
455 case IrOpcode::kIfTrue:
456 case IrOpcode::kIfFalse:
457 case IrOpcode::kEffectPhi:
458 case IrOpcode::kMerge:
461 case IrOpcode::kFinish:
462 return MarkAsReference(node), VisitFinish(node);
463 case IrOpcode::kParameter: {
464 MachineType type = linkage()->GetParameterType(OpParameter<int>(node));
465 MarkAsRepresentation(type, node);
466 return VisitParameter(node);
468 case IrOpcode::kPhi: {
470 MarkAsRepresentation(type, node);
471 return VisitPhi(node);
473 case IrOpcode::kProjection:
474 return VisitProjection(node);
475 case IrOpcode::kInt32Constant:
476 case IrOpcode::kInt64Constant:
477 case IrOpcode::kExternalConstant:
478 return VisitConstant(node);
479 case IrOpcode::kFloat32Constant:
480 return MarkAsDouble(node), VisitConstant(node);
481 case IrOpcode::kFloat64Constant:
482 return MarkAsDouble(node), VisitConstant(node);
483 case IrOpcode::kHeapConstant:
484 case IrOpcode::kNumberConstant:
486 return MarkAsReference(node), VisitConstant(node);
487 case IrOpcode::kCall:
489 case IrOpcode::kFrameState:
490 case IrOpcode::kStateValues:
// Loads mark their result with the load's representation before visiting.
492 case IrOpcode::kLoad: {
494 MarkAsRepresentation(rep, node);
495 return VisitLoad(node);
497 case IrOpcode::kStore:
498 return VisitStore(node);
499 case IrOpcode::kWord32And:
500 return VisitWord32And(node);
501 case IrOpcode::kWord32Or:
502 return VisitWord32Or(node);
503 case IrOpcode::kWord32Xor:
504 return VisitWord32Xor(node);
505 case IrOpcode::kWord32Shl:
506 return VisitWord32Shl(node);
507 case IrOpcode::kWord32Shr:
508 return VisitWord32Shr(node);
509 case IrOpcode::kWord32Sar:
510 return VisitWord32Sar(node);
511 case IrOpcode::kWord32Ror:
512 return VisitWord32Ror(node);
513 case IrOpcode::kWord32Equal:
514 return VisitWord32Equal(node);
515 case IrOpcode::kWord64And:
516 return VisitWord64And(node);
517 case IrOpcode::kWord64Or:
518 return VisitWord64Or(node);
519 case IrOpcode::kWord64Xor:
520 return VisitWord64Xor(node);
521 case IrOpcode::kWord64Shl:
522 return VisitWord64Shl(node);
523 case IrOpcode::kWord64Shr:
524 return VisitWord64Shr(node);
525 case IrOpcode::kWord64Sar:
526 return VisitWord64Sar(node);
527 case IrOpcode::kWord64Ror:
528 return VisitWord64Ror(node);
529 case IrOpcode::kWord64Equal:
530 return VisitWord64Equal(node);
531 case IrOpcode::kInt32Add:
532 return VisitInt32Add(node);
533 case IrOpcode::kInt32AddWithOverflow:
534 return VisitInt32AddWithOverflow(node);
535 case IrOpcode::kInt32Sub:
536 return VisitInt32Sub(node);
537 case IrOpcode::kInt32SubWithOverflow:
538 return VisitInt32SubWithOverflow(node);
539 case IrOpcode::kInt32Mul:
540 return VisitInt32Mul(node);
541 case IrOpcode::kInt32Div:
542 return VisitInt32Div(node);
543 case IrOpcode::kInt32UDiv:
544 return VisitInt32UDiv(node);
545 case IrOpcode::kInt32Mod:
546 return VisitInt32Mod(node);
547 case IrOpcode::kInt32UMod:
548 return VisitInt32UMod(node);
549 case IrOpcode::kInt32LessThan:
550 return VisitInt32LessThan(node);
551 case IrOpcode::kInt32LessThanOrEqual:
552 return VisitInt32LessThanOrEqual(node);
553 case IrOpcode::kUint32LessThan:
554 return VisitUint32LessThan(node);
555 case IrOpcode::kUint32LessThanOrEqual:
556 return VisitUint32LessThanOrEqual(node);
557 case IrOpcode::kInt64Add:
558 return VisitInt64Add(node);
559 case IrOpcode::kInt64Sub:
560 return VisitInt64Sub(node);
561 case IrOpcode::kInt64Mul:
562 return VisitInt64Mul(node);
563 case IrOpcode::kInt64Div:
564 return VisitInt64Div(node);
565 case IrOpcode::kInt64UDiv:
566 return VisitInt64UDiv(node);
567 case IrOpcode::kInt64Mod:
568 return VisitInt64Mod(node);
569 case IrOpcode::kInt64UMod:
570 return VisitInt64UMod(node);
571 case IrOpcode::kInt64LessThan:
572 return VisitInt64LessThan(node);
573 case IrOpcode::kInt64LessThanOrEqual:
574 return VisitInt64LessThanOrEqual(node);
// Conversions producing doubles are marked double before visiting.
575 case IrOpcode::kChangeFloat32ToFloat64:
576 return MarkAsDouble(node), VisitChangeFloat32ToFloat64(node);
577 case IrOpcode::kChangeInt32ToFloat64:
578 return MarkAsDouble(node), VisitChangeInt32ToFloat64(node);
579 case IrOpcode::kChangeUint32ToFloat64:
580 return MarkAsDouble(node), VisitChangeUint32ToFloat64(node);
581 case IrOpcode::kChangeFloat64ToInt32:
582 return VisitChangeFloat64ToInt32(node);
583 case IrOpcode::kChangeFloat64ToUint32:
584 return VisitChangeFloat64ToUint32(node);
585 case IrOpcode::kChangeInt32ToInt64:
586 return VisitChangeInt32ToInt64(node);
587 case IrOpcode::kChangeUint32ToUint64:
588 return VisitChangeUint32ToUint64(node);
589 case IrOpcode::kTruncateFloat64ToFloat32:
590 return MarkAsDouble(node), VisitTruncateFloat64ToFloat32(node);
591 case IrOpcode::kTruncateFloat64ToInt32:
592 return VisitTruncateFloat64ToInt32(node);
593 case IrOpcode::kTruncateInt64ToInt32:
594 return VisitTruncateInt64ToInt32(node);
595 case IrOpcode::kFloat64Add:
596 return MarkAsDouble(node), VisitFloat64Add(node);
597 case IrOpcode::kFloat64Sub:
598 return MarkAsDouble(node), VisitFloat64Sub(node);
599 case IrOpcode::kFloat64Mul:
600 return MarkAsDouble(node), VisitFloat64Mul(node);
601 case IrOpcode::kFloat64Div:
602 return MarkAsDouble(node), VisitFloat64Div(node);
603 case IrOpcode::kFloat64Mod:
604 return MarkAsDouble(node), VisitFloat64Mod(node);
605 case IrOpcode::kFloat64Sqrt:
606 return MarkAsDouble(node), VisitFloat64Sqrt(node);
// Float64 comparisons yield booleans, so no double marking.
607 case IrOpcode::kFloat64Equal:
608 return VisitFloat64Equal(node);
609 case IrOpcode::kFloat64LessThan:
610 return VisitFloat64LessThan(node);
611 case IrOpcode::kFloat64LessThanOrEqual:
612 return VisitFloat64LessThanOrEqual(node);
// Any opcode not handled above is a selector bug: abort with diagnostics.
614 V8_Fatal(__FILE__, __LINE__,
"Unexpected operator #%d:%s @ node #%d",
615 node->opcode(), node->op()->mnemonic(), node->id());
620 #if V8_TURBOFAN_BACKEND
// Word32Equal: an equality against the constant 0 is strength-reduced to a
// plain Word32Test of the other operand; otherwise a full compare with an
// kEqual flags continuation.
622 void InstructionSelector::VisitWord32Equal(Node* node) {
623 FlagsContinuation cont(
kEqual, node);
625 if (m.right().Is(0)) {
626 return VisitWord32Test(m.left().node(), &cont);
628 VisitWord32Compare(node, &cont);
// Signed/unsigned 32-bit relational comparisons: each builds a flags
// continuation (constructor lines missing from this extraction — presumably
// kSignedLessThan / kSignedLessThanOrEqual / kUnsignedLessThan /
// kUnsignedLessThanOrEqual respectively; TODO confirm) and delegates to the
// architecture-specific VisitWord32Compare.
632 void InstructionSelector::VisitInt32LessThan(Node* node) {
634 VisitWord32Compare(node, &cont);
638 void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
640 VisitWord32Compare(node, &cont);
644 void InstructionSelector::VisitUint32LessThan(Node* node) {
646 VisitWord32Compare(node, &cont);
650 void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
652 VisitWord32Compare(node, &cont);
// Word64Equal: same zero-compare strength reduction as the 32-bit variant,
// routed to the 64-bit test/compare helpers.
656 void InstructionSelector::VisitWord64Equal(Node* node) {
657 FlagsContinuation cont(
kEqual, node);
659 if (m.right().Is(0)) {
660 return VisitWord64Test(m.left().node(), &cont);
662 VisitWord64Compare(node, &cont);
// Add/Sub-with-overflow: if the node has a live overflow projection
// (projection index 1), select with a continuation that defines that
// projection (constructor line missing from extraction); otherwise use an
// empty continuation so only the value result is produced.
666 void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
667 if (Node* ovf = node->FindProjection(1)) {
669 return VisitInt32AddWithOverflow(node, &cont);
671 FlagsContinuation cont;
672 VisitInt32AddWithOverflow(node, &cont);
676 void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
677 if (Node* ovf = node->FindProjection(1)) {
679 return VisitInt32SubWithOverflow(node, &cont);
681 FlagsContinuation cont;
682 VisitInt32SubWithOverflow(node, &cont);
// 64-bit signed relational comparisons; continuation construction missing
// from this extraction (presumably signed less-than conditions).
686 void InstructionSelector::VisitInt64LessThan(Node* node) {
688 VisitWord64Compare(node, &cont);
692 void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
694 VisitWord64Compare(node, &cont);
// Double -> int32 truncation lowers to the architecture-independent
// kArchTruncateDoubleToI instruction: fresh output register, input in a
// register.
698 void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
699 OperandGenerator g(
this);
700 Emit(kArchTruncateDoubleToI, g.DefineAsRegister(node),
701 g.UseRegister(node->InputAt(0)));
// Float64 comparisons: build a flags continuation (constructor lines missing
// from this extraction — presumably unordered-equal / unordered-less-than
// conditions) and delegate to VisitFloat64Compare.
705 void InstructionSelector::VisitFloat64Equal(Node* node) {
707 VisitFloat64Compare(node, &cont);
711 void InstructionSelector::VisitFloat64LessThan(Node* node) {
713 VisitFloat64Compare(node, &cont);
717 void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
719 VisitFloat64Compare(node, &cont);
725 #if V8_TARGET_ARCH_32_BIT && V8_TURBOFAN_BACKEND
// 32-bit targets (guarded by the V8_TARGET_ARCH_32_BIT section above): all
// 64-bit integer operations are unsupported and abort via UNIMPLEMENTED().
// The three Change/Truncate visitors at the end have their bodies missing
// from this extraction (presumably also UNIMPLEMENTED; TODO confirm).
727 void InstructionSelector::VisitWord64And(Node* node) {
UNIMPLEMENTED(); }
730 void InstructionSelector::VisitWord64Or(Node* node) {
UNIMPLEMENTED(); }
733 void InstructionSelector::VisitWord64Xor(Node* node) {
UNIMPLEMENTED(); }
736 void InstructionSelector::VisitWord64Shl(Node* node) {
UNIMPLEMENTED(); }
739 void InstructionSelector::VisitWord64Shr(Node* node) {
UNIMPLEMENTED(); }
742 void InstructionSelector::VisitWord64Sar(Node* node) {
UNIMPLEMENTED(); }
745 void InstructionSelector::VisitWord64Ror(Node* node) {
UNIMPLEMENTED(); }
748 void InstructionSelector::VisitInt64Add(Node* node) {
UNIMPLEMENTED(); }
751 void InstructionSelector::VisitInt64Sub(Node* node) {
UNIMPLEMENTED(); }
754 void InstructionSelector::VisitInt64Mul(Node* node) {
UNIMPLEMENTED(); }
757 void InstructionSelector::VisitInt64Div(Node* node) {
UNIMPLEMENTED(); }
760 void InstructionSelector::VisitInt64UDiv(Node* node) {
UNIMPLEMENTED(); }
763 void InstructionSelector::VisitInt64Mod(Node* node) {
UNIMPLEMENTED(); }
766 void InstructionSelector::VisitInt64UMod(Node* node) {
UNIMPLEMENTED(); }
769 void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
774 void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
779 void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
788 #if V8_TARGET_ARCH_32_BIT || !V8_TURBOFAN_BACKEND
// Fallback declarations for 32-bit targets or a disabled TurboFan backend
// (see the #if on the preceding line); bodies missing from this extraction.
790 void InstructionSelector::VisitWord64Test(Node* node, FlagsContinuation* cont) {
795 void InstructionSelector::VisitWord64Compare(Node* node,
796 FlagsContinuation* cont) {
// Finish node: emits a no-op whose output aliases its (first) input, keeping
// the value alive under the same register as the input.
803 void InstructionSelector::VisitFinish(Node* node) {
804 OperandGenerator g(
this);
805 Node* value = node->InputAt(0);
806 Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
// Parameter node: defines the node at the location/type the linkage assigns
// to that parameter index. The Emit opcode line (original 813) is missing
// from this extraction.
810 void InstructionSelector::VisitParameter(Node* node) {
811 OperandGenerator g(
this);
812 int index = OpParameter<int>(node);
814 g.DefineAsLocation(node, linkage()->GetParameterLocation(index),
815 linkage()->GetParameterType(index)));
// Phi node: iterates over the phi's inputs; the loop body and any emitted
// bookkeeping are missing from this extraction (presumably marking inputs
// used — TODO confirm against upstream).
819 void InstructionSelector::VisitPhi(Node* node) {
821 for (
InputIter i = node->inputs().begin();
i != node->inputs().end(); ++
i) {
// Projection node: for add/sub-with-overflow inputs, projection 0 (the
// value) is defined as an alias of the underlying operation via a no-op;
// projection index 1 (the overflow flag) is asserted but its handling —
// and the default case — are missing from this extraction.
827 void InstructionSelector::VisitProjection(Node* node) {
828 OperandGenerator g(
this);
829 Node* value = node->InputAt(0);
830 switch (value->opcode()) {
831 case IrOpcode::kInt32AddWithOverflow:
832 case IrOpcode::kInt32SubWithOverflow:
833 if (OpParameter<size_t>(node) == 0) {
834 Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
836 DCHECK(OpParameter<size_t>(node) == 1u);
// Constant node: materialized lazily — emit a no-op whose output operand is
// the constant itself, letting register allocation decide placement.
846 void InstructionSelector::VisitConstant(Node* node) {
849 OperandGenerator g(
this);
850 Emit(kArchNop, g.DefineAsConstant(node));
// Goto: if the target is next in assembly order, a fall-through no-op
// suffices; otherwise an explicit jump to the target's label. Both are
// marked as control instructions.
854 void InstructionSelector::VisitGoto(BasicBlock* target) {
855 if (IsNextInAssemblyOrder(target)) {
857 Emit(kArchNop,
NULL)->MarkAsControl();
860 OperandGenerator g(
this);
861 Emit(kArchJmp,
NULL, g.Label(target))->MarkAsControl();
// Branch selection. Visible structure:
//  - start with a kNotEqual continuation targeting (tbranch, fbranch);
//    a fall-through adjustment when tbranch is next in assembly order is
//    partially missing from this extraction;
//  - peel off covered Word32/Word64 Equal-with-zero wrappers, descending
//    into their left operand (each peel presumably negates the
//    continuation — those lines are absent);
//  - if the final value is covered by this branch, fuse it: comparisons
//    become compare-and-branch with the appropriate (negated-if-equal)
//    condition, and overflow projections (index 1) of add/sub-with-overflow
//    become branch-on-overflow, provided the value projection is either
//    absent or already defined;
//  - otherwise fall back to a plain Word32Test on the value.
866 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
867 BasicBlock* fbranch) {
868 OperandGenerator g(
this);
870 Node* value = branch->InputAt(0);
872 FlagsContinuation cont(
kNotEqual, tbranch, fbranch);
875 if (IsNextInAssemblyOrder(tbranch)) {
881 while (CanCover(user, value)) {
882 if (value->opcode() == IrOpcode::kWord32Equal) {
884 if (m.right().Is(0)) {
886 value = m.left().node();
891 }
else if (value->opcode() == IrOpcode::kWord64Equal) {
893 if (m.right().Is(0)) {
895 value = m.left().node();
906 if (CanCover(user, value)) {
907 switch (value->opcode()) {
908 case IrOpcode::kWord32Equal:
909 cont.OverwriteAndNegateIfEqual(
kEqual);
910 return VisitWord32Compare(value, &cont);
911 case IrOpcode::kInt32LessThan:
913 return VisitWord32Compare(value, &cont);
914 case IrOpcode::kInt32LessThanOrEqual:
916 return VisitWord32Compare(value, &cont);
917 case IrOpcode::kUint32LessThan:
919 return VisitWord32Compare(value, &cont);
920 case IrOpcode::kUint32LessThanOrEqual:
922 return VisitWord32Compare(value, &cont);
923 case IrOpcode::kWord64Equal:
924 cont.OverwriteAndNegateIfEqual(
kEqual);
925 return VisitWord64Compare(value, &cont);
926 case IrOpcode::kInt64LessThan:
928 return VisitWord64Compare(value, &cont);
929 case IrOpcode::kInt64LessThanOrEqual:
931 return VisitWord64Compare(value, &cont);
932 case IrOpcode::kFloat64Equal:
934 return VisitFloat64Compare(value, &cont);
935 case IrOpcode::kFloat64LessThan:
937 return VisitFloat64Compare(value, &cont);
938 case IrOpcode::kFloat64LessThanOrEqual:
940 return VisitFloat64Compare(value, &cont);
941 case IrOpcode::kProjection:
944 if (OpParameter<size_t>(value) == 1u) {
950 Node* node = value->InputAt(0);
951 Node* result = node->FindProjection(0);
952 if (result ==
NULL || IsDefined(result)) {
953 switch (node->opcode()) {
954 case IrOpcode::kInt32AddWithOverflow:
955 cont.OverwriteAndNegateIfEqual(
kOverflow);
956 return VisitInt32AddWithOverflow(node, &cont);
957 case IrOpcode::kInt32SubWithOverflow:
958 cont.OverwriteAndNegateIfEqual(
kOverflow);
959 return VisitInt32SubWithOverflow(node, &cont);
972 VisitWord32Test(value, &cont);
// Return: with a value, emit kArchRet carrying the value at the linkage's
// return location/type; with no value (the branch between the two Emit
// calls is missing from this extraction), emit a bare kArchRet.
976 void InstructionSelector::VisitReturn(Node* value) {
977 OperandGenerator g(
this);
979 Emit(kArchRet,
NULL, g.UseLocation(value, linkage()->GetReturnLocation(),
980 linkage()->GetReturnType()));
982 Emit(kArchRet,
NULL);
// Throw: body missing from this extraction (presumably unimplemented at
// this revision — TODO confirm against upstream).
987 void InstructionSelector::VisitThrow(Node* value) {
// Builds a FrameStateDescriptor from a kFrameState node: parameter/local/
// stack counts are integer parameters of inputs 0-2, and input 4 (when it
// is itself a kFrameState) supplies the outer (caller) frame, computed
// recursively. The descriptor is allocated in the instruction zone.
992 FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
994 DCHECK(state->opcode() == IrOpcode::kFrameState);
996 FrameStateCallInfo state_info = OpParameter<FrameStateCallInfo>(state);
997 int parameters = OpParameter<int>(state->InputAt(0));
998 int locals = OpParameter<int>(state->InputAt(1));
999 int stack = OpParameter<int>(state->InputAt(2));
1001 FrameStateDescriptor* outer_state =
NULL;
1002 Node* outer_node = state->InputAt(4);
1003 if (outer_node->opcode() == IrOpcode::kFrameState) {
1004 outer_state = GetFrameStateDescriptor(outer_node);
1007 return new (instruction_zone())
1008 FrameStateDescriptor(state_info, parameters, locals, stack, outer_state);
// Fragment: classifies constant-like frame-state inputs by opcode. The
// enclosing function's signature is missing from this extraction —
// presumably the deopt-operand helper (see UseOrImmediate forward
// declaration); TODO confirm.
1013 switch (input->opcode()) {
1014 case IrOpcode::kInt32Constant:
1015 case IrOpcode::kNumberConstant:
1016 case IrOpcode::kFloat64Constant:
1017 case IrOpcode::kHeapConstant:
// Appends a frame state's inputs to the instruction's input list:
//  - recurses into the outer frame state (input 4) first, so outermost
//    frames come before inner ones;
//  - validates that parameters/locals/stack (inputs 0-2) are kStateValues
//    nodes whose arities match the descriptor;
//  - then emits operands for each parameter, the context (input 3, only if
//    the descriptor has one), each local, and each stack slot. The loop
//    bodies that push the actual operands are missing from this extraction.
1025 void InstructionSelector::AddFrameStateInputs(
1027 FrameStateDescriptor* descriptor) {
1028 DCHECK_EQ(IrOpcode::kFrameState, state->op()->opcode());
1030 if (descriptor->outer_state() !=
NULL) {
1031 AddFrameStateInputs(state->InputAt(4), inputs, descriptor->outer_state());
1034 Node* parameters = state->InputAt(0);
1035 Node* locals = state->InputAt(1);
1036 Node* stack = state->InputAt(2);
1037 Node* context = state->InputAt(3);
1039 DCHECK_EQ(IrOpcode::kStateValues, parameters->op()->opcode());
1040 DCHECK_EQ(IrOpcode::kStateValues, locals->op()->opcode());
1041 DCHECK_EQ(IrOpcode::kStateValues, stack->op()->opcode());
1043 DCHECK_EQ(descriptor->parameters_count(), parameters->InputCount());
1044 DCHECK_EQ(descriptor->locals_count(), locals->InputCount());
1045 DCHECK_EQ(descriptor->stack_count(), stack->InputCount());
1047 OperandGenerator g(
this);
1048 for (
int i = 0; i < static_cast<int>(descriptor->parameters_count());
i++) {
1051 if (descriptor->HasContext()) {
1054 for (
int i = 0; i < static_cast<int>(descriptor->locals_count());
i++) {
1057 for (
int i = 0; i < static_cast<int>(descriptor->stack_count());
i++) {
1063 #if !V8_TURBOFAN_BACKEND
1065 #define DECLARE_UNIMPLEMENTED_SELECTOR(x) \
1066 void InstructionSelector::Visit##x(Node* node) { UNIMPLEMENTED(); }
1068 #undef DECLARE_UNIMPLEMENTED_SELECTOR
// Fallbacks compiled when no TurboFan backend exists (see the
// !V8_TURBOFAN_BACKEND guard and DECLARE_UNIMPLEMENTED_SELECTOR macro
// above): the flags-continuation visitors abort (bodies presumably
// UNIMPLEMENTED — lines missing from this extraction) and VisitCall is an
// explicit no-op.
1071 void InstructionSelector::VisitInt32AddWithOverflow(Node* node,
1072 FlagsContinuation* cont) {
1077 void InstructionSelector::VisitInt32SubWithOverflow(Node* node,
1078 FlagsContinuation* cont) {
1083 void InstructionSelector::VisitWord32Test(Node* node, FlagsContinuation* cont) {
1088 void InstructionSelector::VisitWord32Compare(Node* node,
1089 FlagsContinuation* cont) {
1094 void InstructionSelector::VisitFloat64Compare(Node* node,
1095 FlagsContinuation* cont) {
1100 void InstructionSelector::VisitCall(Node* call, BasicBlock* continuation,
1101 BasicBlock* deoptimization) {}
InstructionOperand * UseImmediate(Node *node)
InstructionOperand * UseUnique(Node *node)
static int GetValueInputCount(const Operator *op)
static const UnallocatedOperand * cast(const InstructionOperand *op)
int fixed_slot_index() const
enable harmony numeric enable harmony object literal extensions Optimize object Array DOM strings and string trace pretenuring decisions of HAllocate instructions Enables optimizations which favor memory size over execution speed maximum source size in bytes considered for a single inlining maximum cumulative number of AST nodes considered for inlining trace the tracking of allocation sites deoptimize every n garbage collections perform array bounds checks elimination analyze liveness of environment slots and zap dead values flushes the cache of optimized code for closures on every GC allow uint32 values on optimize frames if they are used only in safe operations track concurrent recompilation artificial compilation delay in ms do not emit check maps for constant values that have a leaf deoptimize the optimized code if the layout of the maps changes enable context specialization in TurboFan execution budget before interrupt is triggered max percentage of megamorphic generic ICs to allow optimization enable use of SAHF instruction if enable use of VFP3 instructions if available enable use of NEON instructions if enable use of SDIV and UDIV instructions if enable use of MLS instructions if enable loading bit constant by means of movw movt instruction enable unaligned accesses for enable use of d16 d31 registers on ARM this requires VFP3 force all emitted branches to be in long enable alignment of csp to bytes on platforms which prefer the register to always be NULL
#define DECLARE_UNIMPLEMENTED_SELECTOR(x)
void V8_Fatal(const char *file, int line, const char *format,...)
#define CHECK_EQ(expected, value)
#define CHECK_NE(unexpected, value)
#define DCHECK_NOT_NULL(p)
#define DCHECK_NE(v1, v2)
#define DCHECK(condition)
#define DCHECK_EQ(v1, v2)
ZoneVector< BasicBlock * > BasicBlockVector
BinopMatcher< Int32Matcher, Int32Matcher > Int32BinopMatcher
static void CheckNoPhis(const BasicBlock *block)
MachineType RepresentationOf(MachineType machine_type)
BasicBlockVector::iterator BasicBlockVectorIter
BinopMatcher< Int64Matcher, Int64Matcher > Int64BinopMatcher
@ kUnsignedLessThanOrEqual
@ kUnorderedLessThanOrEqual
MachineType LoadRepresentation
BasicBlockVector::reverse_iterator BasicBlockVectorRIter
Node::Inputs::iterator InputIter
static InstructionOperand * UseOrImmediate(OperandGenerator *g, Node *input)
ZoneVector< InstructionOperand * > InstructionOperandVector
Debugger support for the V8 JavaScript engine.
#define MACHINE_OP_LIST(V)
CallDescriptor * descriptor
size_t frame_state_count() const
size_t input_count() const
size_t frame_state_value_count() const
InstructionOperandVector instruction_args
InstructionOperandVector outputs
FrameStateDescriptor * frame_state_descriptor