InstructionSelectorTest::Stream InstructionSelectorTest::StreamBuilder::Build(
    InstructionSelector::Features features,
    InstructionSelectorTest::StreamBuilderMode mode) {
  Schedule* schedule = Export();
  if (FLAG_trace_turbo) {
    OFStream out(stdout);
    out << "=== Schedule before instruction selection ===" << endl
        << *schedule;
  }
  EXPECT_NE(0, graph()->NodeCount());
  CompilationInfo info(test_->isolate(), test_->zone());
  Linkage linkage(&info, call_descriptor());
  InstructionSequence sequence(&linkage, graph(), schedule);
  SourcePositionTable source_position_table(graph());
  InstructionSelector selector(&sequence, &source_position_table, features);
  selector.SelectInstructions();
  if (FLAG_trace_turbo) {
    OFStream out(stdout);
    out << "=== Code sequence after instruction selection ===" << endl
        << sequence;
  }
  Stream s;
  std::set<int> virtual_registers;
  for (InstructionSequence::const_iterator i = sequence.begin();
       i != sequence.end(); ++i) {
    Instruction* instr = *i;
    if (instr->opcode() < 0) continue;
    if (mode == kTargetInstructions) {
      switch (instr->arch_opcode()) {
#define CASE(Name) \
  case k##Name:    \
    break;
        TARGET_ARCH_OPCODE_LIST(CASE)
#undef CASE
        default:
          continue;
      }
    }
    if (mode == kAllExceptNopInstructions && instr->arch_opcode() == kArchNop) {
      continue;
    }
    for (size_t i = 0; i < instr->OutputCount(); ++i) {
      InstructionOperand* output = instr->OutputAt(i);
      if (output->IsConstant()) {
        s.constants_.insert(std::make_pair(
            output->index(), sequence.GetConstant(output->index())));
        virtual_registers.insert(output->index());
      } else if (output->IsUnallocated()) {
        virtual_registers.insert(
            UnallocatedOperand::cast(output)->virtual_register());
      }
    }
    for (size_t i = 0; i < instr->InputCount(); ++i) {
      InstructionOperand* input = instr->InputAt(i);
      if (input->IsImmediate()) {
        s.immediates_.insert(std::make_pair(
            input->index(), sequence.GetImmediate(input->index())));
      } else if (input->IsUnallocated()) {
        virtual_registers.insert(
            UnallocatedOperand::cast(input)->virtual_register());
      }
    }
    s.instructions_.push_back(instr);
  }
  for (std::set<int>::const_iterator i = virtual_registers.begin();
       i != virtual_registers.end(); ++i) {
    int virtual_register = *i;
    if (sequence.IsDouble(virtual_register)) {
      EXPECT_FALSE(sequence.IsReference(virtual_register));
      s.doubles_.insert(virtual_register);
    }
    if (sequence.IsReference(virtual_register)) {
      EXPECT_FALSE(sequence.IsDouble(virtual_register));
      s.references_.insert(virtual_register);
    }
  }
  for (int i = 0; i < sequence.GetFrameStateDescriptorCount(); i++) {
    s.deoptimization_entries_.push_back(sequence.GetFrameStateDescriptor(
        InstructionSequence::StateId::FromInt(i)));
  }
  return s;
}

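// The tests below build a small graph with StreamBuilder, run instruction
// selection via Build(), and inspect the resulting Stream. A returned
// constant is expected to select to a kArchNop that defines the constant
// followed by a kArchRet that consumes it.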
TARGET_TEST_F(InstructionSelectorTest, ReturnFloat32Constant) {
  const float kValue = 4.2f;
  StreamBuilder m(this, kMachFloat32);
  m.Return(m.Float32Constant(kValue));
  Stream s = m.Build(kAllInstructions);
  ASSERT_EQ(2U, s.size());
  EXPECT_EQ(kArchNop, s[0]->arch_opcode());
  ASSERT_EQ(1U, s[0]->OutputCount());
  EXPECT_FLOAT_EQ(kValue, s.ToFloat32(s[0]->OutputAt(0)));
  EXPECT_EQ(kArchRet, s[1]->arch_opcode());
  EXPECT_EQ(1U, s[1]->InputCount());
}

TARGET_TEST_F(InstructionSelectorTest, ReturnParameter) {
  StreamBuilder m(this, kMachInt32, kMachInt32);
  m.Return(m.Parameter(0));
  Stream s = m.Build(kAllInstructions);
  ASSERT_EQ(2U, s.size());
  EXPECT_EQ(kArchNop, s[0]->arch_opcode());
  ASSERT_EQ(1U, s[0]->OutputCount());
  EXPECT_EQ(kArchRet, s[1]->arch_opcode());
  EXPECT_EQ(1U, s[1]->InputCount());
}

TARGET_TEST_F(InstructionSelectorTest, ReturnZero) {
  StreamBuilder m(this, kMachInt32);
  m.Return(m.Int32Constant(0));
  Stream s = m.Build(kAllInstructions);
  ASSERT_EQ(2U, s.size());
  EXPECT_EQ(kArchNop, s[0]->arch_opcode());
  ASSERT_EQ(1U, s[0]->OutputCount());
  EXPECT_EQ(0, s.ToInt32(s[0]->OutputAt(0)));
  EXPECT_EQ(kArchRet, s[1]->arch_opcode());
  EXPECT_EQ(1U, s[1]->InputCount());
}

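// Conversions: the float64-to-int32 truncation should select to the
// architecture-independent kArchTruncateDoubleToI opcode, between the
// parameter nop and the return.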
TARGET_TEST_F(InstructionSelectorTest, TruncateFloat64ToInt32WithParameter) {
  StreamBuilder m(this, kMachInt32, kMachFloat64);
  m.Return(m.TruncateFloat64ToInt32(m.Parameter(0)));
  Stream s = m.Build(kAllInstructions);
  ASSERT_EQ(3U, s.size());
  EXPECT_EQ(kArchNop, s[0]->arch_opcode());
  EXPECT_EQ(kArchTruncateDoubleToI, s[1]->arch_opcode());
  EXPECT_EQ(1U, s[1]->InputCount());
  EXPECT_EQ(1U, s[1]->OutputCount());
  EXPECT_EQ(kArchRet, s[2]->arch_opcode());
}

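// Parameters: the selector records the machine representation of each
// virtual register, so float64 parameters must be marked as double and
// tagged parameters as reference.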
TARGET_TEST_F(InstructionSelectorTest, DoubleParameter) {
  StreamBuilder m(this, kMachFloat64, kMachFloat64);
  Node* param = m.Parameter(0);
  m.Return(param);
  Stream s = m.Build(kAllInstructions);
  EXPECT_TRUE(s.IsDouble(param->id()));
}

TARGET_TEST_F(InstructionSelectorTest, ReferenceParameter) {
  StreamBuilder m(this, kMachAnyTagged, kMachAnyTagged);
  Node* param = m.Parameter(0);
  m.Return(param);
  Stream s = m.Build(kAllInstructions);
  EXPECT_TRUE(s.IsReference(param->id()));
}

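// A Finish node selects to a nop whose input carries the finished value's
// virtual register and whose output gets the Finish node's own virtual
// register, which inherits the reference representation.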
TARGET_TEST_F(InstructionSelectorTest, Finish) {
  StreamBuilder m(this, kMachAnyTagged, kMachAnyTagged);
  Node* param = m.Parameter(0);
  Node* finish = m.NewNode(m.common()->Finish(1), param, m.graph()->start());
  m.Return(finish);
  Stream s = m.Build(kAllInstructions);
  ASSERT_EQ(3U, s.size());
  EXPECT_EQ(kArchNop, s[0]->arch_opcode());
  ASSERT_EQ(1U, s[0]->OutputCount());
  ASSERT_TRUE(s[0]->Output()->IsUnallocated());
  EXPECT_EQ(param->id(), s.ToVreg(s[0]->Output()));
  EXPECT_EQ(kArchNop, s[1]->arch_opcode());
  ASSERT_EQ(1U, s[1]->InputCount());
  ASSERT_TRUE(s[1]->InputAt(0)->IsUnallocated());
  EXPECT_EQ(param->id(), s.ToVreg(s[1]->InputAt(0)));
  ASSERT_EQ(1U, s[1]->OutputCount());
  ASSERT_TRUE(s[1]->Output()->IsUnallocated());
  EXPECT_EQ(finish->id(), s.ToVreg(s[1]->Output()));
  EXPECT_TRUE(s.IsReference(finish->id()));
}

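// Phis: a phi's representation (double or reference) must agree with the
// representation of its inputs, whichever machine type is under test.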
typedef InstructionSelectorTestWithParam<MachineType>
    InstructionSelectorPhiTest;

TARGET_TEST_P(InstructionSelectorPhiTest, Doubleness) {
  const MachineType type = GetParam();
  StreamBuilder m(this, type, type, type);
  Node* param0 = m.Parameter(0);
  Node* param1 = m.Parameter(1);
  MLabel a, b, c;
  m.Branch(m.Int32Constant(0), &a, &b);
  m.Bind(&a);
  m.Goto(&c);
  m.Bind(&b);
  m.Goto(&c);
  m.Bind(&c);
  Node* phi = m.Phi(type, param0, param1);
  m.Return(phi);
  Stream s = m.Build(kAllInstructions);
  EXPECT_EQ(s.IsDouble(phi->id()), s.IsDouble(param0->id()));
  EXPECT_EQ(s.IsDouble(phi->id()), s.IsDouble(param1->id()));
}

TARGET_TEST_P(InstructionSelectorPhiTest, Referenceness) {
  const MachineType type = GetParam();
  StreamBuilder m(this, type, type, type);
  Node* param0 = m.Parameter(0);
  Node* param1 = m.Parameter(1);
  MLabel a, b, c;
  m.Branch(m.Int32Constant(1), &a, &b);
  m.Bind(&a);
  m.Goto(&c);
  m.Bind(&b);
  m.Goto(&c);
  m.Bind(&c);
  Node* phi = m.Phi(type, param0, param1);
  m.Return(phi);
  Stream s = m.Build(kAllInstructions);
  EXPECT_EQ(s.IsReference(phi->id()), s.IsReference(param0->id()));
  EXPECT_EQ(s.IsReference(phi->id()), s.IsReference(param1->id()));
}

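// An explicit ValueEffect edge on a load must not change the selected code:
// both streams below should contain exactly the same instructions.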
TARGET_TEST_F(InstructionSelectorTest, ValueEffect) {
  StreamBuilder m1(this, kMachInt32, kMachPtr);
  Node* p1 = m1.Parameter(0);
  m1.Return(m1.Load(kMachInt32, p1, m1.Int32Constant(0)));
  Stream s1 = m1.Build(kAllInstructions);
  StreamBuilder m2(this, kMachInt32, kMachPtr);
  Node* p2 = m2.Parameter(0);
  m2.Return(m2.NewNode(m2.machine()->Load(kMachInt32), p2, m2.Int32Constant(0),
                       m2.NewNode(m2.common()->ValueEffect(1), p2)));
  Stream s2 = m2.Build(kAllInstructions);
  EXPECT_LE(3U, s1.size());
  ASSERT_EQ(s1.size(), s2.size());
  TRACED_FORRANGE(size_t, i, 0, s1.size() - 1) {
    const Instruction* i1 = s1[i];
    const Instruction* i2 = s2[i];
    EXPECT_EQ(i1->arch_opcode(), i2->arch_opcode());
    EXPECT_EQ(i1->InputCount(), i2->InputCount());
    EXPECT_EQ(i1->OutputCount(), i2->OutputCount());
  }
}

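// Calls with deoptimization: the frame state attached to a call is lowered
// into additional inputs on the call instruction (a deoptimization id
// followed by the frame state values).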
TARGET_TEST_F(InstructionSelectorTest, CallJSFunctionWithDeopt) {
  StreamBuilder m(this, kMachAnyTagged, kMachAnyTagged, kMachAnyTagged,
                  kMachAnyTagged);

  BailoutId bailout_id(42);

  Node* function_node = m.Parameter(0);
  Node* receiver = m.Parameter(1);
  Node* context = m.Parameter(2);

  Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(1));
  Node* locals = m.NewNode(m.common()->StateValues(0));
  Node* stack = m.NewNode(m.common()->StateValues(0));
  Node* context_dummy = m.Int32Constant(0);

  Node* state_node = m.NewNode(
      m.common()->FrameState(JS_FRAME, bailout_id, kPushOutput), parameters,
      locals, stack, context_dummy, m.UndefinedConstant());
  Node* call = m.CallJS0(function_node, receiver, context, state_node);
  m.Return(call);

  Stream s = m.Build(kAllExceptNopInstructions);

  // Skip until the call instruction.
  size_t index = 0;
  for (; index < s.size() && s[index]->arch_opcode() != kArchCallJSFunction;
       index++) {
  }
  // Now we should have two instructions: call and return.
  ASSERT_EQ(index + 2, s.size());

  EXPECT_EQ(kArchCallJSFunction, s[index++]->arch_opcode());
  EXPECT_EQ(kArchRet, s[index++]->arch_opcode());
}

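// For a call through a code object, the expected input layout is: code
// object, deoptimization id, the frame state values (parameter, context,
// local, stack), then the function and the context.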
TARGET_TEST_F(InstructionSelectorTest, CallFunctionStubWithDeopt) {
  StreamBuilder m(this, kMachAnyTagged, kMachAnyTagged, kMachAnyTagged,
                  kMachAnyTagged);

  BailoutId bailout_id_before(42);

  // Some arguments for the call node.
  Node* function_node = m.Parameter(0);
  Node* receiver = m.Parameter(1);
  Node* context = m.Int32Constant(1);  // Context is ignored.

  // Build frame state for the state before the call.
  Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(43));
  Node* locals = m.NewNode(m.common()->StateValues(1), m.Int32Constant(44));
  Node* stack = m.NewNode(m.common()->StateValues(1), m.Int32Constant(45));

  Node* context_sentinel = m.Int32Constant(0);
  Node* frame_state_before = m.NewNode(
      m.common()->FrameState(JS_FRAME, bailout_id_before, kPushOutput),
      parameters, locals, stack, context_sentinel, m.UndefinedConstant());

  // Build the call.
  Node* call = m.CallFunctionStub0(function_node, receiver, context,
                                   frame_state_before, CALL_AS_METHOD);
  m.Return(call);

  Stream s = m.Build(kAllExceptNopInstructions);

  // Skip until the call instruction.
  size_t index = 0;
  for (; index < s.size() && s[index]->arch_opcode() != kArchCallCodeObject;
       index++) {
  }
  // Now we should have two instructions: call and return.
  ASSERT_EQ(index + 2, s.size());

  // Check the call instruction.
  const Instruction* call_instr = s[index++];
  EXPECT_EQ(kArchCallCodeObject, call_instr->arch_opcode());
  size_t num_operands =
      1 +  // Code object.
      1 +  // Frame state deopt id.
      4 +  // One input for each value in the frame state + context.
      1 +  // Function.
      1;   // Context.
  ASSERT_EQ(num_operands, call_instr->InputCount());

  // Code object.
  EXPECT_TRUE(call_instr->InputAt(0)->IsImmediate());

  // Deoptimization id.
  int32_t deopt_id_before = s.ToInt32(call_instr->InputAt(1));
  FrameStateDescriptor* desc_before =
      s.GetFrameStateDescriptor(deopt_id_before);
  EXPECT_EQ(bailout_id_before, desc_before->bailout_id());

  // Values from the frame state.
  EXPECT_EQ(43, s.ToInt32(call_instr->InputAt(2)));  // Parameter.
  EXPECT_EQ(0, s.ToInt32(call_instr->InputAt(3)));   // Context sentinel.
  EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(4)));  // Local.
  EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(5)));  // Stack.

  // Function.
  EXPECT_EQ(function_node->id(), s.ToVreg(call_instr->InputAt(6)));
  // Context.
  EXPECT_EQ(context->id(), s.ToVreg(call_instr->InputAt(7)));

  EXPECT_EQ(kArchRet, s[index++]->arch_opcode());

  EXPECT_EQ(index, s.size());
}

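// A frame state with a parent frame state is flattened into the call's
// inputs: the parent frame's values come first, followed by the inner
// frame's values, then the function and the context.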
TARGET_TEST_F(InstructionSelectorTest,
              CallFunctionStubDeoptRecursiveFrameState) {
  StreamBuilder m(this, kMachAnyTagged, kMachAnyTagged, kMachAnyTagged,
                  kMachAnyTagged);

  BailoutId bailout_id_before(42);
  BailoutId bailout_id_parent(62);

  // Some arguments for the call node.
  Node* function_node = m.Parameter(0);
  Node* receiver = m.Parameter(1);
  Node* context = m.Int32Constant(66);

  // Build frame state for the parent frame.
  Node* parameters = m.NewNode(m.common()->StateValues(1), m.Int32Constant(63));
  Node* locals = m.NewNode(m.common()->StateValues(1), m.Int32Constant(64));
  Node* stack = m.NewNode(m.common()->StateValues(1), m.Int32Constant(65));
  Node* frame_state_parent = m.NewNode(
      m.common()->FrameState(JS_FRAME, bailout_id_parent, kIgnoreOutput),
      parameters, locals, stack, context, m.UndefinedConstant());

  // Build frame state for the state before the call.
  Node* context2 = m.Int32Constant(46);
  Node* parameters2 =
      m.NewNode(m.common()->StateValues(1), m.Int32Constant(43));
  Node* locals2 = m.NewNode(m.common()->StateValues(1), m.Int32Constant(44));
  Node* stack2 = m.NewNode(m.common()->StateValues(1), m.Int32Constant(45));
  Node* frame_state_before = m.NewNode(
      m.common()->FrameState(JS_FRAME, bailout_id_before, kPushOutput),
      parameters2, locals2, stack2, context2, frame_state_parent);

  // Build the call.
  Node* call = m.CallFunctionStub0(function_node, receiver, context2,
                                   frame_state_before, CALL_AS_METHOD);
  m.Return(call);

  Stream s = m.Build(kAllExceptNopInstructions);

  // Skip until the call instruction.
  size_t index = 0;
  for (; index < s.size() && s[index]->arch_opcode() != kArchCallCodeObject;
       index++) {
  }
  // Now we should have two instructions: call and return.
  EXPECT_EQ(index + 2, s.size());

  // Check the call instruction.
  const Instruction* call_instr = s[index++];
  EXPECT_EQ(kArchCallCodeObject, call_instr->arch_opcode());
  size_t num_operands =
      1 +  // Code object.
      1 +  // Frame state deopt id.
      4 +  // One input for each value in the frame state + context.
      4 +  // One input for each value in the parent frame state + context.
      1 +  // Function.
      1;   // Context.
  EXPECT_EQ(num_operands, call_instr->InputCount());

  // Code object.
  EXPECT_TRUE(call_instr->InputAt(0)->IsImmediate());

  // Deoptimization id.
  int32_t deopt_id_before = s.ToInt32(call_instr->InputAt(1));
  FrameStateDescriptor* desc_before =
      s.GetFrameStateDescriptor(deopt_id_before);
  EXPECT_EQ(bailout_id_before, desc_before->bailout_id());

  // Values from the parent frame state, then the inner frame state.
  EXPECT_EQ(63, s.ToInt32(call_instr->InputAt(2)));  // Parent parameter.
  EXPECT_EQ(66, s.ToInt32(call_instr->InputAt(3)));  // Parent context.
  EXPECT_EQ(64, s.ToInt32(call_instr->InputAt(4)));  // Parent local.
  EXPECT_EQ(65, s.ToInt32(call_instr->InputAt(5)));  // Parent stack.
  EXPECT_EQ(43, s.ToInt32(call_instr->InputAt(6)));  // Parameter.
  EXPECT_EQ(46, s.ToInt32(call_instr->InputAt(7)));  // Context.
  EXPECT_EQ(44, s.ToInt32(call_instr->InputAt(8)));  // Local.
  EXPECT_EQ(45, s.ToInt32(call_instr->InputAt(9)));  // Stack.

  // Function.
  EXPECT_EQ(function_node->id(), s.ToVreg(call_instr->InputAt(10)));
  // Context.
  EXPECT_EQ(context2->id(), s.ToVreg(call_instr->InputAt(11)));

  EXPECT_EQ(kArchRet, s[index++]->arch_opcode());
  EXPECT_EQ(index, s.size());
}