lithium-arm.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #include "src/arm/lithium-codegen-arm.h"
8 #include "src/hydrogen-osr.h"
9 #include "src/lithium-inl.h"
10 
11 namespace v8 {
12 namespace internal {
13 
14 #define DEFINE_COMPILE(type) \
15  void L##type::CompileToNative(LCodeGen* generator) { \
16  generator->Do##type(this); \
17  }
18 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
19 #undef DEFINE_COMPILE
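// For example, for LGoto the macro above expands to
//   void LGoto::CompileToNative(LCodeGen* generator) { generator->DoGoto(this); }
// so every concrete lithium instruction simply dispatches to the matching
// LCodeGen::Do* method.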
20 
21 #ifdef DEBUG
22 void LInstruction::VerifyCall() {
23  // Call instructions can use only fixed registers as temporaries and
24  // outputs because all registers are blocked by the calling convention.
25  // Input operands must use a fixed register or use-at-start policy or
26  // a non-register policy.
27  DCHECK(Output() == NULL ||
28  LUnallocated::cast(Output())->HasFixedPolicy() ||
29  !LUnallocated::cast(Output())->HasRegisterPolicy());
30  for (UseIterator it(this); !it.Done(); it.Advance()) {
31  LUnallocated* operand = LUnallocated::cast(it.Current());
32  DCHECK(operand->HasFixedPolicy() ||
33  operand->IsUsedAtStart());
34  }
35  for (TempIterator it(this); !it.Done(); it.Advance()) {
36  LUnallocated* operand = LUnallocated::cast(it.Current());
37  DCHECK(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
38  }
39 }
40 #endif
41 
42 
43 void LInstruction::PrintTo(StringStream* stream) {
44  stream->Add("%s ", this->Mnemonic());
45 
46  PrintOutputOperandTo(stream);
47 
48  PrintDataTo(stream);
49 
50  if (HasEnvironment()) {
51  stream->Add(" ");
52  environment()->PrintTo(stream);
53  }
54 
55  if (HasPointerMap()) {
56  stream->Add(" ");
57  pointer_map()->PrintTo(stream);
58  }
59 }
60 
61 
62 void LInstruction::PrintDataTo(StringStream* stream) {
63  stream->Add("= ");
64  for (int i = 0; i < InputCount(); i++) {
65  if (i > 0) stream->Add(" ");
66  if (InputAt(i) == NULL) {
67  stream->Add("NULL");
68  } else {
69  InputAt(i)->PrintTo(stream);
70  }
71  }
72 }
73 
74 
75 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
76  if (HasResult()) result()->PrintTo(stream);
77 }
78 
79 
80 void LLabel::PrintDataTo(StringStream* stream) {
81  LGap::PrintDataTo(stream);
82  LLabel* rep = replacement();
83  if (rep != NULL) {
84  stream->Add(" Dead block replaced with B%d", rep->block_id());
85  }
86 }
87 
88 
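// An LGap holds up to four parallel-move lists, one for each position inside
// the gap; the gap is redundant when every list that is present is itself
// redundant.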
89 bool LGap::IsRedundant() const {
90  for (int i = 0; i < 4; i++) {
91  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
92  return false;
93  }
94  }
95 
96  return true;
97 }
98 
99 
100 void LGap::PrintDataTo(StringStream* stream) {
101  for (int i = 0; i < 4; i++) {
102  stream->Add("(");
103  if (parallel_moves_[i] != NULL) {
104  parallel_moves_[i]->PrintDataTo(stream);
105  }
106  stream->Add(") ");
107  }
108 }
109 
110 
111 const char* LArithmeticD::Mnemonic() const {
112  switch (op()) {
113  case Token::ADD: return "add-d";
114  case Token::SUB: return "sub-d";
115  case Token::MUL: return "mul-d";
116  case Token::DIV: return "div-d";
117  case Token::MOD: return "mod-d";
118  default:
119  UNREACHABLE();
120  return NULL;
121  }
122 }
123 
124 
125 const char* LArithmeticT::Mnemonic() const {
126  switch (op()) {
127  case Token::ADD: return "add-t";
128  case Token::SUB: return "sub-t";
129  case Token::MUL: return "mul-t";
130  case Token::MOD: return "mod-t";
131  case Token::DIV: return "div-t";
132  case Token::BIT_AND: return "bit-and-t";
133  case Token::BIT_OR: return "bit-or-t";
134  case Token::BIT_XOR: return "bit-xor-t";
135  case Token::ROR: return "ror-t";
136  case Token::SHL: return "shl-t";
137  case Token::SAR: return "sar-t";
138  case Token::SHR: return "shr-t";
139  default:
140  UNREACHABLE();
141  return NULL;
142  }
143 }
144 
145 
146 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
147  return !gen->IsNextEmittedBlock(block_id());
148 }
149 
150 
151 void LGoto::PrintDataTo(StringStream* stream) {
152  stream->Add("B%d", block_id());
153 }
154 
155 
156 void LBranch::PrintDataTo(StringStream* stream) {
157  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
158  value()->PrintTo(stream);
159 }
160 
161 
162 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
163  stream->Add("if ");
164  left()->PrintTo(stream);
165  stream->Add(" %s ", Token::String(op()));
166  right()->PrintTo(stream);
167  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
168 }
169 
170 
171 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
172  stream->Add("if is_object(");
173  value()->PrintTo(stream);
174  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
175 }
176 
177 
178 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
179  stream->Add("if is_string(");
180  value()->PrintTo(stream);
181  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
182 }
183 
184 
185 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
186  stream->Add("if is_smi(");
187  value()->PrintTo(stream);
188  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
189 }
190 
191 
192 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
193  stream->Add("if is_undetectable(");
194  value()->PrintTo(stream);
195  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
196 }
197 
198 
199 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
200  stream->Add("if string_compare(");
201  left()->PrintTo(stream);
202  right()->PrintTo(stream);
203  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
204 }
205 
206 
207 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
208  stream->Add("if has_instance_type(");
209  value()->PrintTo(stream);
210  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
211 }
212 
213 
214 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
215  stream->Add("if has_cached_array_index(");
216  value()->PrintTo(stream);
217  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
218 }
219 
220 
221 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
222  stream->Add("if class_of_test(");
223  value()->PrintTo(stream);
224  stream->Add(", \"%o\") then B%d else B%d",
225  *hydrogen()->class_name(),
226  true_block_id(),
227  false_block_id());
228 }
229 
230 
231 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
232  stream->Add("if typeof ");
233  value()->PrintTo(stream);
234  stream->Add(" == \"%s\" then B%d else B%d",
235  hydrogen()->type_literal()->ToCString().get(),
236  true_block_id(), false_block_id());
237 }
238 
239 
240 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
241  stream->Add(" = ");
242  function()->PrintTo(stream);
243  stream->Add(".code_entry = ");
244  code_object()->PrintTo(stream);
245 }
246 
247 
248 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
249  stream->Add(" = ");
250  base_object()->PrintTo(stream);
251  stream->Add(" + ");
252  offset()->PrintTo(stream);
253 }
254 
255 
256 void LCallJSFunction::PrintDataTo(StringStream* stream) {
257  stream->Add("= ");
258  function()->PrintTo(stream);
259  stream->Add("#%d / ", arity());
260 }
261 
262 
263 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
264  for (int i = 0; i < InputCount(); i++) {
265  InputAt(i)->PrintTo(stream);
266  stream->Add(" ");
267  }
268  stream->Add("#%d / ", arity());
269 }
270 
271 
272 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
273  context()->PrintTo(stream);
274  stream->Add("[%d]", slot_index());
275 }
276 
277 
278 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
279  context()->PrintTo(stream);
280  stream->Add("[%d] <- ", slot_index());
281  value()->PrintTo(stream);
282 }
283 
284 
285 void LInvokeFunction::PrintDataTo(StringStream* stream) {
286  stream->Add("= ");
287  function()->PrintTo(stream);
288  stream->Add(" #%d / ", arity());
289 }
290 
291 
292 void LCallNew::PrintDataTo(StringStream* stream) {
293  stream->Add("= ");
294  constructor()->PrintTo(stream);
295  stream->Add(" #%d / ", arity());
296 }
297 
298 
299 void LCallNewArray::PrintDataTo(StringStream* stream) {
300  stream->Add("= ");
301  constructor()->PrintTo(stream);
302  stream->Add(" #%d / ", arity());
303  ElementsKind kind = hydrogen()->elements_kind();
304  stream->Add(" (%s) ", ElementsKindToString(kind));
305 }
306 
307 
308 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
309  arguments()->PrintTo(stream);
310  stream->Add(" length ");
311  length()->PrintTo(stream);
312  stream->Add(" index ");
313  index()->PrintTo(stream);
314 }
315 
316 
317 void LStoreNamedField::PrintDataTo(StringStream* stream) {
318  object()->PrintTo(stream);
319  OStringStream os;
320  os << hydrogen()->access() << " <- ";
321  stream->Add(os.c_str());
322  value()->PrintTo(stream);
323 }
324 
325 
326 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
327  object()->PrintTo(stream);
328  stream->Add(".");
329  stream->Add(String::cast(*name())->ToCString().get());
330  stream->Add(" <- ");
331  value()->PrintTo(stream);
332 }
333 
334 
335 void LLoadKeyed::PrintDataTo(StringStream* stream) {
336  elements()->PrintTo(stream);
337  stream->Add("[");
338  key()->PrintTo(stream);
339  if (hydrogen()->IsDehoisted()) {
340  stream->Add(" + %d]", base_offset());
341  } else {
342  stream->Add("]");
343  }
344 }
345 
346 
347 void LStoreKeyed::PrintDataTo(StringStream* stream) {
348  elements()->PrintTo(stream);
349  stream->Add("[");
350  key()->PrintTo(stream);
351  if (hydrogen()->IsDehoisted()) {
352  stream->Add(" + %d] <-", base_offset());
353  } else {
354  stream->Add("] <- ");
355  }
356 
357  if (value() == NULL) {
358  DCHECK(hydrogen()->IsConstantHoleStore() &&
359  hydrogen()->value()->representation().IsDouble());
360  stream->Add("<the hole(nan)>");
361  } else {
362  value()->PrintTo(stream);
363  }
364 }
365 
366 
367 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
368  object()->PrintTo(stream);
369  stream->Add("[");
370  key()->PrintTo(stream);
371  stream->Add("] <- ");
372  value()->PrintTo(stream);
373 }
374 
375 
376 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
377  object()->PrintTo(stream);
378  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
379 }
380 
381 
382 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
383  // Skip a slot for a double-width slot.
384  if (kind == DOUBLE_REGISTERS) spill_slot_count_++;
385  return spill_slot_count_++;
386 }
387 
388 
389 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
390  int index = GetNextSpillIndex(kind);
391  if (kind == DOUBLE_REGISTERS) {
392  return LDoubleStackSlot::Create(index, zone());
393  } else {
394  DCHECK(kind == GENERAL_REGISTERS);
395  return LStackSlot::Create(index, zone());
396  }
397 }
398 
399 
400 LPlatformChunk* LChunkBuilder::Build() {
401  DCHECK(is_unused());
402  chunk_ = new(zone()) LPlatformChunk(info(), graph());
403  LPhase phase("L_Building chunk", chunk_);
404  status_ = BUILDING;
405 
406  // If compiling for OSR, reserve space for the unoptimized frame,
407  // which will be subsumed into this frame.
408  if (graph()->has_osr()) {
409  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
410  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
411  }
412  }
413 
414  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
415  for (int i = 0; i < blocks->length(); i++) {
416  HBasicBlock* next = NULL;
417  if (i < blocks->length() - 1) next = blocks->at(i + 1);
418  DoBasicBlock(blocks->at(i), next);
419  if (is_aborted()) return NULL;
420  }
421  status_ = DONE;
422  return chunk_;
423 }
424 
425 
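// The ToUnallocated overloads wrap a fixed machine register in an LUnallocated
// operand, which tells the register allocator to pin the value to exactly that
// register.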
426 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
427  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
428  Register::ToAllocationIndex(reg));
429 }
430 
431 
432 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
433  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
434  DoubleRegister::ToAllocationIndex(reg));
435 }
436 
437 
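// The Use* helpers below create input operands for an HValue with a particular
// register-allocation policy: a specific fixed register, any register, any
// register or stack slot, a constant, or a use that is only live at the start
// of the instruction.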
438 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
439  return Use(value, ToUnallocated(fixed_register));
440 }
441 
442 
443 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
444  return Use(value, ToUnallocated(reg));
445 }
446 
447 
448 LOperand* LChunkBuilder::UseRegister(HValue* value) {
449  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
450 }
451 
452 
453 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
454  return Use(value,
455  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
456  LUnallocated::USED_AT_START));
457 }
458 
459 
460 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
461  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
462 }
463 
464 
465 LOperand* LChunkBuilder::Use(HValue* value) {
466  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
467 }
468 
469 
470 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
471  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
472  LUnallocated::USED_AT_START));
473 }
474 
475 
476 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
477  return value->IsConstant()
478  ? chunk_->DefineConstantOperand(HConstant::cast(value))
479  : Use(value);
480 }
481 
482 
483 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
484  return value->IsConstant()
485  ? chunk_->DefineConstantOperand(HConstant::cast(value))
486  : UseAtStart(value);
487 }
488 
489 
490 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
491  return value->IsConstant()
492  ? chunk_->DefineConstantOperand(HConstant::cast(value))
493  : UseRegister(value);
494 }
495 
496 
497 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
498  return value->IsConstant()
499  ? chunk_->DefineConstantOperand(HConstant::cast(value))
500  : UseRegisterAtStart(value);
501 }
502 
503 
504 LOperand* LChunkBuilder::UseConstant(HValue* value) {
505  return chunk_->DefineConstantOperand(HConstant::cast(value));
506 }
507 
508 
509 LOperand* LChunkBuilder::UseAny(HValue* value) {
510  return value->IsConstant()
511  ? chunk_->DefineConstantOperand(HConstant::cast(value))
512  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
513 }
514 
515 
516 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
517  if (value->EmitAtUses()) {
518  HInstruction* instr = HInstruction::cast(value);
519  VisitInstruction(instr);
520  }
521  operand->set_virtual_register(value->id());
522  return operand;
523 }
524 
525 
526 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
527  LUnallocated* result) {
528  result->set_virtual_register(current_instruction_->id());
529  instr->set_result(result);
530  return instr;
531 }
532 
533 
534 LInstruction* LChunkBuilder::DefineAsRegister(
535  LTemplateResultInstruction<1>* instr) {
536  return Define(instr,
537  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
538 }
539 
540 
541 LInstruction* LChunkBuilder::DefineAsSpilled(
542  LTemplateResultInstruction<1>* instr, int index) {
543  return Define(instr,
544  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
545 }
546 
547 
548 LInstruction* LChunkBuilder::DefineSameAsFirst(
549  LTemplateResultInstruction<1>* instr) {
550  return Define(instr,
551  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
552 }
553 
554 
555 LInstruction* LChunkBuilder::DefineFixed(
556  LTemplateResultInstruction<1>* instr, Register reg) {
557  return Define(instr, ToUnallocated(reg));
558 }
559 
560 
561 LInstruction* LChunkBuilder::DefineFixedDouble(
562  LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
563  return Define(instr, ToUnallocated(reg));
564 }
565 
566 
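// Attaches a snapshot of the current hydrogen environment to the instruction
// so that, if the instruction deoptimizes, the unoptimized frame state can be
// reconstructed at this point.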
567 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
568  HEnvironment* hydrogen_env = current_block_->last_environment();
569  int argument_index_accumulator = 0;
570  ZoneList<HValue*> objects_to_materialize(0, zone());
571  instr->set_environment(CreateEnvironment(hydrogen_env,
572  &argument_index_accumulator,
573  &objects_to_materialize));
574  return instr;
575 }
576 
577 
578 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
579  HInstruction* hinstr,
580  CanDeoptimize can_deoptimize) {
581  info()->MarkAsNonDeferredCalling();
582 #ifdef DEBUG
583  instr->VerifyCall();
584 #endif
585  instr->MarkAsCall();
586  instr = AssignPointerMap(instr);
587 
588  // If the instruction does not have side effects, lazy deoptimization
589  // after the call will try to deoptimize to the point before the call.
590  // Thus we still need to attach an environment to this call even if the
591  // call sequence cannot deoptimize eagerly.
592  bool needs_environment =
593  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
594  !hinstr->HasObservableSideEffects();
595  if (needs_environment && !instr->HasEnvironment()) {
596  instr = AssignEnvironment(instr);
597  // We can't really figure out if the environment is needed or not.
598  instr->environment()->set_has_been_used();
599  }
600 
601  return instr;
602 }
603 
604 
605 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
606  DCHECK(!instr->HasPointerMap());
607  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
608  return instr;
609 }
610 
611 
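// TempRegister / TempDoubleRegister allocate a fresh virtual register with a
// must-have-register policy, for scratch values that the generated code
// clobbers.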
612 LUnallocated* LChunkBuilder::TempRegister() {
613  LUnallocated* operand =
614  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
615  int vreg = allocator_->GetVirtualRegister();
616  if (!allocator_->AllocationOk()) {
617  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
618  vreg = 0;
619  }
620  operand->set_virtual_register(vreg);
621  return operand;
622 }
623 
624 
625 LUnallocated* LChunkBuilder::TempDoubleRegister() {
626  LUnallocated* operand =
627  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_DOUBLE_REGISTER);
628  int vreg = allocator_->GetVirtualRegister();
629  if (!allocator_->AllocationOk()) {
630  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
631  vreg = 0;
632  }
633  operand->set_virtual_register(vreg);
634  return operand;
635 }
636 
637 
638 LOperand* LChunkBuilder::FixedTemp(Register reg) {
639  LUnallocated* operand = ToUnallocated(reg);
640  DCHECK(operand->HasFixedPolicy());
641  return operand;
642 }
643 
644 
645 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
646  LUnallocated* operand = ToUnallocated(reg);
647  DCHECK(operand->HasFixedPolicy());
648  return operand;
649 }
650 
651 
652 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
653  return new(zone()) LLabel(instr->block());
654 }
655 
656 
657 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
658  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
659 }
660 
661 
662 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
663  UNREACHABLE();
664  return NULL;
665 }
666 
667 
668 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
669  return AssignEnvironment(new(zone()) LDeoptimize);
670 }
671 
672 
673 LInstruction* LChunkBuilder::DoShift(Token::Value op,
674  HBitwiseBinaryOperation* instr) {
675  if (instr->representation().IsSmiOrInteger32()) {
676  DCHECK(instr->left()->representation().Equals(instr->representation()));
677  DCHECK(instr->right()->representation().Equals(instr->representation()));
678  LOperand* left = UseRegisterAtStart(instr->left());
679 
680  HValue* right_value = instr->right();
681  LOperand* right = NULL;
682  int constant_value = 0;
683  bool does_deopt = false;
684  if (right_value->IsConstant()) {
685  HConstant* constant = HConstant::cast(right_value);
686  right = chunk_->DefineConstantOperand(constant);
687  constant_value = constant->Integer32Value() & 0x1f;
688  // Left shifts can deoptimize if we shift by > 0 and the result cannot be
689  // truncated to smi.
690  if (instr->representation().IsSmi() && constant_value > 0) {
691  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
692  }
693  } else {
694  right = UseRegisterAtStart(right_value);
695  }
696 
697  // Shift operations can only deoptimize if we do a logical shift
698  // by 0 and the result cannot be truncated to int32.
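 // For example, "x >>> 0" produces an unsigned 32-bit result; if x may have
 // the sign bit set and the result is not truncated back to int32, the value
 // cannot be represented as an int32 and the operation must deoptimize.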
699  if (op == Token::SHR && constant_value == 0) {
700  if (FLAG_opt_safe_uint32_operations) {
701  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
702  } else {
703  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
704  }
705  }
706 
707  LInstruction* result =
708  DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
709  return does_deopt ? AssignEnvironment(result) : result;
710  } else {
711  return DoArithmeticT(op, instr);
712  }
713 }
714 
715 
716 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
717  HArithmeticBinaryOperation* instr) {
718  DCHECK(instr->representation().IsDouble());
719  DCHECK(instr->left()->representation().IsDouble());
720  DCHECK(instr->right()->representation().IsDouble());
721  if (op == Token::MOD) {
722  LOperand* left = UseFixedDouble(instr->left(), d0);
723  LOperand* right = UseFixedDouble(instr->right(), d1);
724  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
725  return MarkAsCall(DefineFixedDouble(result, d0), instr);
726  } else {
727  LOperand* left = UseRegisterAtStart(instr->left());
728  LOperand* right = UseRegisterAtStart(instr->right());
729  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
730  return DefineAsRegister(result);
731  }
732 }
733 
734 
735 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
736  HBinaryOperation* instr) {
737  HValue* left = instr->left();
738  HValue* right = instr->right();
739  DCHECK(left->representation().IsTagged());
740  DCHECK(right->representation().IsTagged());
741  LOperand* context = UseFixed(instr->context(), cp);
742  LOperand* left_operand = UseFixed(left, r1);
743  LOperand* right_operand = UseFixed(right, r0);
744  LArithmeticT* result =
745  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
746  return MarkAsCall(DefineFixed(result, r0), instr);
747 }
748 
749 
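// Translates one hydrogen basic block into lithium instructions. The block's
// incoming HEnvironment is established first (copied from the predecessor or
// rebuilt at a phi join), then every instruction that is not emitted at its
// uses is visited in order.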
750 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
751  DCHECK(is_building());
752  current_block_ = block;
753  next_block_ = next_block;
754  if (block->IsStartBlock()) {
755  block->UpdateEnvironment(graph_->start_environment());
756  argument_count_ = 0;
757  } else if (block->predecessors()->length() == 1) {
758  // We have a single predecessor => copy environment and outgoing
759  // argument count from the predecessor.
760  DCHECK(block->phis()->length() == 0);
761  HBasicBlock* pred = block->predecessors()->at(0);
762  HEnvironment* last_environment = pred->last_environment();
763  DCHECK(last_environment != NULL);
764  // Only copy the environment if it is used again later.
765  if (pred->end()->SecondSuccessor() == NULL) {
766  DCHECK(pred->end()->FirstSuccessor() == block);
767  } else {
768  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
769  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
770  last_environment = last_environment->Copy();
771  }
772  }
773  block->UpdateEnvironment(last_environment);
774  DCHECK(pred->argument_count() >= 0);
775  argument_count_ = pred->argument_count();
776  } else {
777  // We are at a state join => process phis.
778  HBasicBlock* pred = block->predecessors()->at(0);
779  // No need to copy the environment, it cannot be used later.
780  HEnvironment* last_environment = pred->last_environment();
781  for (int i = 0; i < block->phis()->length(); ++i) {
782  HPhi* phi = block->phis()->at(i);
783  if (phi->HasMergedIndex()) {
784  last_environment->SetValueAt(phi->merged_index(), phi);
785  }
786  }
787  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
788  if (block->deleted_phis()->at(i) < last_environment->length()) {
789  last_environment->SetValueAt(block->deleted_phis()->at(i),
790  graph_->GetConstantUndefined());
791  }
792  }
793  block->UpdateEnvironment(last_environment);
794  // Pick up the outgoing argument count of one of the predecessors.
795  argument_count_ = pred->argument_count();
796  }
797  HInstruction* current = block->first();
798  int start = chunk_->instructions()->length();
799  while (current != NULL && !is_aborted()) {
800  // Code for constants in registers is generated lazily.
801  if (!current->EmitAtUses()) {
802  VisitInstruction(current);
803  }
804  current = current->next();
805  }
806  int end = chunk_->instructions()->length() - 1;
807  if (end >= start) {
808  block->set_first_instruction_index(start);
809  block->set_last_instruction_index(end);
810  }
811  block->set_argument_count(argument_count_);
812  next_block_ = NULL;
813  current_block_ = NULL;
814 }
815 
816 
817 void LChunkBuilder::VisitInstruction(HInstruction* current) {
818  HInstruction* old_current = current_instruction_;
819  current_instruction_ = current;
820 
821  LInstruction* instr = NULL;
822  if (current->CanReplaceWithDummyUses()) {
823  if (current->OperandCount() == 0) {
824  instr = DefineAsRegister(new(zone()) LDummy());
825  } else {
826  DCHECK(!current->OperandAt(0)->IsControlInstruction());
827  instr = DefineAsRegister(new(zone())
828  LDummyUse(UseAny(current->OperandAt(0))));
829  }
830  for (int i = 1; i < current->OperandCount(); ++i) {
831  if (current->OperandAt(i)->IsControlInstruction()) continue;
832  LInstruction* dummy =
833  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
834  dummy->set_hydrogen_value(current);
835  chunk_->AddInstruction(dummy, current_block_);
836  }
837  } else {
838  HBasicBlock* successor;
839  if (current->IsControlInstruction() &&
840  HControlInstruction::cast(current)->KnownSuccessorBlock(&successor) &&
841  successor != NULL) {
842  instr = new(zone()) LGoto(successor);
843  } else {
844  instr = current->CompileToLithium(this);
845  }
846  }
847 
848  argument_count_ += current->argument_delta();
849  DCHECK(argument_count_ >= 0);
850 
851  if (instr != NULL) {
852  AddInstruction(instr, current);
853  }
854 
855  current_instruction_ = old_current;
856 }
857 
858 
859 void LChunkBuilder::AddInstruction(LInstruction* instr,
860  HInstruction* hydrogen_val) {
861  // Associate the hydrogen instruction first, since we may need it for
862  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
863  instr->set_hydrogen_value(hydrogen_val);
864 
865 #if DEBUG
866  // Make sure that the lithium instruction has either no fixed register
867  // constraints in temps or the result OR no uses that are only used at
868  // start. If this invariant doesn't hold, the register allocator can decide
869  // to insert a split of a range immediately before the instruction due to an
870  // already allocated register needing to be used for the instruction's fixed
871  // register constraint. In this case, the register allocator won't see an
872  // interference between the split child and the use-at-start (it would if
873  // it were just a plain use), so it is free to move the split child into
874  // the same register that is used for the use-at-start.
875  // See https://code.google.com/p/chromium/issues/detail?id=201590
876  if (!(instr->ClobbersRegisters() &&
877  instr->ClobbersDoubleRegisters(isolate()))) {
878  int fixed = 0;
879  int used_at_start = 0;
880  for (UseIterator it(instr); !it.Done(); it.Advance()) {
881  LUnallocated* operand = LUnallocated::cast(it.Current());
882  if (operand->IsUsedAtStart()) ++used_at_start;
883  }
884  if (instr->Output() != NULL) {
885  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
886  }
887  for (TempIterator it(instr); !it.Done(); it.Advance()) {
888  LUnallocated* operand = LUnallocated::cast(it.Current());
889  if (operand->HasFixedPolicy()) ++fixed;
890  }
891  DCHECK(fixed == 0 || used_at_start == 0);
892  }
893 #endif
894 
895  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
896  instr = AssignPointerMap(instr);
897  }
898  if (FLAG_stress_environments && !instr->HasEnvironment()) {
899  instr = AssignEnvironment(instr);
900  }
901  chunk_->AddInstruction(instr, current_block_);
902 
903  if (instr->IsCall()) {
904  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
905  LInstruction* instruction_needing_environment = NULL;
906  if (hydrogen_val->HasObservableSideEffects()) {
907  HSimulate* sim = HSimulate::cast(hydrogen_val->next());
908  instruction_needing_environment = instr;
909  sim->ReplayEnvironment(current_block_->last_environment());
910  hydrogen_value_for_lazy_bailout = sim;
911  }
912  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
913  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
914  chunk_->AddInstruction(bailout, current_block_);
915  if (instruction_needing_environment != NULL) {
916  // Store the lazy deopt environment with the instruction if needed.
917  // Right now it is only used for LInstanceOfKnownGlobal.
918  instruction_needing_environment->
919  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
920  }
921  }
922 }
923 
924 
925 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
926  return new(zone()) LGoto(instr->FirstSuccessor());
927 }
928 
929 
930 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
931  HValue* value = instr->value();
932  Representation r = value->representation();
933  HType type = value->type();
934  ToBooleanStub::Types expected = instr->expected_input_types();
935  if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
936 
937  bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() ||
938  type.IsJSArray() || type.IsHeapNumber() || type.IsString();
939  LInstruction* branch = new(zone()) LBranch(UseRegister(value));
940  if (!easy_case &&
941  ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) ||
942  !expected.IsGeneric())) {
943  branch = AssignEnvironment(branch);
944  }
945  return branch;
946 }
947 
948 
949 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
950  return new(zone()) LDebugBreak();
951 }
952 
953 
954 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
955  DCHECK(instr->value()->representation().IsTagged());
956  LOperand* value = UseRegisterAtStart(instr->value());
957  LOperand* temp = TempRegister();
958  return new(zone()) LCmpMapAndBranch(value, temp);
959 }
960 
961 
962 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* instr) {
963  info()->MarkAsRequiresFrame();
964  LOperand* value = UseRegister(instr->value());
965  return DefineAsRegister(new(zone()) LArgumentsLength(value));
966 }
967 
968 
969 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
970  info()->MarkAsRequiresFrame();
971  return DefineAsRegister(new(zone()) LArgumentsElements);
972 }
973 
974 
975 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
976  LOperand* context = UseFixed(instr->context(), cp);
977  LInstanceOf* result =
978  new(zone()) LInstanceOf(context, UseFixed(instr->left(), r0),
979  UseFixed(instr->right(), r1));
980  return MarkAsCall(DefineFixed(result, r0), instr);
981 }
982 
983 
984 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
985  HInstanceOfKnownGlobal* instr) {
986  LInstanceOfKnownGlobal* result =
987  new(zone()) LInstanceOfKnownGlobal(
988  UseFixed(instr->context(), cp),
989  UseFixed(instr->left(), r0),
990  FixedTemp(r4));
991  return MarkAsCall(DefineFixed(result, r0), instr);
992 }
993 
994 
995 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
996  LOperand* receiver = UseRegisterAtStart(instr->receiver());
997  LOperand* function = UseRegisterAtStart(instr->function());
998  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
999  return AssignEnvironment(DefineAsRegister(result));
1000 }
1001 
1002 
1003 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1004  LOperand* function = UseFixed(instr->function(), r1);
1005  LOperand* receiver = UseFixed(instr->receiver(), r0);
1006  LOperand* length = UseFixed(instr->length(), r2);
1007  LOperand* elements = UseFixed(instr->elements(), r3);
1008  LApplyArguments* result = new(zone()) LApplyArguments(function,
1009  receiver,
1010  length,
1011  elements);
1012  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
1013 }
1014 
1015 
1016 LInstruction* LChunkBuilder::DoPushArguments(HPushArguments* instr) {
1017  int argc = instr->OperandCount();
1018  for (int i = 0; i < argc; ++i) {
1019  LOperand* argument = Use(instr->argument(i));
1020  AddInstruction(new(zone()) LPushArgument(argument), instr);
1021  }
1022  return NULL;
1023 }
1024 
1025 
1026 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1027  HStoreCodeEntry* store_code_entry) {
1028  LOperand* function = UseRegister(store_code_entry->function());
1029  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1030  return new(zone()) LStoreCodeEntry(function, code_object);
1031 }
1032 
1033 
1034 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1035  HInnerAllocatedObject* instr) {
1036  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1037  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1038  return DefineAsRegister(
1039  new(zone()) LInnerAllocatedObject(base_object, offset));
1040 }
1041 
1042 
1043 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1044  return instr->HasNoUses()
1045  ? NULL
1046  : DefineAsRegister(new(zone()) LThisFunction);
1047 }
1048 
1049 
1050 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1051  if (instr->HasNoUses()) return NULL;
1052 
1053  if (info()->IsStub()) {
1054  return DefineFixed(new(zone()) LContext, cp);
1055  }
1056 
1057  return DefineAsRegister(new(zone()) LContext);
1058 }
1059 
1060 
1061 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1062  LOperand* context = UseFixed(instr->context(), cp);
1063  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1064 }
1065 
1066 
1067 LInstruction* LChunkBuilder::DoCallJSFunction(
1068  HCallJSFunction* instr) {
1069  LOperand* function = UseFixed(instr->function(), r1);
1070 
1071  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1072 
1073  return MarkAsCall(DefineFixed(result, r0), instr);
1074 }
1075 
1076 
1077 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1078  HCallWithDescriptor* instr) {
1079  CallInterfaceDescriptor descriptor = instr->descriptor();
1080 
1081  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1082  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1083  ops.Add(target, zone());
1084  for (int i = 1; i < instr->OperandCount(); i++) {
1085  LOperand* op =
1086  UseFixed(instr->OperandAt(i), descriptor.GetParameterRegister(i - 1));
1087  ops.Add(op, zone());
1088  }
1089 
1090  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1091  descriptor, ops, zone());
1092  return MarkAsCall(DefineFixed(result, r0), instr);
1093 }
1094 
1095 
1096 LInstruction* LChunkBuilder::DoTailCallThroughMegamorphicCache(
1097  HTailCallThroughMegamorphicCache* instr) {
1098  LOperand* context = UseFixed(instr->context(), cp);
1099  LOperand* receiver_register =
1100  UseFixed(instr->receiver(), LoadDescriptor::ReceiverRegister());
1101  LOperand* name_register =
1102  UseFixed(instr->name(), LoadDescriptor::NameRegister());
1103  // Not marked as call. It can't deoptimize, and it never returns.
1104  return new (zone()) LTailCallThroughMegamorphicCache(
1105  context, receiver_register, name_register);
1106 }
1107 
1108 
1109 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1110  LOperand* context = UseFixed(instr->context(), cp);
1111  LOperand* function = UseFixed(instr->function(), r1);
1112  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1113  return MarkAsCall(DefineFixed(result, r0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1114 }
1115 
1116 
1117 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1118  switch (instr->op()) {
1119  case kMathFloor:
1120  return DoMathFloor(instr);
1121  case kMathRound:
1122  return DoMathRound(instr);
1123  case kMathFround:
1124  return DoMathFround(instr);
1125  case kMathAbs:
1126  return DoMathAbs(instr);
1127  case kMathLog:
1128  return DoMathLog(instr);
1129  case kMathExp:
1130  return DoMathExp(instr);
1131  case kMathSqrt:
1132  return DoMathSqrt(instr);
1133  case kMathPowHalf:
1134  return DoMathPowHalf(instr);
1135  case kMathClz32:
1136  return DoMathClz32(instr);
1137  default:
1138  UNREACHABLE();
1139  return NULL;
1140  }
1141 }
1142 
1143 
1144 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1145  LOperand* input = UseRegister(instr->value());
1146  LMathFloor* result = new(zone()) LMathFloor(input);
1147  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1148 }
1149 
1150 
1151 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1152  LOperand* input = UseRegister(instr->value());
1153  LOperand* temp = TempDoubleRegister();
1154  LMathRound* result = new(zone()) LMathRound(input, temp);
1155  return AssignEnvironment(DefineAsRegister(result));
1156 }
1157 
1158 
1159 LInstruction* LChunkBuilder::DoMathFround(HUnaryMathOperation* instr) {
1160  LOperand* input = UseRegister(instr->value());
1161  LMathFround* result = new (zone()) LMathFround(input);
1162  return DefineAsRegister(result);
1163 }
1164 
1165 
1166 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1167  Representation r = instr->value()->representation();
1168  LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
1169  ? NULL
1170  : UseFixed(instr->context(), cp);
1171  LOperand* input = UseRegister(instr->value());
1172  LInstruction* result =
1173  DefineAsRegister(new(zone()) LMathAbs(context, input));
1174  if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
1175  if (!r.IsDouble()) result = AssignEnvironment(result);
1176  return result;
1177 }
1178 
1179 
1180 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1181  DCHECK(instr->representation().IsDouble());
1182  DCHECK(instr->value()->representation().IsDouble());
1183  LOperand* input = UseFixedDouble(instr->value(), d0);
1184  return MarkAsCall(DefineFixedDouble(new(zone()) LMathLog(input), d0), instr);
1185 }
1186 
1187 
1188 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1189  LOperand* input = UseRegisterAtStart(instr->value());
1190  LMathClz32* result = new(zone()) LMathClz32(input);
1191  return DefineAsRegister(result);
1192 }
1193 
1194 
1195 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1196  DCHECK(instr->representation().IsDouble());
1197  DCHECK(instr->value()->representation().IsDouble());
1198  LOperand* input = UseRegister(instr->value());
1199  LOperand* temp1 = TempRegister();
1200  LOperand* temp2 = TempRegister();
1201  LOperand* double_temp = TempDoubleRegister();
1202  LMathExp* result = new(zone()) LMathExp(input, double_temp, temp1, temp2);
1203  return DefineAsRegister(result);
1204 }
1205 
1206 
1207 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1208  LOperand* input = UseRegisterAtStart(instr->value());
1209  LMathSqrt* result = new(zone()) LMathSqrt(input);
1210  return DefineAsRegister(result);
1211 }
1212 
1213 
1214 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1215  LOperand* input = UseRegisterAtStart(instr->value());
1216  LMathPowHalf* result = new(zone()) LMathPowHalf(input);
1217  return DefineAsRegister(result);
1218 }
1219 
1220 
1221 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1222  LOperand* context = UseFixed(instr->context(), cp);
1223  LOperand* constructor = UseFixed(instr->constructor(), r1);
1224  LCallNew* result = new(zone()) LCallNew(context, constructor);
1225  return MarkAsCall(DefineFixed(result, r0), instr);
1226 }
1227 
1228 
1229 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1230  LOperand* context = UseFixed(instr->context(), cp);
1231  LOperand* constructor = UseFixed(instr->constructor(), r1);
1232  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1233  return MarkAsCall(DefineFixed(result, r0), instr);
1234 }
1235 
1236 
1237 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1238  LOperand* context = UseFixed(instr->context(), cp);
1239  LOperand* function = UseFixed(instr->function(), r1);
1240  LCallFunction* call = new(zone()) LCallFunction(context, function);
1241  return MarkAsCall(DefineFixed(call, r0), instr);
1242 }
1243 
1244 
1245 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1246  LOperand* context = UseFixed(instr->context(), cp);
1247  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), r0), instr);
1248 }
1249 
1250 
1251 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1252  return DoShift(Token::ROR, instr);
1253 }
1254 
1255 
1256 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1257  return DoShift(Token::SHR, instr);
1258 }
1259 
1260 
1261 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1262  return DoShift(Token::SAR, instr);
1263 }
1264 
1265 
1266 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1267  return DoShift(Token::SHL, instr);
1268 }
1269 
1270 
1271 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1272  if (instr->representation().IsSmiOrInteger32()) {
1273  DCHECK(instr->left()->representation().Equals(instr->representation()));
1274  DCHECK(instr->right()->representation().Equals(instr->representation()));
1275  DCHECK(instr->CheckFlag(HValue::kTruncatingToInt32));
1276 
1277  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1278  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1279  return DefineAsRegister(new(zone()) LBitI(left, right));
1280  } else {
1281  return DoArithmeticT(instr->op(), instr);
1282  }
1283 }
1284 
1285 
1286 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1287  DCHECK(instr->representation().IsSmiOrInteger32());
1288  DCHECK(instr->left()->representation().Equals(instr->representation()));
1289  DCHECK(instr->right()->representation().Equals(instr->representation()));
1290  LOperand* dividend = UseRegister(instr->left());
1291  int32_t divisor = instr->right()->GetInteger32Constant();
1292  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1293  dividend, divisor));
1294  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1295  (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1296  (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1297  divisor != 1 && divisor != -1)) {
1298  result = AssignEnvironment(result);
1299  }
1300  return result;
1301 }
1302 
1303 
1304 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1305  DCHECK(instr->representation().IsInteger32());
1306  DCHECK(instr->left()->representation().Equals(instr->representation()));
1307  DCHECK(instr->right()->representation().Equals(instr->representation()));
1308  LOperand* dividend = UseRegister(instr->left());
1309  int32_t divisor = instr->right()->GetInteger32Constant();
1310  LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1311  dividend, divisor));
1312  if (divisor == 0 ||
1313  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1314  !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1315  result = AssignEnvironment(result);
1316  }
1317  return result;
1318 }
1319 
1320 
1321 LInstruction* LChunkBuilder::DoDivI(HDiv* instr) {
1322  DCHECK(instr->representation().IsSmiOrInteger32());
1323  DCHECK(instr->left()->representation().Equals(instr->representation()));
1324  DCHECK(instr->right()->representation().Equals(instr->representation()));
1325  LOperand* dividend = UseRegister(instr->left());
1326  LOperand* divisor = UseRegister(instr->right());
1327  LOperand* temp =
1328  CpuFeatures::IsSupported(SUDIV) ? NULL : TempDoubleRegister();
1329  LInstruction* result =
1330  DefineAsRegister(new(zone()) LDivI(dividend, divisor, temp));
1331  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1332  instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1333  (instr->CheckFlag(HValue::kCanOverflow) &&
1334  (!CpuFeatures::IsSupported(SUDIV) ||
1335  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) ||
1336  (!instr->IsMathFloorOfDiv() &&
1337  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
1338  result = AssignEnvironment(result);
1339  }
1340  return result;
1341 }
1342 
1343 
1344 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1345  if (instr->representation().IsSmiOrInteger32()) {
1346  if (instr->RightIsPowerOf2()) {
1347  return DoDivByPowerOf2I(instr);
1348  } else if (instr->right()->IsConstant()) {
1349  return DoDivByConstI(instr);
1350  } else {
1351  return DoDivI(instr);
1352  }
1353  } else if (instr->representation().IsDouble()) {
1354  return DoArithmeticD(Token::DIV, instr);
1355  } else {
1356  return DoArithmeticT(Token::DIV, instr);
1357  }
1358 }
1359 
1360 
1361 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1362  LOperand* dividend = UseRegisterAtStart(instr->left());
1363  int32_t divisor = instr->right()->GetInteger32Constant();
1364  LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1365  dividend, divisor));
1366  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1367  (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1368  result = AssignEnvironment(result);
1369  }
1370  return result;
1371 }
1372 
1373 
1374 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1375  DCHECK(instr->representation().IsInteger32());
1376  DCHECK(instr->left()->representation().Equals(instr->representation()));
1377  DCHECK(instr->right()->representation().Equals(instr->representation()));
1378  LOperand* dividend = UseRegister(instr->left());
1379  int32_t divisor = instr->right()->GetInteger32Constant();
1380  LOperand* temp =
1381  ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1382  (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1383  NULL : TempRegister();
1384  LInstruction* result = DefineAsRegister(
1385  new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1386  if (divisor == 0 ||
1387  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1388  result = AssignEnvironment(result);
1389  }
1390  return result;
1391 }
1392 
1393 
1394 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1395  DCHECK(instr->representation().IsSmiOrInteger32());
1396  DCHECK(instr->left()->representation().Equals(instr->representation()));
1397  DCHECK(instr->right()->representation().Equals(instr->representation()));
1398  LOperand* dividend = UseRegister(instr->left());
1399  LOperand* divisor = UseRegister(instr->right());
1400  LOperand* temp =
1401  CpuFeatures::IsSupported(SUDIV) ? NULL : TempDoubleRegister();
1402  LFlooringDivI* div = new(zone()) LFlooringDivI(dividend, divisor, temp);
1403  return AssignEnvironment(DefineAsRegister(div));
1404 }
1405 
1406 
1407 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1408  if (instr->RightIsPowerOf2()) {
1409  return DoFlooringDivByPowerOf2I(instr);
1410  } else if (instr->right()->IsConstant()) {
1411  return DoFlooringDivByConstI(instr);
1412  } else {
1413  return DoFlooringDivI(instr);
1414  }
1415 }
1416 
1417 
1418 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1419  DCHECK(instr->representation().IsSmiOrInteger32());
1420  DCHECK(instr->left()->representation().Equals(instr->representation()));
1421  DCHECK(instr->right()->representation().Equals(instr->representation()));
1422  LOperand* dividend = UseRegisterAtStart(instr->left());
1423  int32_t divisor = instr->right()->GetInteger32Constant();
1424  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1425  dividend, divisor));
1426  if (instr->CheckFlag(HValue::kLeftCanBeNegative) &&
1427  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1428  result = AssignEnvironment(result);
1429  }
1430  return result;
1431 }
1432 
1433 
1434 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1435  DCHECK(instr->representation().IsSmiOrInteger32());
1436  DCHECK(instr->left()->representation().Equals(instr->representation()));
1437  DCHECK(instr->right()->representation().Equals(instr->representation()));
1438  LOperand* dividend = UseRegister(instr->left());
1439  int32_t divisor = instr->right()->GetInteger32Constant();
1440  LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1441  dividend, divisor));
1442  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1443  result = AssignEnvironment(result);
1444  }
1445  return result;
1446 }
1447 
1448 
1449 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1450  DCHECK(instr->representation().IsSmiOrInteger32());
1451  DCHECK(instr->left()->representation().Equals(instr->representation()));
1452  DCHECK(instr->right()->representation().Equals(instr->representation()));
1453  LOperand* dividend = UseRegister(instr->left());
1454  LOperand* divisor = UseRegister(instr->right());
1455  LOperand* temp =
1456  CpuFeatures::IsSupported(SUDIV) ? NULL : TempDoubleRegister();
1457  LOperand* temp2 =
1458  CpuFeatures::IsSupported(SUDIV) ? NULL : TempDoubleRegister();
1459  LInstruction* result = DefineAsRegister(new(zone()) LModI(
1460  dividend, divisor, temp, temp2));
1461  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1462  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1463  result = AssignEnvironment(result);
1464  }
1465  return result;
1466 }
1467 
1468 
1469 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1470  if (instr->representation().IsSmiOrInteger32()) {
1471  if (instr->RightIsPowerOf2()) {
1472  return DoModByPowerOf2I(instr);
1473  } else if (instr->right()->IsConstant()) {
1474  return DoModByConstI(instr);
1475  } else {
1476  return DoModI(instr);
1477  }
1478  } else if (instr->representation().IsDouble()) {
1479  return DoArithmeticD(Token::MOD, instr);
1480  } else {
1481  return DoArithmeticT(Token::MOD, instr);
1482  }
1483 }
1484 
1485 
1486 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1487  if (instr->representation().IsSmiOrInteger32()) {
1488  DCHECK(instr->left()->representation().Equals(instr->representation()));
1489  DCHECK(instr->right()->representation().Equals(instr->representation()));
1490  HValue* left = instr->BetterLeftOperand();
1491  HValue* right = instr->BetterRightOperand();
1492  LOperand* left_op;
1493  LOperand* right_op;
1494  bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1495  bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1496 
1497  if (right->IsConstant()) {
1498  HConstant* constant = HConstant::cast(right);
1499  int32_t constant_value = constant->Integer32Value();
1500  // Constants -1, 0 and 1 can be optimized even if the result can overflow.
1501  // Other constants can be optimized only when the result cannot overflow.
1502  if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
1503  left_op = UseRegisterAtStart(left);
1504  right_op = UseConstant(right);
1505  } else {
1506  if (bailout_on_minus_zero) {
1507  left_op = UseRegister(left);
1508  } else {
1509  left_op = UseRegisterAtStart(left);
1510  }
1511  right_op = UseRegister(right);
1512  }
1513  } else {
1514  if (bailout_on_minus_zero) {
1515  left_op = UseRegister(left);
1516  } else {
1517  left_op = UseRegisterAtStart(left);
1518  }
1519  right_op = UseRegister(right);
1520  }
1521  LMulI* mul = new(zone()) LMulI(left_op, right_op);
1522  if (can_overflow || bailout_on_minus_zero) {
1523  AssignEnvironment(mul);
1524  }
1525  return DefineAsRegister(mul);
1526 
1527  } else if (instr->representation().IsDouble()) {
1528  if (instr->HasOneUse() && (instr->uses().value()->IsAdd() ||
1529  instr->uses().value()->IsSub())) {
1530  HBinaryOperation* use = HBinaryOperation::cast(instr->uses().value());
1531 
1532  if (use->IsAdd() && instr == use->left()) {
1533  // This mul is the lhs of an add. The add and mul will be folded into a
1534  // multiply-add in DoAdd.
1535  return NULL;
1536  }
1537  if (instr == use->right() && use->IsAdd() && !use->left()->IsMul()) {
1538  // This mul is the rhs of an add, where the lhs is not another mul.
1539  // The add and mul will be folded into a multiply-add in DoAdd.
1540  return NULL;
1541  }
1542  if (instr == use->right() && use->IsSub()) {
1543  // This mul is the rhs of a sub. The sub and mul will be folded into a
1544  // multiply-sub in DoSub.
1545  return NULL;
1546  }
1547  }
1548 
1549  return DoArithmeticD(Token::MUL, instr);
1550  } else {
1551  return DoArithmeticT(Token::MUL, instr);
1552  }
1553 }
1554 
1555 
1556 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1557  if (instr->representation().IsSmiOrInteger32()) {
1558  DCHECK(instr->left()->representation().Equals(instr->representation()));
1559  DCHECK(instr->right()->representation().Equals(instr->representation()));
1560 
1561  if (instr->left()->IsConstant()) {
1562  // If lhs is constant, do reverse subtraction instead.
1563  return DoRSub(instr);
1564  }
1565 
1566  LOperand* left = UseRegisterAtStart(instr->left());
1567  LOperand* right = UseOrConstantAtStart(instr->right());
1568  LSubI* sub = new(zone()) LSubI(left, right);
1569  LInstruction* result = DefineAsRegister(sub);
1570  if (instr->CheckFlag(HValue::kCanOverflow)) {
1571  result = AssignEnvironment(result);
1572  }
1573  return result;
1574  } else if (instr->representation().IsDouble()) {
1575  if (instr->right()->IsMul() && instr->right()->HasOneUse()) {
1576  return DoMultiplySub(instr->left(), HMul::cast(instr->right()));
1577  }
1578 
1579  return DoArithmeticD(Token::SUB, instr);
1580  } else {
1581  return DoArithmeticT(Token::SUB, instr);
1582  }
1583 }
1584 
1585 
1586 LInstruction* LChunkBuilder::DoRSub(HSub* instr) {
1587  DCHECK(instr->representation().IsSmiOrInteger32());
1588  DCHECK(instr->left()->representation().Equals(instr->representation()));
1589  DCHECK(instr->right()->representation().Equals(instr->representation()));
1590 
1591  // Note: The lhs of the subtraction becomes the rhs of the
1592  // reverse-subtraction.
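 // For example, for "1 - x" the register operand is x and the constant 1 ends
 // up as the immediate operand of the reverse-subtract (rsb) instruction.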
1593  LOperand* left = UseRegisterAtStart(instr->right());
1594  LOperand* right = UseOrConstantAtStart(instr->left());
1595  LRSubI* rsb = new(zone()) LRSubI(left, right);
1596  LInstruction* result = DefineAsRegister(rsb);
1597  if (instr->CheckFlag(HValue::kCanOverflow)) {
1598  result = AssignEnvironment(result);
1599  }
1600  return result;
1601 }
1602 
1603 
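// DoMultiplyAdd and DoMultiplySub fold a double add/sub whose operand is a
// single-use multiply into one fused LMultiplyAddD / LMultiplySubD node, which
// the ARM code generator can typically emit as a single vmla/vmls instruction.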
1604 LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
1605  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
1606  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
1607  LOperand* addend_op = UseRegisterAtStart(addend);
1608  return DefineSameAsFirst(new(zone()) LMultiplyAddD(addend_op, multiplier_op,
1609  multiplicand_op));
1610 }
1611 
1612 
1613 LInstruction* LChunkBuilder::DoMultiplySub(HValue* minuend, HMul* mul) {
1614  LOperand* minuend_op = UseRegisterAtStart(minuend);
1615  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
1616  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
1617 
1618  return DefineSameAsFirst(new(zone()) LMultiplySubD(minuend_op,
1619  multiplier_op,
1620  multiplicand_op));
1621 }
1622 
1623 
1624 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1625  if (instr->representation().IsSmiOrInteger32()) {
1626  DCHECK(instr->left()->representation().Equals(instr->representation()));
1627  DCHECK(instr->right()->representation().Equals(instr->representation()));
1628  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1629  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1630  LAddI* add = new(zone()) LAddI(left, right);
1631  LInstruction* result = DefineAsRegister(add);
1632  if (instr->CheckFlag(HValue::kCanOverflow)) {
1633  result = AssignEnvironment(result);
1634  }
1635  return result;
1636  } else if (instr->representation().IsExternal()) {
1637  DCHECK(instr->left()->representation().IsExternal());
1638  DCHECK(instr->right()->representation().IsInteger32());
1639  DCHECK(!instr->CheckFlag(HValue::kCanOverflow));
1640  LOperand* left = UseRegisterAtStart(instr->left());
1641  LOperand* right = UseOrConstantAtStart(instr->right());
1642  LAddI* add = new(zone()) LAddI(left, right);
1643  LInstruction* result = DefineAsRegister(add);
1644  return result;
1645  } else if (instr->representation().IsDouble()) {
1646  if (instr->left()->IsMul() && instr->left()->HasOneUse()) {
1647  return DoMultiplyAdd(HMul::cast(instr->left()), instr->right());
1648  }
1649 
1650  if (instr->right()->IsMul() && instr->right()->HasOneUse()) {
1651  DCHECK(!instr->left()->IsMul() || !instr->left()->HasOneUse());
1652  return DoMultiplyAdd(HMul::cast(instr->right()), instr->left());
1653  }
1654 
1655  return DoArithmeticD(Token::ADD, instr);
1656  } else {
1657  return DoArithmeticT(Token::ADD, instr);
1658  }
1659 }
1660 
1661 
1662 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1663  LOperand* left = NULL;
1664  LOperand* right = NULL;
1665  if (instr->representation().IsSmiOrInteger32()) {
1666  DCHECK(instr->left()->representation().Equals(instr->representation()));
1667  DCHECK(instr->right()->representation().Equals(instr->representation()));
1668  left = UseRegisterAtStart(instr->BetterLeftOperand());
1669  right = UseOrConstantAtStart(instr->BetterRightOperand());
1670  } else {
1671  DCHECK(instr->representation().IsDouble());
1672  DCHECK(instr->left()->representation().IsDouble());
1673  DCHECK(instr->right()->representation().IsDouble());
1674  left = UseRegisterAtStart(instr->left());
1675  right = UseRegisterAtStart(instr->right());
1676  }
1677  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1678 }
1679 
1680 
1681 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1682  DCHECK(instr->representation().IsDouble());
1683  // We call a C function for double power. It can't trigger a GC.
 1684  // We need to use a fixed result register for the call.
1685  Representation exponent_type = instr->right()->representation();
1686  DCHECK(instr->left()->representation().IsDouble());
1687  LOperand* left = UseFixedDouble(instr->left(), d0);
1688  LOperand* right =
1689  exponent_type.IsDouble()
1690  ? UseFixedDouble(instr->right(), d1)
1691  : UseFixed(instr->right(), MathPowTaggedDescriptor::exponent());
1692  LPower* result = new(zone()) LPower(left, right);
1693  return MarkAsCall(DefineFixedDouble(result, d2),
1694  instr,
1695  CAN_DEOPTIMIZE_EAGERLY);
1696 }
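// Note on the fixed registers above: the inputs are pinned to d0/d1 (or the
// tagged exponent register from the call descriptor) and the result to d2 to
// match the calling convention of the power helper that this lowers to;
// CAN_DEOPTIMIZE_EAGERLY attaches an environment so the call site itself can
// act as a deoptimization point.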
1697 
1698 
1699 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1700  DCHECK(instr->left()->representation().IsTagged());
1701  DCHECK(instr->right()->representation().IsTagged());
1702  LOperand* context = UseFixed(instr->context(), cp);
1703  LOperand* left = UseFixed(instr->left(), r1);
1704  LOperand* right = UseFixed(instr->right(), r0);
1705  LCmpT* result = new(zone()) LCmpT(context, left, right);
1706  return MarkAsCall(DefineFixed(result, r0), instr);
1707 }
1708 
1709 
1710 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1711  HCompareNumericAndBranch* instr) {
1712  Representation r = instr->representation();
1713  if (r.IsSmiOrInteger32()) {
1714  DCHECK(instr->left()->representation().Equals(r));
1715  DCHECK(instr->right()->representation().Equals(r));
1716  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1717  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1718  return new(zone()) LCompareNumericAndBranch(left, right);
1719  } else {
1720  DCHECK(r.IsDouble());
1721  DCHECK(instr->left()->representation().IsDouble());
1722  DCHECK(instr->right()->representation().IsDouble());
1723  LOperand* left = UseRegisterAtStart(instr->left());
1724  LOperand* right = UseRegisterAtStart(instr->right());
1725  return new(zone()) LCompareNumericAndBranch(left, right);
1726  }
1727 }
1728 
1729 
1730 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1731  HCompareObjectEqAndBranch* instr) {
1732  LOperand* left = UseRegisterAtStart(instr->left());
1733  LOperand* right = UseRegisterAtStart(instr->right());
1734  return new(zone()) LCmpObjectEqAndBranch(left, right);
1735 }
1736 
1737 
1738 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1739  HCompareHoleAndBranch* instr) {
1740  LOperand* value = UseRegisterAtStart(instr->value());
1741  return new(zone()) LCmpHoleAndBranch(value);
1742 }
1743 
1744 
1745 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1746  HCompareMinusZeroAndBranch* instr) {
1747  LOperand* value = UseRegister(instr->value());
1748  LOperand* scratch = TempRegister();
1749  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1750 }
1751 
1752 
1753 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1754  DCHECK(instr->value()->representation().IsTagged());
1755  LOperand* value = UseRegisterAtStart(instr->value());
1756  LOperand* temp = TempRegister();
1757  return new(zone()) LIsObjectAndBranch(value, temp);
1758 }
1759 
1760 
1761 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1762  DCHECK(instr->value()->representation().IsTagged());
1763  LOperand* value = UseRegisterAtStart(instr->value());
1764  LOperand* temp = TempRegister();
1765  return new(zone()) LIsStringAndBranch(value, temp);
1766 }
1767 
1768 
1769 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1770  DCHECK(instr->value()->representation().IsTagged());
1771  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1772 }
1773 
1774 
1775 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1776  HIsUndetectableAndBranch* instr) {
1777  DCHECK(instr->value()->representation().IsTagged());
1778  LOperand* value = UseRegisterAtStart(instr->value());
1779  return new(zone()) LIsUndetectableAndBranch(value, TempRegister());
1780 }
1781 
1782 
1783 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1784  HStringCompareAndBranch* instr) {
1785  DCHECK(instr->left()->representation().IsTagged());
1786  DCHECK(instr->right()->representation().IsTagged());
1787  LOperand* context = UseFixed(instr->context(), cp);
1788  LOperand* left = UseFixed(instr->left(), r1);
1789  LOperand* right = UseFixed(instr->right(), r0);
1790  LStringCompareAndBranch* result =
1791  new(zone()) LStringCompareAndBranch(context, left, right);
1792  return MarkAsCall(result, instr);
1793 }
1794 
1795 
1796 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1797  HHasInstanceTypeAndBranch* instr) {
1798  DCHECK(instr->value()->representation().IsTagged());
1799  LOperand* value = UseRegisterAtStart(instr->value());
1800  return new(zone()) LHasInstanceTypeAndBranch(value);
1801 }
1802 
1803 
1804 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1805  HGetCachedArrayIndex* instr) {
1806  DCHECK(instr->value()->representation().IsTagged());
1807  LOperand* value = UseRegisterAtStart(instr->value());
1808 
1809  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1810 }
1811 
1812 
1813 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1814  HHasCachedArrayIndexAndBranch* instr) {
1815  DCHECK(instr->value()->representation().IsTagged());
1816  return new(zone()) LHasCachedArrayIndexAndBranch(
1817  UseRegisterAtStart(instr->value()));
1818 }
1819 
1820 
1821 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1822  HClassOfTestAndBranch* instr) {
1823  DCHECK(instr->value()->representation().IsTagged());
1824  LOperand* value = UseRegister(instr->value());
1825  return new(zone()) LClassOfTestAndBranch(value, TempRegister());
1826 }
1827 
1828 
1829 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1830  LOperand* map = UseRegisterAtStart(instr->value());
1831  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1832 }
1833 
1834 
1835 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1836  LOperand* object = UseFixed(instr->value(), r0);
1837  LDateField* result =
1838  new(zone()) LDateField(object, FixedTemp(r1), instr->index());
1839  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
1840 }
1841 
1842 
1843 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1844  LOperand* string = UseRegisterAtStart(instr->string());
1845  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1846  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1847 }
1848 
1849 
1850 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1851  LOperand* string = UseRegisterAtStart(instr->string());
1852  LOperand* index = FLAG_debug_code
1853  ? UseRegisterAtStart(instr->index())
1854  : UseRegisterOrConstantAtStart(instr->index());
1855  LOperand* value = UseRegisterAtStart(instr->value());
1856  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
1857  return new(zone()) LSeqStringSetChar(context, string, index, value);
1858 }
1859 
1860 
1861 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1862  if (!FLAG_debug_code && instr->skip_check()) return NULL;
1863  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1864  LOperand* length = !index->IsConstantOperand()
1865  ? UseRegisterOrConstantAtStart(instr->length())
1866  : UseRegisterAtStart(instr->length());
1867  LInstruction* result = new(zone()) LBoundsCheck(index, length);
1868  if (!FLAG_debug_code || !instr->skip_check()) {
1869  result = AssignEnvironment(result);
1870  }
1871  return result;
1872 }
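// When the check is statically known to be unnecessary and --debug-code is
// off, no LBoundsCheck is emitted at all. Under --debug-code a skipped check
// is still emitted (presumably to assert rather than deoptimize), but it gets
// no deopt environment, which is what the two FLAG_debug_code tests above
// encode.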
1873 
1874 
1875 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1876  HBoundsCheckBaseIndexInformation* instr) {
1877  UNREACHABLE();
1878  return NULL;
1879 }
1880 
1881 
1882 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1883  // The control instruction marking the end of a block that completed
1884  // abruptly (e.g., threw an exception). There is nothing specific to do.
1885  return NULL;
1886 }
1887 
1888 
1889 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1890  return NULL;
1891 }
1892 
1893 
1894 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1895  // All HForceRepresentation instructions should be eliminated in the
1896  // representation change phase of Hydrogen.
1897  UNREACHABLE();
1898  return NULL;
1899 }
1900 
1901 
1902 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1903  Representation from = instr->from();
1904  Representation to = instr->to();
1905  HValue* val = instr->value();
1906  if (from.IsSmi()) {
1907  if (to.IsTagged()) {
1908  LOperand* value = UseRegister(val);
1909  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1910  }
1911  from = Representation::Tagged();
1912  }
1913  if (from.IsTagged()) {
1914  if (to.IsDouble()) {
1915  LOperand* value = UseRegister(val);
1916  LInstruction* result = DefineAsRegister(new(zone()) LNumberUntagD(value));
1917  if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1918  return result;
1919  } else if (to.IsSmi()) {
1920  LOperand* value = UseRegister(val);
1921  if (val->type().IsSmi()) {
1922  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1923  }
1924  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1925  } else {
1926  DCHECK(to.IsInteger32());
1927  if (val->type().IsSmi() || val->representation().IsSmi()) {
1928  LOperand* value = UseRegisterAtStart(val);
1929  return DefineAsRegister(new(zone()) LSmiUntag(value, false));
1930  } else {
1931  LOperand* value = UseRegister(val);
1932  LOperand* temp1 = TempRegister();
1933  LOperand* temp2 = TempDoubleRegister();
1934  LInstruction* result =
1935  DefineSameAsFirst(new(zone()) LTaggedToI(value, temp1, temp2));
1936  if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1937  return result;
1938  }
1939  }
1940  } else if (from.IsDouble()) {
1941  if (to.IsTagged()) {
1942  info()->MarkAsDeferredCalling();
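      // Tagging a double allocates a heap number on a deferred path; the
      // pointer map assigned below records live pointers in case that
      // allocation has to call into the runtime.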
1943  LOperand* value = UseRegister(val);
1944  LOperand* temp1 = TempRegister();
1945  LOperand* temp2 = TempRegister();
1946  LUnallocated* result_temp = TempRegister();
1947  LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1948  return AssignPointerMap(Define(result, result_temp));
1949  } else if (to.IsSmi()) {
1950  LOperand* value = UseRegister(val);
1951  return AssignEnvironment(
1952  DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1953  } else {
1954  DCHECK(to.IsInteger32());
1955  LOperand* value = UseRegister(val);
1956  LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value));
1957  if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result);
1958  return result;
1959  }
1960  } else if (from.IsInteger32()) {
1961  info()->MarkAsDeferredCalling();
1962  if (to.IsTagged()) {
1963  if (!instr->CheckFlag(HValue::kCanOverflow)) {
1964  LOperand* value = UseRegisterAtStart(val);
1965  return DefineAsRegister(new(zone()) LSmiTag(value));
1966  } else if (val->CheckFlag(HInstruction::kUint32)) {
1967  LOperand* value = UseRegisterAtStart(val);
1968  LOperand* temp1 = TempRegister();
1969  LOperand* temp2 = TempRegister();
1970  LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1971  return AssignPointerMap(DefineAsRegister(result));
1972  } else {
1973  LOperand* value = UseRegisterAtStart(val);
1974  LOperand* temp1 = TempRegister();
1975  LOperand* temp2 = TempRegister();
1976  LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2);
1977  return AssignPointerMap(DefineAsRegister(result));
1978  }
1979  } else if (to.IsSmi()) {
1980  LOperand* value = UseRegister(val);
1981  LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1982  if (instr->CheckFlag(HValue::kCanOverflow)) {
1983  result = AssignEnvironment(result);
1984  }
1985  return result;
1986  } else {
1987  DCHECK(to.IsDouble());
1988  if (val->CheckFlag(HInstruction::kUint32)) {
1989  return DefineAsRegister(new(zone()) LUint32ToDouble(UseRegister(val)));
1990  } else {
1991  return DefineAsRegister(new(zone()) LInteger32ToDouble(Use(val)));
1992  }
1993  }
1994  }
1995  UNREACHABLE();
1996  return NULL;
1997 }
1998 
1999 
2000 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
2001  LOperand* value = UseRegisterAtStart(instr->value());
2002  LInstruction* result = new(zone()) LCheckNonSmi(value);
2003  if (!instr->value()->type().IsHeapObject()) {
2004  result = AssignEnvironment(result);
2005  }
2006  return result;
2007 }
2008 
2009 
2010 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
2011  LOperand* value = UseRegisterAtStart(instr->value());
2012  return AssignEnvironment(new(zone()) LCheckSmi(value));
2013 }
2014 
2015 
2016 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
2017  LOperand* value = UseRegisterAtStart(instr->value());
2018  LInstruction* result = new(zone()) LCheckInstanceType(value);
2019  return AssignEnvironment(result);
2020 }
2021 
2022 
2023 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
2024  LOperand* value = UseRegisterAtStart(instr->value());
2025  return AssignEnvironment(new(zone()) LCheckValue(value));
2026 }
2027 
2028 
2029 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
2030  if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps;
2031  LOperand* value = UseRegisterAtStart(instr->value());
2032  LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value));
2033  if (instr->HasMigrationTarget()) {
2034  info()->MarkAsDeferredCalling();
2035  result = AssignPointerMap(result);
2036  }
2037  return result;
2038 }
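// A stability check is expected to rely on a code dependency on the maps
// rather than an inline map compare, so it needs no operands; the
// migration-target case may call the runtime to migrate the object, hence
// the deferred-calling mark and the pointer map.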
2039 
2040 
2041 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
2042  HValue* value = instr->value();
2043  Representation input_rep = value->representation();
2044  LOperand* reg = UseRegister(value);
2045  if (input_rep.IsDouble()) {
2046  return DefineAsRegister(new(zone()) LClampDToUint8(reg));
2047  } else if (input_rep.IsInteger32()) {
2048  return DefineAsRegister(new(zone()) LClampIToUint8(reg));
2049  } else {
2050  DCHECK(input_rep.IsSmiOrTagged());
 2051  // In the tagged case the value is untagged into a double temp before
 2052  // it is clamped, so reserve a double temp register for the conversion.
2053  LClampTToUint8* result =
2054  new(zone()) LClampTToUint8(reg, TempDoubleRegister());
2055  return AssignEnvironment(DefineAsRegister(result));
2056  }
2057 }
2058 
2059 
2060 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2061  HValue* value = instr->value();
2062  DCHECK(value->representation().IsDouble());
2063  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2064 }
2065 
2066 
2067 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2068  LOperand* lo = UseRegister(instr->lo());
2069  LOperand* hi = UseRegister(instr->hi());
2070  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
2071 }
2072 
2073 
2074 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2075  LOperand* context = info()->IsStub()
2076  ? UseFixed(instr->context(), cp)
2077  : NULL;
2078  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2079  return new(zone()) LReturn(UseFixed(instr->value(), r0), context,
2080  parameter_count);
2081 }
2082 
2083 
2084 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2085  Representation r = instr->representation();
2086  if (r.IsSmi()) {
2087  return DefineAsRegister(new(zone()) LConstantS);
2088  } else if (r.IsInteger32()) {
2089  return DefineAsRegister(new(zone()) LConstantI);
2090  } else if (r.IsDouble()) {
2091  return DefineAsRegister(new(zone()) LConstantD);
2092  } else if (r.IsExternal()) {
2093  return DefineAsRegister(new(zone()) LConstantE);
2094  } else if (r.IsTagged()) {
2095  return DefineAsRegister(new(zone()) LConstantT);
2096  } else {
2097  UNREACHABLE();
2098  return NULL;
2099  }
2100 }
2101 
2102 
2103 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2104  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2105  return instr->RequiresHoleCheck()
2106  ? AssignEnvironment(DefineAsRegister(result))
2107  : DefineAsRegister(result);
2108 }
2109 
2110 
2111 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2112  LOperand* context = UseFixed(instr->context(), cp);
2113  LOperand* global_object =
2114  UseFixed(instr->global_object(), LoadDescriptor::ReceiverRegister());
2115  LOperand* vector = NULL;
2116  if (FLAG_vector_ics) {
2117  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2118  }
2119  LLoadGlobalGeneric* result =
2120  new(zone()) LLoadGlobalGeneric(context, global_object, vector);
2121  return MarkAsCall(DefineFixed(result, r0), instr);
2122 }
2123 
2124 
2125 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2126  LOperand* value = UseRegister(instr->value());
 2127  // Use a temp to hold the value currently in the cell when a hole check
 2128  // is performed.
2129  return instr->RequiresHoleCheck()
2130  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2131  : new(zone()) LStoreGlobalCell(value, NULL);
2132 }
2133 
2134 
2135 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2136  LOperand* context = UseRegisterAtStart(instr->value());
2137  LInstruction* result =
2138  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2139  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2140  result = AssignEnvironment(result);
2141  }
2142  return result;
2143 }
2144 
2145 
2146 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2147  LOperand* context;
2148  LOperand* value;
2149  if (instr->NeedsWriteBarrier()) {
2150  context = UseTempRegister(instr->context());
2151  value = UseTempRegister(instr->value());
2152  } else {
2153  context = UseRegister(instr->context());
2154  value = UseRegister(instr->value());
2155  }
2156  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
2157  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2158  result = AssignEnvironment(result);
2159  }
2160  return result;
2161 }
2162 
2163 
2164 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2165  LOperand* obj = UseRegisterAtStart(instr->object());
2166  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2167 }
2168 
2169 
2170 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2171  LOperand* context = UseFixed(instr->context(), cp);
2172  LOperand* object =
2173  UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
2174  LOperand* vector = NULL;
2175  if (FLAG_vector_ics) {
2176  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2177  }
2178 
2179  LInstruction* result =
2180  DefineFixed(new(zone()) LLoadNamedGeneric(context, object, vector), r0);
2181  return MarkAsCall(result, instr);
2182 }
2183 
2184 
2185 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2186  HLoadFunctionPrototype* instr) {
2187  return AssignEnvironment(DefineAsRegister(
2188  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
2189 }
2190 
2191 
2192 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2193  return DefineAsRegister(new(zone()) LLoadRoot);
2194 }
2195 
2196 
2197 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2198  DCHECK(instr->key()->representation().IsSmiOrInteger32());
2199  ElementsKind elements_kind = instr->elements_kind();
2200  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2201  LInstruction* result = NULL;
2202 
2203  if (!instr->is_typed_elements()) {
2204  LOperand* obj = NULL;
2205  if (instr->representation().IsDouble()) {
2206  obj = UseRegister(instr->elements());
2207  } else {
2208  DCHECK(instr->representation().IsSmiOrTagged());
2209  obj = UseRegisterAtStart(instr->elements());
2210  }
2211  result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key));
2212  } else {
2213  DCHECK(
2214  (instr->representation().IsInteger32() &&
2215  !IsDoubleOrFloatElementsKind(elements_kind)) ||
2216  (instr->representation().IsDouble() &&
2217  IsDoubleOrFloatElementsKind(elements_kind)));
2218  LOperand* backing_store = UseRegister(instr->elements());
2219  result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key));
2220  }
2221 
2222  if ((instr->is_external() || instr->is_fixed_typed_array()) ?
2223  // see LCodeGen::DoLoadKeyedExternalArray
2224  ((elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2225  elements_kind == UINT32_ELEMENTS) &&
2226  !instr->CheckFlag(HInstruction::kUint32)) :
2227  // see LCodeGen::DoLoadKeyedFixedDoubleArray and
2228  // LCodeGen::DoLoadKeyedFixedArray
2229  instr->RequiresHoleCheck()) {
2230  result = AssignEnvironment(result);
2231  }
2232  return result;
2233 }
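// The uint32 special case exists because a loaded value >= 2^31 cannot be
// represented as an int32; unless all uses are marked kUint32 the load keeps
// a deopt environment so it can bail out on such values. Hole checks on
// fixed and fixed-double arrays likewise need an environment to deoptimize
// when a hole is loaded.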
2234 
2235 
2236 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2237  LOperand* context = UseFixed(instr->context(), cp);
2238  LOperand* object =
2239  UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
2240  LOperand* key = UseFixed(instr->key(), LoadDescriptor::NameRegister());
2241  LOperand* vector = NULL;
2242  if (FLAG_vector_ics) {
2243  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2244  }
2245 
2246  LInstruction* result =
2247  DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key, vector),
2248  r0);
2249  return MarkAsCall(result, instr);
2250 }
2251 
2252 
2253 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2254  if (!instr->is_typed_elements()) {
2255  DCHECK(instr->elements()->representation().IsTagged());
2256  bool needs_write_barrier = instr->NeedsWriteBarrier();
2257  LOperand* object = NULL;
2258  LOperand* key = NULL;
2259  LOperand* val = NULL;
2260 
2261  if (instr->value()->representation().IsDouble()) {
2262  object = UseRegisterAtStart(instr->elements());
2263  val = UseRegister(instr->value());
2264  key = UseRegisterOrConstantAtStart(instr->key());
2265  } else {
2266  DCHECK(instr->value()->representation().IsSmiOrTagged());
2267  if (needs_write_barrier) {
2268  object = UseTempRegister(instr->elements());
2269  val = UseTempRegister(instr->value());
2270  key = UseTempRegister(instr->key());
2271  } else {
2272  object = UseRegisterAtStart(instr->elements());
2273  val = UseRegisterAtStart(instr->value());
2274  key = UseRegisterOrConstantAtStart(instr->key());
2275  }
2276  }
2277 
2278  return new(zone()) LStoreKeyed(object, key, val);
2279  }
2280 
2281  DCHECK(
2282  (instr->value()->representation().IsInteger32() &&
2283  !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2284  (instr->value()->representation().IsDouble() &&
2285  IsDoubleOrFloatElementsKind(instr->elements_kind())));
2286  DCHECK((instr->is_fixed_typed_array() &&
2287  instr->elements()->representation().IsTagged()) ||
2288  (instr->is_external() &&
2289  instr->elements()->representation().IsExternal()));
2290  LOperand* val = UseRegister(instr->value());
2291  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2292  LOperand* backing_store = UseRegister(instr->elements());
2293  return new(zone()) LStoreKeyed(backing_store, key, val);
2294 }
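// When a write barrier is needed, object, key and value are forced into
// temp registers, presumably because the barrier sequence clobbers its
// inputs after the store; the barrier-free path can use the cheaper
// at-start/constant uses.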
2295 
2296 
2297 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2298  LOperand* context = UseFixed(instr->context(), cp);
2299  LOperand* obj =
2300  UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2301  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
2302  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2303 
2304  DCHECK(instr->object()->representation().IsTagged());
2305  DCHECK(instr->key()->representation().IsTagged());
2306  DCHECK(instr->value()->representation().IsTagged());
2307 
2308  return MarkAsCall(
2309  new(zone()) LStoreKeyedGeneric(context, obj, key, val), instr);
2310 }
2311 
2312 
2313 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2314  HTransitionElementsKind* instr) {
2315  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2316  LOperand* object = UseRegister(instr->object());
2317  LOperand* new_map_reg = TempRegister();
2318  LTransitionElementsKind* result =
2319  new(zone()) LTransitionElementsKind(object, NULL, new_map_reg);
2320  return result;
2321  } else {
2322  LOperand* object = UseFixed(instr->object(), r0);
2323  LOperand* context = UseFixed(instr->context(), cp);
2324  LTransitionElementsKind* result =
2325  new(zone()) LTransitionElementsKind(object, context, NULL);
2326  return MarkAsCall(result, instr);
2327  }
2328 }
2329 
2330 
2331 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2332  HTrapAllocationMemento* instr) {
2333  LOperand* object = UseRegister(instr->object());
2334  LOperand* temp = TempRegister();
2335  LTrapAllocationMemento* result =
2336  new(zone()) LTrapAllocationMemento(object, temp);
2337  return AssignEnvironment(result);
2338 }
2339 
2340 
2341 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2342  bool is_in_object = instr->access().IsInobject();
2343  bool needs_write_barrier = instr->NeedsWriteBarrier();
2344  bool needs_write_barrier_for_map = instr->has_transition() &&
2345  instr->NeedsWriteBarrierForMap();
2346 
2347  LOperand* obj;
2348  if (needs_write_barrier) {
2349  obj = is_in_object
2350  ? UseRegister(instr->object())
2351  : UseTempRegister(instr->object());
2352  } else {
2353  obj = needs_write_barrier_for_map
2354  ? UseRegister(instr->object())
2355  : UseRegisterAtStart(instr->object());
2356  }
2357 
2358  LOperand* val;
2359  if (needs_write_barrier) {
2360  val = UseTempRegister(instr->value());
2361  } else if (instr->field_representation().IsDouble()) {
2362  val = UseRegisterAtStart(instr->value());
2363  } else {
2364  val = UseRegister(instr->value());
2365  }
2366 
 2367  // We need a temporary register for the write barrier of the map field.
2368  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
2369 
2370  return new(zone()) LStoreNamedField(obj, val, temp);
2371 }
2372 
2373 
2374 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2375  LOperand* context = UseFixed(instr->context(), cp);
2376  LOperand* obj =
2377  UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2378  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2379 
2380  LInstruction* result = new(zone()) LStoreNamedGeneric(context, obj, val);
2381  return MarkAsCall(result, instr);
2382 }
2383 
2384 
2385 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2386  LOperand* context = UseFixed(instr->context(), cp);
2387  LOperand* left = UseFixed(instr->left(), r1);
2388  LOperand* right = UseFixed(instr->right(), r0);
2389  return MarkAsCall(
2390  DefineFixed(new(zone()) LStringAdd(context, left, right), r0),
2391  instr);
2392 }
2393 
2394 
2395 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2396  LOperand* string = UseTempRegister(instr->string());
2397  LOperand* index = UseTempRegister(instr->index());
2398  LOperand* context = UseAny(instr->context());
2399  LStringCharCodeAt* result =
2400  new(zone()) LStringCharCodeAt(context, string, index);
2401  return AssignPointerMap(DefineAsRegister(result));
2402 }
2403 
2404 
2405 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2406  LOperand* char_code = UseRegister(instr->value());
2407  LOperand* context = UseAny(instr->context());
2408  LStringCharFromCode* result =
2409  new(zone()) LStringCharFromCode(context, char_code);
2410  return AssignPointerMap(DefineAsRegister(result));
2411 }
2412 
2413 
2414 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2415  info()->MarkAsDeferredCalling();
2416  LOperand* context = UseAny(instr->context());
2417  LOperand* size = UseRegisterOrConstant(instr->size());
2418  LOperand* temp1 = TempRegister();
2419  LOperand* temp2 = TempRegister();
2420  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
2421  return AssignPointerMap(DefineAsRegister(result));
2422 }
2423 
2424 
2425 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2426  LOperand* context = UseFixed(instr->context(), cp);
2427  return MarkAsCall(
2428  DefineFixed(new(zone()) LRegExpLiteral(context), r0), instr);
2429 }
2430 
2431 
2432 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2433  LOperand* context = UseFixed(instr->context(), cp);
2434  return MarkAsCall(
2435  DefineFixed(new(zone()) LFunctionLiteral(context), r0), instr);
2436 }
2437 
2438 
2439 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2440  DCHECK(argument_count_ == 0);
2441  allocator_->MarkAsOsrEntry();
2442  current_block_->last_environment()->set_ast_id(instr->ast_id());
2443  return AssignEnvironment(new(zone()) LOsrEntry);
2444 }
2445 
2446 
2447 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2448  LParameter* result = new(zone()) LParameter;
2449  if (instr->kind() == HParameter::STACK_PARAMETER) {
2450  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2451  return DefineAsSpilled(result, spill_index);
2452  } else {
2453  DCHECK(info()->IsStub());
2454  CallInterfaceDescriptor descriptor =
2455  info()->code_stub()->GetCallInterfaceDescriptor();
2456  int index = static_cast<int>(instr->index());
2457  Register reg = descriptor.GetEnvironmentParameterRegister(index);
2458  return DefineFixed(result, reg);
2459  }
2460 }
2461 
2462 
2463 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2464  // Use an index that corresponds to the location in the unoptimized frame,
2465  // which the optimized frame will subsume.
2466  int env_index = instr->index();
2467  int spill_index = 0;
2468  if (instr->environment()->is_parameter_index(env_index)) {
2469  spill_index = chunk()->GetParameterStackSlot(env_index);
2470  } else {
2471  spill_index = env_index - instr->environment()->first_local_index();
2472  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2473  Retry(kTooManySpillSlotsNeededForOSR);
2474  spill_index = 0;
2475  }
2476  }
2477  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2478 }
2479 
2480 
2481 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2482  LOperand* context = UseFixed(instr->context(), cp);
2483  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), r0), instr);
2484 }
2485 
2486 
2487 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2488  // There are no real uses of the arguments object.
2489  // arguments.length and element access are supported directly on
2490  // stack arguments, and any real arguments object use causes a bailout.
2491  // So this value is never used.
2492  return NULL;
2493 }
2494 
2495 
2496 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2497  instr->ReplayEnvironment(current_block_->last_environment());
2498 
2499  // There are no real uses of a captured object.
2500  return NULL;
2501 }
2502 
2503 
2504 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2505  info()->MarkAsRequiresFrame();
2506  LOperand* args = UseRegister(instr->arguments());
2507  LOperand* length = UseRegisterOrConstantAtStart(instr->length());
2508  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2509  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2510 }
2511 
2512 
2513 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2514  LOperand* object = UseFixed(instr->value(), r0);
2515  LToFastProperties* result = new(zone()) LToFastProperties(object);
2516  return MarkAsCall(DefineFixed(result, r0), instr);
2517 }
2518 
2519 
2520 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2521  LOperand* context = UseFixed(instr->context(), cp);
2522  LTypeof* result = new(zone()) LTypeof(context, UseFixed(instr->value(), r0));
2523  return MarkAsCall(DefineFixed(result, r0), instr);
2524 }
2525 
2526 
2527 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2528  return new(zone()) LTypeofIsAndBranch(UseRegister(instr->value()));
2529 }
2530 
2531 
2532 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2533  HIsConstructCallAndBranch* instr) {
2534  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2535 }
2536 
2537 
2538 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2539  instr->ReplayEnvironment(current_block_->last_environment());
2540  return NULL;
2541 }
2542 
2543 
2544 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2545  if (instr->is_function_entry()) {
2546  LOperand* context = UseFixed(instr->context(), cp);
2547  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2548  } else {
2549  DCHECK(instr->is_backwards_branch());
2550  LOperand* context = UseAny(instr->context());
2551  return AssignEnvironment(
2552  AssignPointerMap(new(zone()) LStackCheck(context)));
2553  }
2554 }
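// Function-entry stack checks call the stack guard directly (MarkAsCall),
// while back-edge checks get both a pointer map and a deopt environment so
// that an interrupt taken on a loop back edge can walk the frame and, if
// needed, deoptimize.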
2555 
2556 
2557 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2558  HEnvironment* outer = current_block_->last_environment();
2559  outer->set_ast_id(instr->ReturnId());
2560  HConstant* undefined = graph()->GetConstantUndefined();
2561  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2562  instr->arguments_count(),
2563  instr->function(),
2564  undefined,
2565  instr->inlining_kind());
 2566  // Only replay the binding of the arguments object if it is still in the graph.
2567  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2568  inner->Bind(instr->arguments_var(), instr->arguments_object());
2569  }
2570  inner->BindContext(instr->closure_context());
2571  inner->set_entry(instr);
2572  current_block_->UpdateEnvironment(inner);
2573  chunk_->AddInlinedClosure(instr->closure());
2574  return NULL;
2575 }
2576 
2577 
2578 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2579  LInstruction* pop = NULL;
2580 
2581  HEnvironment* env = current_block_->last_environment();
2582 
2583  if (env->entry()->arguments_pushed()) {
2584  int argument_count = env->arguments_environment()->parameter_count();
2585  pop = new(zone()) LDrop(argument_count);
2586  DCHECK(instr->argument_delta() == -argument_count);
2587  }
2588 
2589  HEnvironment* outer = current_block_->last_environment()->
2590  DiscardInlined(false);
2591  current_block_->UpdateEnvironment(outer);
2592 
2593  return pop;
2594 }
2595 
2596 
2597 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2598  LOperand* context = UseFixed(instr->context(), cp);
2599  LOperand* object = UseFixed(instr->enumerable(), r0);
2600  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2601  return MarkAsCall(DefineFixed(result, r0), instr, CAN_DEOPTIMIZE_EAGERLY);
2602 }
2603 
2604 
2605 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2606  LOperand* map = UseRegister(instr->map());
2607  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2608 }
2609 
2610 
2611 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2612  LOperand* value = UseRegisterAtStart(instr->value());
2613  LOperand* map = UseRegisterAtStart(instr->map());
2614  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2615 }
2616 
2617 
2618 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2619  LOperand* object = UseRegister(instr->object());
2620  LOperand* index = UseTempRegister(instr->index());
2621  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
2622  LInstruction* result = DefineSameAsFirst(load);
2623  return AssignPointerMap(result);
2624 }
2625 
2626 
2627 LInstruction* LChunkBuilder::DoStoreFrameContext(HStoreFrameContext* instr) {
2628  LOperand* context = UseRegisterAtStart(instr->context());
2629  return new(zone()) LStoreFrameContext(context);
2630 }
2631 
2632 
2633 LInstruction* LChunkBuilder::DoAllocateBlockContext(
2634  HAllocateBlockContext* instr) {
2635  LOperand* context = UseFixed(instr->context(), cp);
2636  LOperand* function = UseRegisterAtStart(instr->function());
2637  LAllocateBlockContext* result =
2638  new(zone()) LAllocateBlockContext(context, function);
2639  return MarkAsCall(DefineFixed(result, cp), instr);
2640 }
2641 
2642 } } // namespace v8::internal