V8 Project
lithium-mips64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_MIPS64
8 
9 #include "src/hydrogen-osr.h"
10 #include "src/lithium-inl.h"
11 #include "src/mips64/lithium-codegen-mips64.h"
12 
13 namespace v8 {
14 namespace internal {
15 
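// DEFINE_COMPILE generates the CompileToNative method for every concrete
// lithium instruction; LITHIUM_CONCRETE_INSTRUCTION_LIST below instantiates
// it once per instruction type. For example, DEFINE_COMPILE(Goto) expands to:
//   void LGoto::CompileToNative(LCodeGen* generator) {
//     generator->DoGoto(this);
//   }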
16 #define DEFINE_COMPILE(type) \
17  void L##type::CompileToNative(LCodeGen* generator) { \
18  generator->Do##type(this); \
19  }
20 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
21 #undef DEFINE_COMPILE
22 
23 #ifdef DEBUG
24 void LInstruction::VerifyCall() {
25  // Call instructions can use only fixed registers as temporaries and
26  // outputs because all registers are blocked by the calling convention.
27  // Input operands must use a fixed register, a use-at-start policy, or
28  // a non-register policy.
29  DCHECK(Output() == NULL ||
30  LUnallocated::cast(Output())->HasFixedPolicy() ||
31  !LUnallocated::cast(Output())->HasRegisterPolicy());
32  for (UseIterator it(this); !it.Done(); it.Advance()) {
33  LUnallocated* operand = LUnallocated::cast(it.Current());
34  DCHECK(operand->HasFixedPolicy() ||
35  operand->IsUsedAtStart());
36  }
37  for (TempIterator it(this); !it.Done(); it.Advance()) {
38  LUnallocated* operand = LUnallocated::cast(it.Current());
39  DCHECK(operand->HasFixedPolicy() ||!operand->HasRegisterPolicy());
40  }
41 }
42 #endif
43 
44 
45 void LInstruction::PrintTo(StringStream* stream) {
46  stream->Add("%s ", this->Mnemonic());
47 
48  PrintOutputOperandTo(stream);
49 
50  PrintDataTo(stream);
51 
52  if (HasEnvironment()) {
53  stream->Add(" ");
54  environment()->PrintTo(stream);
55  }
56 
57  if (HasPointerMap()) {
58  stream->Add(" ");
59  pointer_map()->PrintTo(stream);
60  }
61 }
62 
63 
64 void LInstruction::PrintDataTo(StringStream* stream) {
65  stream->Add("= ");
66  for (int i = 0; i < InputCount(); i++) {
67  if (i > 0) stream->Add(" ");
68  if (InputAt(i) == NULL) {
69  stream->Add("NULL");
70  } else {
71  InputAt(i)->PrintTo(stream);
72  }
73  }
74 }
75 
76 
77 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
78  if (HasResult()) result()->PrintTo(stream);
79 }
80 
81 
82 void LLabel::PrintDataTo(StringStream* stream) {
83  LGap::PrintDataTo(stream);
84  LLabel* rep = replacement();
85  if (rep != NULL) {
86  stream->Add(" Dead block replaced with B%d", rep->block_id());
87  }
88 }
89 
90 
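// A gap is redundant when none of its parallel move positions contains a
// non-redundant move, i.e. the whole gap can be dropped without effect.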
91 bool LGap::IsRedundant() const {
92  for (int i = 0; i < 4; i++) {
93  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
94  return false;
95  }
96  }
97 
98  return true;
99 }
100 
101 
102 void LGap::PrintDataTo(StringStream* stream) {
103  for (int i = 0; i < 4; i++) {
104  stream->Add("(");
105  if (parallel_moves_[i] != NULL) {
106  parallel_moves_[i]->PrintDataTo(stream);
107  }
108  stream->Add(") ");
109  }
110 }
111 
112 
113 const char* LArithmeticD::Mnemonic() const {
114  switch (op()) {
115  case Token::ADD: return "add-d";
116  case Token::SUB: return "sub-d";
117  case Token::MUL: return "mul-d";
118  case Token::DIV: return "div-d";
119  case Token::MOD: return "mod-d";
120  default:
121  UNREACHABLE();
122  return NULL;
123  }
124 }
125 
126 
127 const char* LArithmeticT::Mnemonic() const {
128  switch (op()) {
129  case Token::ADD: return "add-t";
130  case Token::SUB: return "sub-t";
131  case Token::MUL: return "mul-t";
132  case Token::MOD: return "mod-t";
133  case Token::DIV: return "div-t";
134  case Token::BIT_AND: return "bit-and-t";
135  case Token::BIT_OR: return "bit-or-t";
136  case Token::BIT_XOR: return "bit-xor-t";
137  case Token::ROR: return "ror-t";
138  case Token::SHL: return "sll-t";
139  case Token::SAR: return "sra-t";
140  case Token::SHR: return "srl-t";
141  default:
142  UNREACHABLE();
143  return NULL;
144  }
145 }
146 
147 
148 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
149  return !gen->IsNextEmittedBlock(block_id());
150 }
151 
152 
153 void LGoto::PrintDataTo(StringStream* stream) {
154  stream->Add("B%d", block_id());
155 }
156 
157 
158 void LBranch::PrintDataTo(StringStream* stream) {
159  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
160  value()->PrintTo(stream);
161 }
162 
163 
164 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
165  return new(zone()) LDebugBreak();
166 }
167 
168 
169 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
170  stream->Add("if ");
171  left()->PrintTo(stream);
172  stream->Add(" %s ", Token::String(op()));
173  right()->PrintTo(stream);
174  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
175 }
176 
177 
178 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
179  stream->Add("if is_object(");
180  value()->PrintTo(stream);
181  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
182 }
183 
184 
185 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
186  stream->Add("if is_string(");
187  value()->PrintTo(stream);
188  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
189 }
190 
191 
192 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
193  stream->Add("if is_smi(");
194  value()->PrintTo(stream);
195  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
196 }
197 
198 
199 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
200  stream->Add("if is_undetectable(");
201  value()->PrintTo(stream);
202  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
203 }
204 
205 
206 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
207  stream->Add("if string_compare(");
208  left()->PrintTo(stream);
209  right()->PrintTo(stream);
210  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
211 }
212 
213 
214 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
215  stream->Add("if has_instance_type(");
216  value()->PrintTo(stream);
217  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
218 }
219 
220 
221 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
222  stream->Add("if has_cached_array_index(");
223  value()->PrintTo(stream);
224  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
225 }
226 
227 
228 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
229  stream->Add("if class_of_test(");
230  value()->PrintTo(stream);
231  stream->Add(", \"%o\") then B%d else B%d",
232  *hydrogen()->class_name(),
233  true_block_id(),
234  false_block_id());
235 }
236 
237 
238 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
239  stream->Add("if typeof ");
240  value()->PrintTo(stream);
241  stream->Add(" == \"%s\" then B%d else B%d",
242  hydrogen()->type_literal()->ToCString().get(),
243  true_block_id(), false_block_id());
244 }
245 
246 
247 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
248  stream->Add(" = ");
249  function()->PrintTo(stream);
250  stream->Add(".code_entry = ");
251  code_object()->PrintTo(stream);
252 }
253 
254 
255 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
256  stream->Add(" = ");
257  base_object()->PrintTo(stream);
258  stream->Add(" + ");
259  offset()->PrintTo(stream);
260 }
261 
262 
263 void LCallJSFunction::PrintDataTo(StringStream* stream) {
264  stream->Add("= ");
265  function()->PrintTo(stream);
266  stream->Add("#%d / ", arity());
267 }
268 
269 
270 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
271  for (int i = 0; i < InputCount(); i++) {
272  InputAt(i)->PrintTo(stream);
273  stream->Add(" ");
274  }
275  stream->Add("#%d / ", arity());
276 }
277 
278 
279 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
280  context()->PrintTo(stream);
281  stream->Add("[%d]", slot_index());
282 }
283 
284 
285 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
286  context()->PrintTo(stream);
287  stream->Add("[%d] <- ", slot_index());
288  value()->PrintTo(stream);
289 }
290 
291 
292 void LInvokeFunction::PrintDataTo(StringStream* stream) {
293  stream->Add("= ");
294  function()->PrintTo(stream);
295  stream->Add(" #%d / ", arity());
296 }
297 
298 
299 void LCallNew::PrintDataTo(StringStream* stream) {
300  stream->Add("= ");
301  constructor()->PrintTo(stream);
302  stream->Add(" #%d / ", arity());
303 }
304 
305 
306 void LCallNewArray::PrintDataTo(StringStream* stream) {
307  stream->Add("= ");
308  constructor()->PrintTo(stream);
309  stream->Add(" #%d / ", arity());
310  ElementsKind kind = hydrogen()->elements_kind();
311  stream->Add(" (%s) ", ElementsKindToString(kind));
312 }
313 
314 
315 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
316  arguments()->PrintTo(stream);
317  stream->Add(" length ");
318  length()->PrintTo(stream);
319  stream->Add(" index ");
320  index()->PrintTo(stream);
321 }
322 
323 
324 void LStoreNamedField::PrintDataTo(StringStream* stream) {
325  object()->PrintTo(stream);
326  OStringStream os;
327  os << hydrogen()->access() << " <- ";
328  stream->Add(os.c_str());
329  value()->PrintTo(stream);
330 }
331 
332 
333 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
334  object()->PrintTo(stream);
335  stream->Add(".");
336  stream->Add(String::cast(*name())->ToCString().get());
337  stream->Add(" <- ");
338  value()->PrintTo(stream);
339 }
340 
341 
342 void LLoadKeyed::PrintDataTo(StringStream* stream) {
343  elements()->PrintTo(stream);
344  stream->Add("[");
345  key()->PrintTo(stream);
346  if (hydrogen()->IsDehoisted()) {
347  stream->Add(" + %d]", base_offset());
348  } else {
349  stream->Add("]");
350  }
351 }
352 
353 
354 void LStoreKeyed::PrintDataTo(StringStream* stream) {
355  elements()->PrintTo(stream);
356  stream->Add("[");
357  key()->PrintTo(stream);
358  if (hydrogen()->IsDehoisted()) {
359  stream->Add(" + %d] <-", base_offset());
360  } else {
361  stream->Add("] <- ");
362  }
363 
364  if (value() == NULL) {
365  DCHECK(hydrogen()->IsConstantHoleStore() &&
366  hydrogen()->value()->representation().IsDouble());
367  stream->Add("<the hole(nan)>");
368  } else {
369  value()->PrintTo(stream);
370  }
371 }
372 
373 
374 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
375  object()->PrintTo(stream);
376  stream->Add("[");
377  key()->PrintTo(stream);
378  stream->Add("] <- ");
379  value()->PrintTo(stream);
380 }
381 
382 
383 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
384  object()->PrintTo(stream);
385  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
386 }
387 
388 
389 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
390  // Skip an extra slot for a double-width spill slot.
391  if (kind == DOUBLE_REGISTERS) spill_slot_count_++;
392  return spill_slot_count_++;
393 }
394 
395 
396 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
397  int index = GetNextSpillIndex(kind);
398  if (kind == DOUBLE_REGISTERS) {
399  return LDoubleStackSlot::Create(index, zone());
400  } else {
401  DCHECK(kind == GENERAL_REGISTERS);
402  return LStackSlot::Create(index, zone());
403  }
404 }
405 
406 
407 LPlatformChunk* LChunkBuilder::Build() {
408  DCHECK(is_unused());
409  chunk_ = new(zone()) LPlatformChunk(info(), graph());
410  LPhase phase("L_Building chunk", chunk_);
411  status_ = BUILDING;
412 
413  // If compiling for OSR, reserve space for the unoptimized frame,
414  // which will be subsumed into this frame.
415  if (graph()->has_osr()) {
416  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
417  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
418  }
419  }
420 
421  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
422  for (int i = 0; i < blocks->length(); i++) {
423  HBasicBlock* next = NULL;
424  if (i < blocks->length() - 1) next = blocks->at(i + 1);
425  DoBasicBlock(blocks->at(i), next);
426  if (is_aborted()) return NULL;
427  }
428  status_ = DONE;
429  return chunk_;
430 }
431 
432 
433 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
434  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
435  Register::ToAllocationIndex(reg));
436 }
437 
438 
439 LUnallocated* LChunkBuilder::ToUnallocated(DoubleRegister reg) {
440  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
441  DoubleRegister::ToAllocationIndex(reg));
442 }
443 
444 
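// The Use* helpers below wrap an HValue in an LUnallocated operand carrying
// a register-allocation policy: a specific fixed register, any register, a
// use-at-start constraint, a constant, or no constraint at all.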
445 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
446  return Use(value, ToUnallocated(fixed_register));
447 }
448 
449 
450 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, DoubleRegister reg) {
451  return Use(value, ToUnallocated(reg));
452 }
453 
454 
455 LOperand* LChunkBuilder::UseRegister(HValue* value) {
456  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
457 }
458 
459 
460 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
461  return Use(value,
462  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
463  LUnallocated::USED_AT_START));
464 }
465 
466 
467 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
468  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
469 }
470 
471 
472 LOperand* LChunkBuilder::Use(HValue* value) {
473  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
474 }
475 
476 
477 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
478  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
479  LUnallocated::USED_AT_START));
480 }
481 
482 
483 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
484  return value->IsConstant()
485  ? chunk_->DefineConstantOperand(HConstant::cast(value))
486  : Use(value);
487 }
488 
489 
490 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
491  return value->IsConstant()
492  ? chunk_->DefineConstantOperand(HConstant::cast(value))
493  : UseAtStart(value);
494 }
495 
496 
497 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
498  return value->IsConstant()
499  ? chunk_->DefineConstantOperand(HConstant::cast(value))
500  : UseRegister(value);
501 }
502 
503 
504 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
505  return value->IsConstant()
506  ? chunk_->DefineConstantOperand(HConstant::cast(value))
507  : UseRegisterAtStart(value);
508 }
509 
510 
511 LOperand* LChunkBuilder::UseConstant(HValue* value) {
512  return chunk_->DefineConstantOperand(HConstant::cast(value));
513 }
514 
515 
516 LOperand* LChunkBuilder::UseAny(HValue* value) {
517  return value->IsConstant()
518  ? chunk_->DefineConstantOperand(HConstant::cast(value))
519  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
520 }
521 
522 
523 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
524  if (value->EmitAtUses()) {
525  HInstruction* instr = HInstruction::cast(value);
526  VisitInstruction(instr);
527  }
528  operand->set_virtual_register(value->id());
529  return operand;
530 }
531 
532 
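// The Define* helpers attach a result policy to a single-result instruction:
// the result can be forced into some register, a specific fixed register, a
// spill slot, or the same location as the first input.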
533 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
534  LUnallocated* result) {
535  result->set_virtual_register(current_instruction_->id());
536  instr->set_result(result);
537  return instr;
538 }
539 
540 
541 LInstruction* LChunkBuilder::DefineAsRegister(
542  LTemplateResultInstruction<1>* instr) {
543  return Define(instr,
544  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
545 }
546 
547 
548 LInstruction* LChunkBuilder::DefineAsSpilled(
549  LTemplateResultInstruction<1>* instr, int index) {
550  return Define(instr,
551  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
552 }
553 
554 
555 LInstruction* LChunkBuilder::DefineSameAsFirst(
556  LTemplateResultInstruction<1>* instr) {
557  return Define(instr,
558  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
559 }
560 
561 
562 LInstruction* LChunkBuilder::DefineFixed(
563  LTemplateResultInstruction<1>* instr, Register reg) {
564  return Define(instr, ToUnallocated(reg));
565 }
566 
567 
568 LInstruction* LChunkBuilder::DefineFixedDouble(
569  LTemplateResultInstruction<1>* instr, DoubleRegister reg) {
570  return Define(instr, ToUnallocated(reg));
571 }
572 
573 
574 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
575  HEnvironment* hydrogen_env = current_block_->last_environment();
576  int argument_index_accumulator = 0;
577  ZoneList<HValue*> objects_to_materialize(0, zone());
578  instr->set_environment(CreateEnvironment(hydrogen_env,
579  &argument_index_accumulator,
580  &objects_to_materialize));
581  return instr;
582 }
583 
584 
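// MarkAsCall flags instructions that perform calls: they get a pointer map
// for GC, and usually an environment so that lazy deoptimization after the
// call can bail out to the point before it.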
585 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
586  HInstruction* hinstr,
587  CanDeoptimize can_deoptimize) {
588  info()->MarkAsNonDeferredCalling();
589 #ifdef DEBUG
590  instr->VerifyCall();
591 #endif
592  instr->MarkAsCall();
593  instr = AssignPointerMap(instr);
594 
595  // If the instruction does not have side effects, lazy deoptimization
596  // after the call will try to deoptimize to the point before the call.
597  // Thus we still need to attach an environment to this call even if
598  // the call sequence cannot deoptimize eagerly.
599  bool needs_environment =
600  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
601  !hinstr->HasObservableSideEffects();
602  if (needs_environment && !instr->HasEnvironment()) {
603  instr = AssignEnvironment(instr);
604  // We can't really figure out if the environment is needed or not.
605  instr->environment()->set_has_been_used();
606  }
607 
608  return instr;
609 }
610 
611 
612 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
613  DCHECK(!instr->HasPointerMap());
614  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
615  return instr;
616 }
617 
618 
619 LUnallocated* LChunkBuilder::TempRegister() {
620  LUnallocated* operand =
621  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
622  int vreg = allocator_->GetVirtualRegister();
623  if (!allocator_->AllocationOk()) {
624  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
625  vreg = 0;
626  }
627  operand->set_virtual_register(vreg);
628  return operand;
629 }
630 
631 
632 LUnallocated* LChunkBuilder::TempDoubleRegister() {
633  LUnallocated* operand =
634  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_DOUBLE_REGISTER);
635  int vreg = allocator_->GetVirtualRegister();
636  if (!allocator_->AllocationOk()) {
637  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
638  vreg = 0;
639  }
640  operand->set_virtual_register(vreg);
641  return operand;
642 }
643 
644 
645 LOperand* LChunkBuilder::FixedTemp(Register reg) {
646  LUnallocated* operand = ToUnallocated(reg);
647  DCHECK(operand->HasFixedPolicy());
648  return operand;
649 }
650 
651 
652 LOperand* LChunkBuilder::FixedTemp(DoubleRegister reg) {
653  LUnallocated* operand = ToUnallocated(reg);
654  DCHECK(operand->HasFixedPolicy());
655  return operand;
656 }
657 
658 
659 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
660  return new(zone()) LLabel(instr->block());
661 }
662 
663 
664 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
665  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
666 }
667 
668 
669 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
670  UNREACHABLE();
671  return NULL;
672 }
673 
674 
675 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
676  return AssignEnvironment(new(zone()) LDeoptimize);
677 }
678 
679 
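// For Smi/Integer32 shifts the shift amount is kept as a constant operand
// when possible; does_deopt records the cases where the result may not be
// representable, in which case an environment is attached.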
680 LInstruction* LChunkBuilder::DoShift(Token::Value op,
681  HBitwiseBinaryOperation* instr) {
682  if (instr->representation().IsSmiOrInteger32()) {
683  DCHECK(instr->left()->representation().Equals(instr->representation()));
684  DCHECK(instr->right()->representation().Equals(instr->representation()));
685  LOperand* left = UseRegisterAtStart(instr->left());
686 
687  HValue* right_value = instr->right();
688  LOperand* right = NULL;
689  int constant_value = 0;
690  bool does_deopt = false;
691  if (right_value->IsConstant()) {
692  HConstant* constant = HConstant::cast(right_value);
693  right = chunk_->DefineConstantOperand(constant);
694  constant_value = constant->Integer32Value() & 0x1f;
695  // Left shifts can deoptimize if we shift by > 0 and the result cannot be
696  // truncated to smi.
697  if (instr->representation().IsSmi() && constant_value > 0) {
698  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
699  }
700  } else {
701  right = UseRegisterAtStart(right_value);
702  }
703 
704  // Shift operations can only deoptimize if we do a logical shift
705  // by 0 and the result cannot be truncated to int32.
706  if (op == Token::SHR && constant_value == 0) {
707  if (FLAG_opt_safe_uint32_operations) {
708  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
709  } else {
710  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
711  }
712  }
713 
714  LInstruction* result =
715  DefineAsRegister(new(zone()) LShiftI(op, left, right, does_deopt));
716  return does_deopt ? AssignEnvironment(result) : result;
717  } else {
718  return DoArithmeticT(op, instr);
719  }
720 }
721 
722 
723 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
724  HArithmeticBinaryOperation* instr) {
725  DCHECK(instr->representation().IsDouble());
726  DCHECK(instr->left()->representation().IsDouble());
727  DCHECK(instr->right()->representation().IsDouble());
728  if (op == Token::MOD) {
729  LOperand* left = UseFixedDouble(instr->left(), f2);
730  LOperand* right = UseFixedDouble(instr->right(), f4);
731  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
732  // We call a C function for double modulo. It can't trigger a GC. We need
733  // to use a fixed result register for the call.
734  // TODO(fschneider): Allow any register as input registers.
735  return MarkAsCall(DefineFixedDouble(result, f2), instr);
736  } else {
737  LOperand* left = UseRegisterAtStart(instr->left());
738  LOperand* right = UseRegisterAtStart(instr->right());
739  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
740  return DefineAsRegister(result);
741  }
742 }
743 
744 
745 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
746  HBinaryOperation* instr) {
747  HValue* left = instr->left();
748  HValue* right = instr->right();
749  DCHECK(left->representation().IsTagged());
750  DCHECK(right->representation().IsTagged());
751  LOperand* context = UseFixed(instr->context(), cp);
752  LOperand* left_operand = UseFixed(left, a1);
753  LOperand* right_operand = UseFixed(right, a0);
754  LArithmeticT* result =
755  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
756  return MarkAsCall(DefineFixed(result, v0), instr);
757 }
758 
759 
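// DoBasicBlock establishes the block's incoming environment (shared or
// copied from a predecessor, with phi values patched in at joins) and then
// lowers each hydrogen instruction of the block in order.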
760 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
761  DCHECK(is_building());
762  current_block_ = block;
763  next_block_ = next_block;
764  if (block->IsStartBlock()) {
765  block->UpdateEnvironment(graph_->start_environment());
766  argument_count_ = 0;
767  } else if (block->predecessors()->length() == 1) {
768  // We have a single predecessor => copy environment and outgoing
769  // argument count from the predecessor.
770  DCHECK(block->phis()->length() == 0);
771  HBasicBlock* pred = block->predecessors()->at(0);
772  HEnvironment* last_environment = pred->last_environment();
773  DCHECK(last_environment != NULL);
774  // Only copy the environment if it is later used again.
775  if (pred->end()->SecondSuccessor() == NULL) {
776  DCHECK(pred->end()->FirstSuccessor() == block);
777  } else {
778  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
779  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
780  last_environment = last_environment->Copy();
781  }
782  }
783  block->UpdateEnvironment(last_environment);
784  DCHECK(pred->argument_count() >= 0);
785  argument_count_ = pred->argument_count();
786  } else {
787  // We are at a state join => process phis.
788  HBasicBlock* pred = block->predecessors()->at(0);
789  // No need to copy the environment, it cannot be used later.
790  HEnvironment* last_environment = pred->last_environment();
791  for (int i = 0; i < block->phis()->length(); ++i) {
792  HPhi* phi = block->phis()->at(i);
793  if (phi->HasMergedIndex()) {
794  last_environment->SetValueAt(phi->merged_index(), phi);
795  }
796  }
797  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
798  if (block->deleted_phis()->at(i) < last_environment->length()) {
799  last_environment->SetValueAt(block->deleted_phis()->at(i),
800  graph_->GetConstantUndefined());
801  }
802  }
803  block->UpdateEnvironment(last_environment);
804  // Pick up the outgoing argument count of one of the predecessors.
805  argument_count_ = pred->argument_count();
806  }
807  HInstruction* current = block->first();
808  int start = chunk_->instructions()->length();
809  while (current != NULL && !is_aborted()) {
810  // Code for constants in registers is generated lazily.
811  if (!current->EmitAtUses()) {
812  VisitInstruction(current);
813  }
814  current = current->next();
815  }
816  int end = chunk_->instructions()->length() - 1;
817  if (end >= start) {
818  block->set_first_instruction_index(start);
819  block->set_last_instruction_index(end);
820  }
821  block->set_argument_count(argument_count_);
822  next_block_ = NULL;
823  current_block_ = NULL;
824 }
825 
826 
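// VisitInstruction lowers one hydrogen instruction: either to dummy uses
// when its value can be replaced, to a direct LGoto when the successor is
// statically known, or to the result of the instruction's CompileToLithium.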
827 void LChunkBuilder::VisitInstruction(HInstruction* current) {
828  HInstruction* old_current = current_instruction_;
829  current_instruction_ = current;
830 
831  LInstruction* instr = NULL;
832  if (current->CanReplaceWithDummyUses()) {
833  if (current->OperandCount() == 0) {
834  instr = DefineAsRegister(new(zone()) LDummy());
835  } else {
836  DCHECK(!current->OperandAt(0)->IsControlInstruction());
837  instr = DefineAsRegister(new(zone())
838  LDummyUse(UseAny(current->OperandAt(0))));
839  }
840  for (int i = 1; i < current->OperandCount(); ++i) {
841  if (current->OperandAt(i)->IsControlInstruction()) continue;
842  LInstruction* dummy =
843  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
844  dummy->set_hydrogen_value(current);
845  chunk_->AddInstruction(dummy, current_block_);
846  }
847  } else {
848  HBasicBlock* successor;
849  if (current->IsControlInstruction() &&
850  HControlInstruction::cast(current)->KnownSuccessorBlock(&successor) &&
851  successor != NULL) {
852  instr = new(zone()) LGoto(successor);
853  } else {
854  instr = current->CompileToLithium(this);
855  }
856  }
857 
858  argument_count_ += current->argument_delta();
859  DCHECK(argument_count_ >= 0);
860 
861  if (instr != NULL) {
862  AddInstruction(instr, current);
863  }
864 
865  current_instruction_ = old_current;
866 }
867 
868 
869 void LChunkBuilder::AddInstruction(LInstruction* instr,
870  HInstruction* hydrogen_val) {
871  // Associate the hydrogen instruction first, since we may need it for
872  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
873  instr->set_hydrogen_value(hydrogen_val);
874 
875 #if DEBUG
876  // Make sure that the lithium instruction has either no fixed register
877  // constraints in temps or the result OR no uses that are only used at
878  // start. If this invariant doesn't hold, the register allocator can decide
879  // to insert a split of a range immediately before the instruction due to an
880  // already allocated register needing to be used for the instruction's fixed
881  // register constraint. In this case, the register allocator won't see an
882  // interference between the split child and the use-at-start (it would if
883  // it was just a plain use), so it is free to move the split child into
884  // the same register that is used for the use-at-start.
885  // See https://code.google.com/p/chromium/issues/detail?id=201590
886  if (!(instr->ClobbersRegisters() &&
887  instr->ClobbersDoubleRegisters(isolate()))) {
888  int fixed = 0;
889  int used_at_start = 0;
890  for (UseIterator it(instr); !it.Done(); it.Advance()) {
891  LUnallocated* operand = LUnallocated::cast(it.Current());
892  if (operand->IsUsedAtStart()) ++used_at_start;
893  }
894  if (instr->Output() != NULL) {
895  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
896  }
897  for (TempIterator it(instr); !it.Done(); it.Advance()) {
898  LUnallocated* operand = LUnallocated::cast(it.Current());
899  if (operand->HasFixedPolicy()) ++fixed;
900  }
901  DCHECK(fixed == 0 || used_at_start == 0);
902  }
903 #endif
904 
905  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
906  instr = AssignPointerMap(instr);
907  }
908  if (FLAG_stress_environments && !instr->HasEnvironment()) {
909  instr = AssignEnvironment(instr);
910  }
911  chunk_->AddInstruction(instr, current_block_);
912 
913  if (instr->IsCall()) {
914  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
915  LInstruction* instruction_needing_environment = NULL;
916  if (hydrogen_val->HasObservableSideEffects()) {
917  HSimulate* sim = HSimulate::cast(hydrogen_val->next());
918  instruction_needing_environment = instr;
919  sim->ReplayEnvironment(current_block_->last_environment());
920  hydrogen_value_for_lazy_bailout = sim;
921  }
922  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
923  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
924  chunk_->AddInstruction(bailout, current_block_);
925  if (instruction_needing_environment != NULL) {
926  // Store the lazy deopt environment with the instruction if needed.
927  // Right now it is only used for LInstanceOfKnownGlobal.
928  instruction_needing_environment->
929  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
930  }
931  }
932 }
933 
934 
935 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
936  return new(zone()) LGoto(instr->FirstSuccessor());
937 }
938 
939 
940 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
941  HValue* value = instr->value();
942  Representation r = value->representation();
943  HType type = value->type();
944  ToBooleanStub::Types expected = instr->expected_input_types();
945  if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
946 
947  bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() ||
948  type.IsJSArray() || type.IsHeapNumber() || type.IsString();
949  LInstruction* branch = new(zone()) LBranch(UseRegister(value));
950  if (!easy_case &&
951  ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) ||
952  !expected.IsGeneric())) {
953  branch = AssignEnvironment(branch);
954  }
955  return branch;
956 }
957 
958 
959 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
960  DCHECK(instr->value()->representation().IsTagged());
961  LOperand* value = UseRegisterAtStart(instr->value());
962  LOperand* temp = TempRegister();
963  return new(zone()) LCmpMapAndBranch(value, temp);
964 }
965 
966 
967 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
968  info()->MarkAsRequiresFrame();
969  return DefineAsRegister(
970  new(zone()) LArgumentsLength(UseRegister(length->value())));
971 }
972 
973 
974 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
975  info()->MarkAsRequiresFrame();
976  return DefineAsRegister(new(zone()) LArgumentsElements);
977 }
978 
979 
980 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
981  LOperand* context = UseFixed(instr->context(), cp);
982  LInstanceOf* result =
983  new(zone()) LInstanceOf(context, UseFixed(instr->left(), a0),
984  UseFixed(instr->right(), a1));
985  return MarkAsCall(DefineFixed(result, v0), instr);
986 }
987 
988 
989 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
990  HInstanceOfKnownGlobal* instr) {
991  LInstanceOfKnownGlobal* result =
992  new(zone()) LInstanceOfKnownGlobal(
993  UseFixed(instr->context(), cp),
994  UseFixed(instr->left(), a0),
995  FixedTemp(a4));
996  return MarkAsCall(DefineFixed(result, v0), instr);
997 }
998 
999 
1000 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1001  LOperand* receiver = UseRegisterAtStart(instr->receiver());
1002  LOperand* function = UseRegisterAtStart(instr->function());
1003  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1004  return AssignEnvironment(DefineAsRegister(result));
1005 }
1006 
1007 
1008 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1009  LOperand* function = UseFixed(instr->function(), a1);
1010  LOperand* receiver = UseFixed(instr->receiver(), a0);
1011  LOperand* length = UseFixed(instr->length(), a2);
1012  LOperand* elements = UseFixed(instr->elements(), a3);
1013  LApplyArguments* result = new(zone()) LApplyArguments(function,
1014  receiver,
1015  length,
1016  elements);
1017  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
1018 }
1019 
1020 
1021 LInstruction* LChunkBuilder::DoPushArguments(HPushArguments* instr) {
1022  int argc = instr->OperandCount();
1023  for (int i = 0; i < argc; ++i) {
1024  LOperand* argument = Use(instr->argument(i));
1025  AddInstruction(new(zone()) LPushArgument(argument), instr);
1026  }
1027  return NULL;
1028 }
1029 
1030 
1031 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1032  HStoreCodeEntry* store_code_entry) {
1033  LOperand* function = UseRegister(store_code_entry->function());
1034  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1035  return new(zone()) LStoreCodeEntry(function, code_object);
1036 }
1037 
1038 
1039 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1040  HInnerAllocatedObject* instr) {
1041  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1042  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1043  return DefineAsRegister(
1044  new(zone()) LInnerAllocatedObject(base_object, offset));
1045 }
1046 
1047 
1048 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1049  return instr->HasNoUses()
1050  ? NULL
1051  : DefineAsRegister(new(zone()) LThisFunction);
1052 }
1053 
1054 
1055 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1056  if (instr->HasNoUses()) return NULL;
1057 
1058  if (info()->IsStub()) {
1059  return DefineFixed(new(zone()) LContext, cp);
1060  }
1061 
1062  return DefineAsRegister(new(zone()) LContext);
1063 }
1064 
1065 
1066 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1067  LOperand* context = UseFixed(instr->context(), cp);
1068  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1069 }
1070 
1071 
1072 LInstruction* LChunkBuilder::DoCallJSFunction(
1073  HCallJSFunction* instr) {
1074  LOperand* function = UseFixed(instr->function(), a1);
1075 
1076  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1077 
1078  return MarkAsCall(DefineFixed(result, v0), instr);
1079 }
1080 
1081 
1082 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1083  HCallWithDescriptor* instr) {
1084  CallInterfaceDescriptor descriptor = instr->descriptor();
1085 
1086  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1087  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1088  ops.Add(target, zone());
1089  for (int i = 1; i < instr->OperandCount(); i++) {
1090  LOperand* op =
1091  UseFixed(instr->OperandAt(i), descriptor.GetParameterRegister(i - 1));
1092  ops.Add(op, zone());
1093  }
1094 
1095  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1096  descriptor, ops, zone());
1097  return MarkAsCall(DefineFixed(result, v0), instr);
1098 }
1099 
1100 
1101 LInstruction* LChunkBuilder::DoTailCallThroughMegamorphicCache(
1102  HTailCallThroughMegamorphicCache* instr) {
1103  LOperand* context = UseFixed(instr->context(), cp);
1104  LOperand* receiver_register =
1105  UseFixed(instr->receiver(), LoadDescriptor::ReceiverRegister());
1106  LOperand* name_register =
1107  UseFixed(instr->name(), LoadDescriptor::NameRegister());
1108  // Not marked as call. It can't deoptimize, and it never returns.
1109  return new (zone()) LTailCallThroughMegamorphicCache(
1110  context, receiver_register, name_register);
1111 }
1112 
1113 
1114 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1115  LOperand* context = UseFixed(instr->context(), cp);
1116  LOperand* function = UseFixed(instr->function(), a1);
1117  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1118  return MarkAsCall(DefineFixed(result, v0), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1119 }
1120 
1121 
1122 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1123  switch (instr->op()) {
1124  case kMathFloor:
1125  return DoMathFloor(instr);
1126  case kMathRound:
1127  return DoMathRound(instr);
1128  case kMathFround:
1129  return DoMathFround(instr);
1130  case kMathAbs:
1131  return DoMathAbs(instr);
1132  case kMathLog:
1133  return DoMathLog(instr);
1134  case kMathExp:
1135  return DoMathExp(instr);
1136  case kMathSqrt:
1137  return DoMathSqrt(instr);
1138  case kMathPowHalf:
1139  return DoMathPowHalf(instr);
1140  case kMathClz32:
1141  return DoMathClz32(instr);
1142  default:
1143  UNREACHABLE();
1144  return NULL;
1145  }
1146 }
1147 
1148 
1149 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1150  DCHECK(instr->representation().IsDouble());
1151  DCHECK(instr->value()->representation().IsDouble());
1152  LOperand* input = UseFixedDouble(instr->value(), f4);
1153  return MarkAsCall(DefineFixedDouble(new(zone()) LMathLog(input), f4), instr);
1154 }
1155 
1156 
1157 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1158  LOperand* input = UseRegisterAtStart(instr->value());
1159  LMathClz32* result = new(zone()) LMathClz32(input);
1160  return DefineAsRegister(result);
1161 }
1162 
1163 
1164 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1165  DCHECK(instr->representation().IsDouble());
1166  DCHECK(instr->value()->representation().IsDouble());
1167  LOperand* input = UseRegister(instr->value());
1168  LOperand* temp1 = TempRegister();
1169  LOperand* temp2 = TempRegister();
1170  LOperand* double_temp = TempDoubleRegister();
1171  LMathExp* result = new(zone()) LMathExp(input, double_temp, temp1, temp2);
1172  return DefineAsRegister(result);
1173 }
1174 
1175 
1176 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1177  // Input cannot be the same as the result, see LCodeGen::DoMathPowHalf.
1178  LOperand* input = UseFixedDouble(instr->value(), f8);
1179  LOperand* temp = TempDoubleRegister();
1180  LMathPowHalf* result = new(zone()) LMathPowHalf(input, temp);
1181  return DefineFixedDouble(result, f4);
1182 }
1183 
1184 
1185 LInstruction* LChunkBuilder::DoMathFround(HUnaryMathOperation* instr) {
1186  LOperand* input = UseRegister(instr->value());
1187  LMathFround* result = new (zone()) LMathFround(input);
1188  return DefineAsRegister(result);
1189 }
1190 
1191 
1192 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1193  Representation r = instr->value()->representation();
1194  LOperand* context = (r.IsDouble() || r.IsSmiOrInteger32())
1195  ? NULL
1196  : UseFixed(instr->context(), cp);
1197  LOperand* input = UseRegister(instr->value());
1198  LInstruction* result =
1199  DefineAsRegister(new(zone()) LMathAbs(context, input));
1200  if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
1201  if (!r.IsDouble()) result = AssignEnvironment(result);
1202  return result;
1203 }
1204 
1205 
1206 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1207  LOperand* input = UseRegister(instr->value());
1208  LOperand* temp = TempRegister();
1209  LMathFloor* result = new(zone()) LMathFloor(input, temp);
1210  return AssignEnvironment(AssignPointerMap(DefineAsRegister(result)));
1211 }
1212 
1213 
1214 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1215  LOperand* input = UseRegister(instr->value());
1216  LMathSqrt* result = new(zone()) LMathSqrt(input);
1217  return DefineAsRegister(result);
1218 }
1219 
1220 
1221 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1222  LOperand* input = UseRegister(instr->value());
1223  LOperand* temp = TempDoubleRegister();
1224  LMathRound* result = new(zone()) LMathRound(input, temp);
1225  return AssignEnvironment(DefineAsRegister(result));
1226 }
1227 
1228 
1229 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1230  LOperand* context = UseFixed(instr->context(), cp);
1231  LOperand* constructor = UseFixed(instr->constructor(), a1);
1232  LCallNew* result = new(zone()) LCallNew(context, constructor);
1233  return MarkAsCall(DefineFixed(result, v0), instr);
1234 }
1235 
1236 
1237 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1238  LOperand* context = UseFixed(instr->context(), cp);
1239  LOperand* constructor = UseFixed(instr->constructor(), a1);
1240  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1241  return MarkAsCall(DefineFixed(result, v0), instr);
1242 }
1243 
1244 
1245 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1246  LOperand* context = UseFixed(instr->context(), cp);
1247  LOperand* function = UseFixed(instr->function(), a1);
1248  LCallFunction* call = new(zone()) LCallFunction(context, function);
1249  return MarkAsCall(DefineFixed(call, v0), instr);
1250 }
1251 
1252 
1253 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1254  LOperand* context = UseFixed(instr->context(), cp);
1255  return MarkAsCall(DefineFixed(new(zone()) LCallRuntime(context), v0), instr);
1256 }
1257 
1258 
1259 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1260  return DoShift(Token::ROR, instr);
1261 }
1262 
1263 
1264 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1265  return DoShift(Token::SHR, instr);
1266 }
1267 
1268 
1269 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1270  return DoShift(Token::SAR, instr);
1271 }
1272 
1273 
1274 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1275  return DoShift(Token::SHL, instr);
1276 }
1277 
1278 
1279 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1280  if (instr->representation().IsSmiOrInteger32()) {
1281  DCHECK(instr->left()->representation().Equals(instr->representation()));
1282  DCHECK(instr->right()->representation().Equals(instr->representation()));
1283  DCHECK(instr->CheckFlag(HValue::kTruncatingToInt32));
1284 
1285  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1286  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1287  return DefineAsRegister(new(zone()) LBitI(left, right));
1288  } else {
1289  return DoArithmeticT(instr->op(), instr);
1290  }
1291 }
1292 
1293 
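// Integer division is strength-reduced where possible: power-of-two and
// other constant divisors get dedicated instructions; everything else falls
// back to LDivI (see DoDiv below).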
1294 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1295  DCHECK(instr->representation().IsSmiOrInteger32());
1296  DCHECK(instr->left()->representation().Equals(instr->representation()));
1297  DCHECK(instr->right()->representation().Equals(instr->representation()));
1298  LOperand* dividend = UseRegister(instr->left());
1299  int32_t divisor = instr->right()->GetInteger32Constant();
1300  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1301  dividend, divisor));
1302  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1303  (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1304  (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1305  divisor != 1 && divisor != -1)) {
1306  result = AssignEnvironment(result);
1307  }
1308  return result;
1309 }
1310 
1311 
1312 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1313  DCHECK(instr->representation().IsInteger32());
1314  DCHECK(instr->left()->representation().Equals(instr->representation()));
1315  DCHECK(instr->right()->representation().Equals(instr->representation()));
1316  LOperand* dividend = UseRegister(instr->left());
1317  int32_t divisor = instr->right()->GetInteger32Constant();
1318  LInstruction* result = DefineAsRegister(new(zone()) LDivByConstI(
1319  dividend, divisor));
1320  if (divisor == 0 ||
1321  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1322  !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1323  result = AssignEnvironment(result);
1324  }
1325  return result;
1326 }
1327 
1328 
1329 LInstruction* LChunkBuilder::DoDivI(HDiv* instr) {
1330  DCHECK(instr->representation().IsSmiOrInteger32());
1331  DCHECK(instr->left()->representation().Equals(instr->representation()));
1332  DCHECK(instr->right()->representation().Equals(instr->representation()));
1333  LOperand* dividend = UseRegister(instr->left());
1334  LOperand* divisor = UseRegister(instr->right());
1335  LOperand* temp = instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)
1336  ? NULL : TempRegister();
1337  LInstruction* result =
1338  DefineAsRegister(new(zone()) LDivI(dividend, divisor, temp));
1339  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1340  instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1341  (instr->CheckFlag(HValue::kCanOverflow) &&
1342  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) ||
1343  (!instr->IsMathFloorOfDiv() &&
1344  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32))) {
1345  result = AssignEnvironment(result);
1346  }
1347  return result;
1348 }
1349 
1350 
1351 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1352  if (instr->representation().IsSmiOrInteger32()) {
1353  if (instr->RightIsPowerOf2()) {
1354  return DoDivByPowerOf2I(instr);
1355  } else if (instr->right()->IsConstant()) {
1356  return DoDivByConstI(instr);
1357  } else {
1358  return DoDivI(instr);
1359  }
1360  } else if (instr->representation().IsDouble()) {
1361  return DoArithmeticD(Token::DIV, instr);
1362  } else {
1363  return DoArithmeticT(Token::DIV, instr);
1364  }
1365 }
1366 
1367 
1368 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1369  LOperand* dividend = UseRegisterAtStart(instr->left());
1370  int32_t divisor = instr->right()->GetInteger32Constant();
1371  LInstruction* result = DefineAsRegister(new(zone()) LFlooringDivByPowerOf2I(
1372  dividend, divisor));
1373  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1374  (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1375  result = AssignEnvironment(result);
1376  }
1377  return result;
1378 }
1379 
1380 
1381 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1382  DCHECK(instr->representation().IsInteger32());
1383  DCHECK(instr->left()->representation().Equals(instr->representation()));
1384  DCHECK(instr->right()->representation().Equals(instr->representation()));
1385  LOperand* dividend = UseRegister(instr->left());
1386  int32_t divisor = instr->right()->GetInteger32Constant();
1387  LOperand* temp =
1388  ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1389  (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1390  NULL : TempRegister();
1391  LInstruction* result = DefineAsRegister(
1392  new(zone()) LFlooringDivByConstI(dividend, divisor, temp));
1393  if (divisor == 0 ||
1394  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1395  result = AssignEnvironment(result);
1396  }
1397  return result;
1398 }
1399 
1400 
1401 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1402  DCHECK(instr->representation().IsSmiOrInteger32());
1403  DCHECK(instr->left()->representation().Equals(instr->representation()));
1404  DCHECK(instr->right()->representation().Equals(instr->representation()));
1405  LOperand* dividend = UseRegister(instr->left());
1406  LOperand* divisor = UseRegister(instr->right());
1407  LFlooringDivI* div = new(zone()) LFlooringDivI(dividend, divisor);
1408  return AssignEnvironment(DefineAsRegister(div));
1409 }
1410 
1411 
1412 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1413  if (instr->RightIsPowerOf2()) {
1414  return DoFlooringDivByPowerOf2I(instr);
1415  } else if (instr->right()->IsConstant()) {
1416  return DoFlooringDivByConstI(instr);
1417  } else {
1418  return DoFlooringDivI(instr);
1419  }
1420 }
1421 
1422 
1423 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1424  DCHECK(instr->representation().IsSmiOrInteger32());
1425  DCHECK(instr->left()->representation().Equals(instr->representation()));
1426  DCHECK(instr->right()->representation().Equals(instr->representation()));
1427  LOperand* dividend = UseRegisterAtStart(instr->left());
1428  int32_t divisor = instr->right()->GetInteger32Constant();
1429  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1430  dividend, divisor));
1431  if (instr->CheckFlag(HValue::kLeftCanBeNegative) &&
1432  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1433  result = AssignEnvironment(result);
1434  }
1435  return result;
1436 }
1437 
1438 
1439 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1440  DCHECK(instr->representation().IsSmiOrInteger32());
1441  DCHECK(instr->left()->representation().Equals(instr->representation()));
1442  DCHECK(instr->right()->representation().Equals(instr->representation()));
1443  LOperand* dividend = UseRegister(instr->left());
1444  int32_t divisor = instr->right()->GetInteger32Constant();
1445  LInstruction* result = DefineAsRegister(new(zone()) LModByConstI(
1446  dividend, divisor));
1447  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1448  result = AssignEnvironment(result);
1449  }
1450  return result;
1451 }
1452 
1453 
1454 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1455  DCHECK(instr->representation().IsSmiOrInteger32());
1456  DCHECK(instr->left()->representation().Equals(instr->representation()));
1457  DCHECK(instr->right()->representation().Equals(instr->representation()));
1458  LOperand* dividend = UseRegister(instr->left());
1459  LOperand* divisor = UseRegister(instr->right());
1460  LInstruction* result = DefineAsRegister(new(zone()) LModI(
1461  dividend, divisor));
1462  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1463  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1464  result = AssignEnvironment(result);
1465  }
1466  return result;
1467 }
1468 
1469 
1470 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1471  if (instr->representation().IsSmiOrInteger32()) {
1472  return instr->RightIsPowerOf2() ? DoModByPowerOf2I(instr) : DoModI(instr);
1473  } else if (instr->representation().IsDouble()) {
1474  return DoArithmeticD(Token::MOD, instr);
1475  } else {
1476  return DoArithmeticT(Token::MOD, instr);
1477  }
1478 }
1479 
1480 
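// Integer multiplies choose operand policies based on overflow and
// minus-zero checks; double multiplies may be left to DoAdd so they can be
// folded into a multiply-add on MIPS64r2 (see DoMultiplyAdd).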
1481 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1482  if (instr->representation().IsSmiOrInteger32()) {
1483  DCHECK(instr->left()->representation().Equals(instr->representation()));
1484  DCHECK(instr->right()->representation().Equals(instr->representation()));
1485  HValue* left = instr->BetterLeftOperand();
1486  HValue* right = instr->BetterRightOperand();
1487  LOperand* left_op;
1488  LOperand* right_op;
1489  bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1490  bool bailout_on_minus_zero = instr->CheckFlag(HValue::kBailoutOnMinusZero);
1491 
1492  if (right->IsConstant()) {
1493  HConstant* constant = HConstant::cast(right);
1494  int32_t constant_value = constant->Integer32Value();
1495  // Constants -1, 0 and 1 can be optimized if the result can overflow.
1496  // For other constants, it can be optimized only without overflow.
1497  if (!can_overflow || ((constant_value >= -1) && (constant_value <= 1))) {
1498  left_op = UseRegisterAtStart(left);
1499  right_op = UseConstant(right);
1500  } else {
1501  if (bailout_on_minus_zero) {
1502  left_op = UseRegister(left);
1503  } else {
1504  left_op = UseRegisterAtStart(left);
1505  }
1506  right_op = UseRegister(right);
1507  }
1508  } else {
1509  if (bailout_on_minus_zero) {
1510  left_op = UseRegister(left);
1511  } else {
1512  left_op = UseRegisterAtStart(left);
1513  }
1514  right_op = UseRegister(right);
1515  }
1516  LMulI* mul = new(zone()) LMulI(left_op, right_op);
1517  if (can_overflow || bailout_on_minus_zero) {
1518  AssignEnvironment(mul);
1519  }
1520  return DefineAsRegister(mul);
1521 
1522  } else if (instr->representation().IsDouble()) {
1523  if (kArchVariant == kMips64r2) {
1524  if (instr->HasOneUse() && instr->uses().value()->IsAdd()) {
1525  HAdd* add = HAdd::cast(instr->uses().value());
1526  if (instr == add->left()) {
1527  // This mul is the lhs of an add. The add and mul will be folded
1528  // into a multiply-add.
1529  return NULL;
1530  }
1531  if (instr == add->right() && !add->left()->IsMul()) {
1532  // This mul is the rhs of an add, where the lhs is not another mul.
1533  // The add and mul will be folded into a multiply-add.
1534  return NULL;
1535  }
1536  }
1537  }
1538  return DoArithmeticD(Token::MUL, instr);
1539  } else {
1540  return DoArithmeticT(Token::MUL, instr);
1541  }
1542 }
1543 
1544 
1545 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1546  if (instr->representation().IsSmiOrInteger32()) {
1547  DCHECK(instr->left()->representation().Equals(instr->representation()));
1548  DCHECK(instr->right()->representation().Equals(instr->representation()));
1549  LOperand* left = UseRegisterAtStart(instr->left());
1550  LOperand* right = UseOrConstantAtStart(instr->right());
1551  LSubI* sub = new(zone()) LSubI(left, right);
1552  LInstruction* result = DefineAsRegister(sub);
1553  if (instr->CheckFlag(HValue::kCanOverflow)) {
1554  result = AssignEnvironment(result);
1555  }
1556  return result;
1557  } else if (instr->representation().IsDouble()) {
1558  return DoArithmeticD(Token::SUB, instr);
1559  } else {
1560  return DoArithmeticT(Token::SUB, instr);
1561  }
1562 }
1563 
1564 
1565 LInstruction* LChunkBuilder::DoMultiplyAdd(HMul* mul, HValue* addend) {
1566  LOperand* multiplier_op = UseRegisterAtStart(mul->left());
1567  LOperand* multiplicand_op = UseRegisterAtStart(mul->right());
1568  LOperand* addend_op = UseRegisterAtStart(addend);
1569  return DefineSameAsFirst(new(zone()) LMultiplyAddD(addend_op, multiplier_op,
1570  multiplicand_op));
1571 }
1572 
1573 
1574 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1575  if (instr->representation().IsSmiOrInteger32()) {
1576  DCHECK(instr->left()->representation().Equals(instr->representation()));
1577  DCHECK(instr->right()->representation().Equals(instr->representation()));
1578  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1579  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1580  LAddI* add = new(zone()) LAddI(left, right);
1581  LInstruction* result = DefineAsRegister(add);
1582  if (instr->CheckFlag(HValue::kCanOverflow)) {
1583  result = AssignEnvironment(result);
1584  }
1585  return result;
1586  } else if (instr->representation().IsExternal()) {
1587  DCHECK(instr->left()->representation().IsExternal());
1588  DCHECK(instr->right()->representation().IsInteger32());
1589  DCHECK(!instr->CheckFlag(HValue::kCanOverflow));
1590  LOperand* left = UseRegisterAtStart(instr->left());
1591  LOperand* right = UseOrConstantAtStart(instr->right());
1592  LAddI* add = new(zone()) LAddI(left, right);
1593  LInstruction* result = DefineAsRegister(add);
1594  return result;
1595  } else if (instr->representation().IsDouble()) {
1596  if (kArchVariant == kMips64r2) {
1597  if (instr->left()->IsMul())
1598  return DoMultiplyAdd(HMul::cast(instr->left()), instr->right());
1599 
1600  if (instr->right()->IsMul()) {
1601  DCHECK(!instr->left()->IsMul());
1602  return DoMultiplyAdd(HMul::cast(instr->right()), instr->left());
1603  }
1604  }
1605  return DoArithmeticD(Token::ADD, instr);
1606  } else {
1607  return DoArithmeticT(Token::ADD, instr);
1608  }
1609 }
1610 
1611 
1612 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1613  LOperand* left = NULL;
1614  LOperand* right = NULL;
1615  if (instr->representation().IsSmiOrInteger32()) {
1616  DCHECK(instr->left()->representation().Equals(instr->representation()));
1617  DCHECK(instr->right()->representation().Equals(instr->representation()));
1618  left = UseRegisterAtStart(instr->BetterLeftOperand());
1619  right = UseOrConstantAtStart(instr->BetterRightOperand());
1620  } else {
1621  DCHECK(instr->representation().IsDouble());
1622  DCHECK(instr->left()->representation().IsDouble());
1623  DCHECK(instr->right()->representation().IsDouble());
1624  left = UseRegisterAtStart(instr->left());
1625  right = UseRegisterAtStart(instr->right());
1626  }
1627  return DefineAsRegister(new(zone()) LMathMinMax(left, right));
1628 }
1629 
1630 
1631 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1632  DCHECK(instr->representation().IsDouble());
1633  // We call a C function for double power. It can't trigger a GC.
1634  // We need to use a fixed result register for the call.
1635  Representation exponent_type = instr->right()->representation();
1636  DCHECK(instr->left()->representation().IsDouble());
1637  LOperand* left = UseFixedDouble(instr->left(), f2);
1638  LOperand* right =
1639  exponent_type.IsDouble()
1640  ? UseFixedDouble(instr->right(), f4)
1641  : UseFixed(instr->right(), MathPowTaggedDescriptor::exponent());
1642  LPower* result = new(zone()) LPower(left, right);
1643  return MarkAsCall(DefineFixedDouble(result, f0),
1644  instr,
1645  CAN_DEOPTIMIZE_EAGERLY);
1646 }
1647 
1648 
1649 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1650  DCHECK(instr->left()->representation().IsTagged());
1651  DCHECK(instr->right()->representation().IsTagged());
1652  LOperand* context = UseFixed(instr->context(), cp);
1653  LOperand* left = UseFixed(instr->left(), a1);
1654  LOperand* right = UseFixed(instr->right(), a0);
1655  LCmpT* result = new(zone()) LCmpT(context, left, right);
1656  return MarkAsCall(DefineFixed(result, v0), instr);
1657 }
1658 
1659 
1660 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1661  HCompareNumericAndBranch* instr) {
1662  Representation r = instr->representation();
1663  if (r.IsSmiOrInteger32()) {
1664  DCHECK(instr->left()->representation().Equals(r));
1665  DCHECK(instr->right()->representation().Equals(r));
1666  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1667  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1668  return new(zone()) LCompareNumericAndBranch(left, right);
1669  } else {
1670  DCHECK(r.IsDouble());
1671  DCHECK(instr->left()->representation().IsDouble());
1672  DCHECK(instr->right()->representation().IsDouble());
1673  LOperand* left = UseRegisterAtStart(instr->left());
1674  LOperand* right = UseRegisterAtStart(instr->right());
1675  return new(zone()) LCompareNumericAndBranch(left, right);
1676  }
1677 }
1678 
1679 
1680 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1681  HCompareObjectEqAndBranch* instr) {
1682  LOperand* left = UseRegisterAtStart(instr->left());
1683  LOperand* right = UseRegisterAtStart(instr->right());
1684  return new(zone()) LCmpObjectEqAndBranch(left, right);
1685 }
1686 
1687 
1688 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1689  HCompareHoleAndBranch* instr) {
1690  LOperand* value = UseRegisterAtStart(instr->value());
1691  return new(zone()) LCmpHoleAndBranch(value);
1692 }
1693 
1694 
1695 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1696  HCompareMinusZeroAndBranch* instr) {
1697  LOperand* value = UseRegister(instr->value());
1698  LOperand* scratch = TempRegister();
1699  return new(zone()) LCompareMinusZeroAndBranch(value, scratch);
1700 }
1701 
1702 
1703 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1704  DCHECK(instr->value()->representation().IsTagged());
1705  LOperand* temp = TempRegister();
1706  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()),
1707  temp);
1708 }
1709 
1710 
1711 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1712  DCHECK(instr->value()->representation().IsTagged());
1713  LOperand* temp = TempRegister();
1714  return new(zone()) LIsStringAndBranch(UseRegisterAtStart(instr->value()),
1715  temp);
1716 }
1717 
1718 
1719 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1720  DCHECK(instr->value()->representation().IsTagged());
1721  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1722 }
1723 
1724 
1725 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1726  HIsUndetectableAndBranch* instr) {
1727  DCHECK(instr->value()->representation().IsTagged());
1728  return new(zone()) LIsUndetectableAndBranch(
1729  UseRegisterAtStart(instr->value()), TempRegister());
1730 }
1731 
1732 
1733 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1734  HStringCompareAndBranch* instr) {
1735  DCHECK(instr->left()->representation().IsTagged());
1736  DCHECK(instr->right()->representation().IsTagged());
1737  LOperand* context = UseFixed(instr->context(), cp);
1738  LOperand* left = UseFixed(instr->left(), a1);
1739  LOperand* right = UseFixed(instr->right(), a0);
1740  LStringCompareAndBranch* result =
1741  new(zone()) LStringCompareAndBranch(context, left, right);
1742  return MarkAsCall(result, instr);
1743 }
1744 
1745 
1746 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1747  HHasInstanceTypeAndBranch* instr) {
1748  DCHECK(instr->value()->representation().IsTagged());
1749  LOperand* value = UseRegisterAtStart(instr->value());
1750  return new(zone()) LHasInstanceTypeAndBranch(value);
1751 }
1752 
1753 
1754 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1755  HGetCachedArrayIndex* instr) {
1756  DCHECK(instr->value()->representation().IsTagged());
1757  LOperand* value = UseRegisterAtStart(instr->value());
1758 
1759  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1760 }
1761 
1762 
1763 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1764  HHasCachedArrayIndexAndBranch* instr) {
1765  DCHECK(instr->value()->representation().IsTagged());
1766  return new(zone()) LHasCachedArrayIndexAndBranch(
1767  UseRegisterAtStart(instr->value()));
1768 }
1769 
1770 
1771 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1772  HClassOfTestAndBranch* instr) {
1773  DCHECK(instr->value()->representation().IsTagged());
1774  return new(zone()) LClassOfTestAndBranch(UseRegister(instr->value()),
1775  TempRegister());
1776 }
1777 
1778 
1779 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1780  LOperand* map = UseRegisterAtStart(instr->value());
1781  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1782 }
1783 
1784 
1785 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1786  LOperand* object = UseFixed(instr->value(), a0);
1787  LDateField* result =
1788  new(zone()) LDateField(object, FixedTemp(a1), instr->index());
1789  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
1790 }
1791 
1792 
1793 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1794  LOperand* string = UseRegisterAtStart(instr->string());
1795  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1796  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1797 }
1798 
1799 
1800 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1801  LOperand* string = UseRegisterAtStart(instr->string());
1802  LOperand* index = FLAG_debug_code
1803  ? UseRegisterAtStart(instr->index())
1804  : UseRegisterOrConstantAtStart(instr->index());
1805  LOperand* value = UseRegisterAtStart(instr->value());
1806  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), cp) : NULL;
1807  return new(zone()) LSeqStringSetChar(context, string, index, value);
1808 }
1809 
1810 
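// Bounds checks that Hydrogen proved redundant are dropped entirely unless
// --debug-code keeps them for verification; only checks that can actually
// deoptimize get an environment attached.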
1811 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1812  if (!FLAG_debug_code && instr->skip_check()) return NULL;
1813  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1814  LOperand* length = !index->IsConstantOperand()
1815  ? UseRegisterOrConstantAtStart(instr->length())
1816  : UseRegisterAtStart(instr->length());
1817  LInstruction* result = new(zone()) LBoundsCheck(index, length);
1818  if (!FLAG_debug_code || !instr->skip_check()) {
1819  result = AssignEnvironment(result);
1820  }
1821  return result;
1822 }
1823 
1824 
1825 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1826  HBoundsCheckBaseIndexInformation* instr) {
1827  UNREACHABLE();
1828  return NULL;
1829 }
1830 
1831 
1832 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1833  // The control instruction marking the end of a block that completed
1834  // abruptly (e.g., threw an exception). There is nothing specific to do.
1835  return NULL;
1836 }
1837 
1838 
1839 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1840  return NULL;
1841 }
1842 
1843 
1844 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1845  // All HForceRepresentation instructions should be eliminated in the
1846  // representation change phase of Hydrogen.
1847  UNREACHABLE();
1848  return NULL;
1849 }
1850 
1851 
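// Dispatches each Hydrogen representation change to a Lithium conversion.
// Conversions that can deoptimize (e.g. tagged->int32, double->int32 without
// truncation, int32->smi with possible overflow) get an environment attached;
// conversions that may allocate a HeapNumber (double->tagged, uint32->tagged)
// are marked as deferred calls and get a pointer map.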
1852 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1853  Representation from = instr->from();
1854  Representation to = instr->to();
1855  HValue* val = instr->value();
1856  if (from.IsSmi()) {
1857  if (to.IsTagged()) {
1858  LOperand* value = UseRegister(val);
1859  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1860  }
1861  from = Representation::Tagged();
1862  }
1863  if (from.IsTagged()) {
1864  if (to.IsDouble()) {
1865  LOperand* value = UseRegister(val);
1866  LInstruction* result = DefineAsRegister(new(zone()) LNumberUntagD(value));
1867  if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1868  return result;
1869  } else if (to.IsSmi()) {
1870  LOperand* value = UseRegister(val);
1871  if (val->type().IsSmi()) {
1872  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1873  }
1874  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1875  } else {
1876  DCHECK(to.IsInteger32());
1877  if (val->type().IsSmi() || val->representation().IsSmi()) {
1878  LOperand* value = UseRegisterAtStart(val);
1879  return DefineAsRegister(new(zone()) LSmiUntag(value, false));
1880  } else {
1881  LOperand* value = UseRegister(val);
1882  LOperand* temp1 = TempRegister();
1883  LOperand* temp2 = TempDoubleRegister();
1884  LInstruction* result =
1885  DefineSameAsFirst(new(zone()) LTaggedToI(value, temp1, temp2));
1886  if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1887  return result;
1888  }
1889  }
1890  } else if (from.IsDouble()) {
1891  if (to.IsTagged()) {
1892  info()->MarkAsDeferredCalling();
1893  LOperand* value = UseRegister(val);
1894  LOperand* temp1 = TempRegister();
1895  LOperand* temp2 = TempRegister();
1896 
1897  LUnallocated* result_temp = TempRegister();
1898  LNumberTagD* result = new(zone()) LNumberTagD(value, temp1, temp2);
1899  return AssignPointerMap(Define(result, result_temp));
1900  } else if (to.IsSmi()) {
1901  LOperand* value = UseRegister(val);
1902  return AssignEnvironment(
1903  DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1904  } else {
1905  DCHECK(to.IsInteger32());
1906  LOperand* value = UseRegister(val);
1907  LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value));
1908  if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result);
1909  return result;
1910  }
1911  } else if (from.IsInteger32()) {
1912  info()->MarkAsDeferredCalling();
1913  if (to.IsTagged()) {
1914  if (val->CheckFlag(HInstruction::kUint32)) {
1915  LOperand* value = UseRegisterAtStart(val);
1916  LOperand* temp1 = TempRegister();
1917  LOperand* temp2 = TempRegister();
1918  LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1919  return AssignPointerMap(DefineAsRegister(result));
1920  } else {
1921  STATIC_ASSERT((kMinInt == Smi::kMinValue) &&
1922  (kMaxInt == Smi::kMaxValue));
1923  LOperand* value = UseRegisterAtStart(val);
1924  return DefineAsRegister(new(zone()) LSmiTag(value));
1925  }
1926  } else if (to.IsSmi()) {
1927  LOperand* value = UseRegister(val);
1928  LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1929  if (instr->CheckFlag(HValue::kCanOverflow)) {
1930  result = AssignEnvironment(result);
1931  }
1932  return result;
1933  } else {
1934  DCHECK(to.IsDouble());
1935  if (val->CheckFlag(HInstruction::kUint32)) {
1936  return DefineAsRegister(new(zone()) LUint32ToDouble(UseRegister(val)));
1937  } else {
1938  return DefineAsRegister(new(zone()) LInteger32ToDouble(Use(val)));
1939  }
1940  }
1941  }
1942  UNREACHABLE();
1943  return NULL;
1944 }
1945 
1946 
1947 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1948  LOperand* value = UseRegisterAtStart(instr->value());
1949  LInstruction* result = new(zone()) LCheckNonSmi(value);
1950  if (!instr->value()->type().IsHeapObject()) {
1951  result = AssignEnvironment(result);
1952  }
1953  return result;
1954 }
1955 
1956 
1957 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1958  LOperand* value = UseRegisterAtStart(instr->value());
1959  return AssignEnvironment(new(zone()) LCheckSmi(value));
1960 }
1961 
1962 
1963 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1964  LOperand* value = UseRegisterAtStart(instr->value());
1965  LInstruction* result = new(zone()) LCheckInstanceType(value);
1966  return AssignEnvironment(result);
1967 }
1968 
1969 
1970 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1971  LOperand* value = UseRegisterAtStart(instr->value());
1972  return AssignEnvironment(new(zone()) LCheckValue(value));
1973 }
1974 
1975 
1976 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1977  if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps;
1978  LOperand* value = UseRegisterAtStart(instr->value());
1979  LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value));
1980  if (instr->HasMigrationTarget()) {
1981  info()->MarkAsDeferredCalling();
1982  result = AssignPointerMap(result);
1983  }
1984  return result;
1985 }
1986 
1987 
1988 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1989  HValue* value = instr->value();
1990  Representation input_rep = value->representation();
1991  LOperand* reg = UseRegister(value);
1992  if (input_rep.IsDouble()) {
1993  // Revisit this decision, here and 8 lines below.
1994  return DefineAsRegister(new(zone()) LClampDToUint8(reg,
1995  TempDoubleRegister()));
1996  } else if (input_rep.IsInteger32()) {
1997  return DefineAsRegister(new(zone()) LClampIToUint8(reg));
1998  } else {
1999  DCHECK(input_rep.IsSmiOrTagged());
2000  LClampTToUint8* result =
2001  new(zone()) LClampTToUint8(reg, TempDoubleRegister());
2002  return AssignEnvironment(DefineAsRegister(result));
2003  }
2004 }
2005 
2006 
2007 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2008  HValue* value = instr->value();
2009  DCHECK(value->representation().IsDouble());
2010  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2011 }
2012 
2013 
2014 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2015  LOperand* lo = UseRegister(instr->lo());
2016  LOperand* hi = UseRegister(instr->hi());
2017  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
2018 }
2019 
2020 
2021 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2022  LOperand* context = info()->IsStub()
2023  ? UseFixed(instr->context(), cp)
2024  : NULL;
2025  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2026  return new(zone()) LReturn(UseFixed(instr->value(), v0), context,
2027  parameter_count);
2028 }
2029 
2030 
2031 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2032  Representation r = instr->representation();
2033  if (r.IsSmi()) {
2034  return DefineAsRegister(new(zone()) LConstantS);
2035  } else if (r.IsInteger32()) {
2036  return DefineAsRegister(new(zone()) LConstantI);
2037  } else if (r.IsDouble()) {
2038  return DefineAsRegister(new(zone()) LConstantD);
2039  } else if (r.IsExternal()) {
2040  return DefineAsRegister(new(zone()) LConstantE);
2041  } else if (r.IsTagged()) {
2042  return DefineAsRegister(new(zone()) LConstantT);
2043  } else {
2044  UNREACHABLE();
2045  return NULL;
2046  }
2047 }
2048 
2049 
2050 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2051  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2052  return instr->RequiresHoleCheck()
2053  ? AssignEnvironment(DefineAsRegister(result))
2054  : DefineAsRegister(result);
2055 }
2056 
2057 
2058 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2059  LOperand* context = UseFixed(instr->context(), cp);
2060  LOperand* global_object =
2061  UseFixed(instr->global_object(), LoadDescriptor::ReceiverRegister());
2062  LOperand* vector = NULL;
2063  if (FLAG_vector_ics) {
2064  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2065  }
2066  LLoadGlobalGeneric* result =
2067  new(zone()) LLoadGlobalGeneric(context, global_object, vector);
2068  return MarkAsCall(DefineFixed(result, v0), instr);
2069 }
2070 
2071 
2072 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2073  LOperand* value = UseRegister(instr->value());
2074  // Use a temp to check the value in the cell in the case where we perform
2075  // a hole check.
2076  return instr->RequiresHoleCheck()
2077  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2078  : new(zone()) LStoreGlobalCell(value, NULL);
2079 }
2080 
2081 
2082 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2083  LOperand* context = UseRegisterAtStart(instr->value());
2084  LInstruction* result =
2085  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2086  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2087  result = AssignEnvironment(result);
2088  }
2089  return result;
2090 }
2091 
2092 
2093 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2094  LOperand* context;
2095  LOperand* value;
2096  if (instr->NeedsWriteBarrier()) {
2097  context = UseTempRegister(instr->context());
2098  value = UseTempRegister(instr->value());
2099  } else {
2100  context = UseRegister(instr->context());
2101  value = UseRegister(instr->value());
2102  }
2103  LInstruction* result = new(zone()) LStoreContextSlot(context, value);
2104  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2105  result = AssignEnvironment(result);
2106  }
2107  return result;
2108 }
2109 
2110 
2111 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2112  LOperand* obj = UseRegisterAtStart(instr->object());
2113  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2114 }
2115 
2116 
2117 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2118  LOperand* context = UseFixed(instr->context(), cp);
2119  LOperand* object =
2120  UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
2121  LOperand* vector = NULL;
2122  if (FLAG_vector_ics) {
2123  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2124  }
2125 
2126  LInstruction* result =
2127  DefineFixed(new(zone()) LLoadNamedGeneric(context, object, vector), v0);
2128  return MarkAsCall(result, instr);
2129 }
2130 
2131 
2132 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2133  HLoadFunctionPrototype* instr) {
2134  return AssignEnvironment(DefineAsRegister(
2135  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
2136 }
2137 
2138 
2139 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2140  return DefineAsRegister(new(zone()) LLoadRoot);
2141 }
2142 
2143 
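// Keyed loads deoptimize (AssignEnvironment) when a hole check is required on
// a fixed or fixed-double array, or when a UINT32 element load could produce a
// value outside the int32 range and the result is not flagged as kUint32.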
2144 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2145  DCHECK(instr->key()->representation().IsSmiOrInteger32());
2146  ElementsKind elements_kind = instr->elements_kind();
2147  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2148  LInstruction* result = NULL;
2149 
2150  if (!instr->is_typed_elements()) {
2151  LOperand* obj = NULL;
2152  if (instr->representation().IsDouble()) {
2153  obj = UseRegister(instr->elements());
2154  } else {
2155  DCHECK(instr->representation().IsSmiOrTagged() ||
2156  instr->representation().IsInteger32());
2157  obj = UseRegisterAtStart(instr->elements());
2158  }
2159  result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key));
2160  } else {
2161  DCHECK(
2162  (instr->representation().IsInteger32() &&
2163  !IsDoubleOrFloatElementsKind(elements_kind)) ||
2164  (instr->representation().IsDouble() &&
2165  IsDoubleOrFloatElementsKind(elements_kind)));
2166  LOperand* backing_store = UseRegister(instr->elements());
2167  result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key));
2168  }
2169 
2170  if ((instr->is_external() || instr->is_fixed_typed_array()) ?
2171  // see LCodeGen::DoLoadKeyedExternalArray
2172  ((elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2173  elements_kind == UINT32_ELEMENTS) &&
2174  !instr->CheckFlag(HInstruction::kUint32)) :
2175  // see LCodeGen::DoLoadKeyedFixedDoubleArray and
2176  // LCodeGen::DoLoadKeyedFixedArray
2177  instr->RequiresHoleCheck()) {
2178  result = AssignEnvironment(result);
2179  }
2180  return result;
2181 }
2182 
2183 
2184 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2185  LOperand* context = UseFixed(instr->context(), cp);
2186  LOperand* object =
2187  UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
2188  LOperand* key = UseFixed(instr->key(), LoadDescriptor::NameRegister());
2189  LOperand* vector = NULL;
2190  if (FLAG_vector_ics) {
2191  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2192  }
2193 
2194  LInstruction* result =
2195  DefineFixed(new(zone()) LLoadKeyedGeneric(context, object, key, vector),
2196  v0);
2197  return MarkAsCall(result, instr);
2198 }
2199 
2200 
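// For stores into tagged backing stores that need a write barrier, the object,
// value and key are all placed in temp registers so the write-barrier code is
// free to clobber them; otherwise the key may also be a constant.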
2201 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2202  if (!instr->is_typed_elements()) {
2203  DCHECK(instr->elements()->representation().IsTagged());
2204  bool needs_write_barrier = instr->NeedsWriteBarrier();
2205  LOperand* object = NULL;
2206  LOperand* val = NULL;
2207  LOperand* key = NULL;
2208 
2209  if (instr->value()->representation().IsDouble()) {
2210  object = UseRegisterAtStart(instr->elements());
2211  key = UseRegisterOrConstantAtStart(instr->key());
2212  val = UseRegister(instr->value());
2213  } else {
2214  DCHECK(instr->value()->representation().IsSmiOrTagged() ||
2215  instr->value()->representation().IsInteger32());
2216  if (needs_write_barrier) {
2217  object = UseTempRegister(instr->elements());
2218  val = UseTempRegister(instr->value());
2219  key = UseTempRegister(instr->key());
2220  } else {
2221  object = UseRegisterAtStart(instr->elements());
2222  val = UseRegisterAtStart(instr->value());
2223  key = UseRegisterOrConstantAtStart(instr->key());
2224  }
2225  }
2226 
2227  return new(zone()) LStoreKeyed(object, key, val);
2228  }
2229 
2230  DCHECK(
2231  (instr->value()->representation().IsInteger32() &&
2232  !IsDoubleOrFloatElementsKind(instr->elements_kind())) ||
2233  (instr->value()->representation().IsDouble() &&
2234  IsDoubleOrFloatElementsKind(instr->elements_kind())));
2235  DCHECK((instr->is_fixed_typed_array() &&
2236  instr->elements()->representation().IsTagged()) ||
2237  (instr->is_external() &&
2238  instr->elements()->representation().IsExternal()));
2239  LOperand* val = UseRegister(instr->value());
2240  LOperand* key = UseRegisterOrConstantAtStart(instr->key());
2241  LOperand* backing_store = UseRegister(instr->elements());
2242  return new(zone()) LStoreKeyed(backing_store, key, val);
2243 }
2244 
2245 
2246 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2247  LOperand* context = UseFixed(instr->context(), cp);
2248  LOperand* obj =
2249  UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2250  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
2251  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2252 
2253  DCHECK(instr->object()->representation().IsTagged());
2254  DCHECK(instr->key()->representation().IsTagged());
2255  DCHECK(instr->value()->representation().IsTagged());
2256 
2257  return MarkAsCall(
2258  new(zone()) LStoreKeyedGeneric(context, obj, key, val), instr);
2259 }
2260 
2261 
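// Simple map-change transitions just install the new map using a temp
// register; transitions that need element conversion are lowered to a runtime
// call (MarkAsCall) with the object and context in fixed registers.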
2262 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2263  HTransitionElementsKind* instr) {
2264  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2265  LOperand* object = UseRegister(instr->object());
2266  LOperand* new_map_reg = TempRegister();
2267  LTransitionElementsKind* result =
2268  new(zone()) LTransitionElementsKind(object, NULL, new_map_reg);
2269  return result;
2270  } else {
2271  LOperand* object = UseFixed(instr->object(), a0);
2272  LOperand* context = UseFixed(instr->context(), cp);
2273  LTransitionElementsKind* result =
2274  new(zone()) LTransitionElementsKind(object, context, NULL);
2275  return MarkAsCall(result, instr);
2276  }
2277 }
2278 
2279 
2280 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2281  HTrapAllocationMemento* instr) {
2282  LOperand* object = UseRegister(instr->object());
2283  LOperand* temp = TempRegister();
2284  LTrapAllocationMemento* result =
2285  new(zone()) LTrapAllocationMemento(object, temp);
2286  return AssignEnvironment(result);
2287 }
2288 
2289 
2290 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2291  bool is_in_object = instr->access().IsInobject();
2292  bool needs_write_barrier = instr->NeedsWriteBarrier();
2293  bool needs_write_barrier_for_map = instr->has_transition() &&
2294  instr->NeedsWriteBarrierForMap();
2295 
2296  LOperand* obj;
2297  if (needs_write_barrier) {
2298  obj = is_in_object
2299  ? UseRegister(instr->object())
2300  : UseTempRegister(instr->object());
2301  } else {
2302  obj = needs_write_barrier_for_map
2303  ? UseRegister(instr->object())
2304  : UseRegisterAtStart(instr->object());
2305  }
2306 
2307  LOperand* val;
2308  if (needs_write_barrier) {
2309  val = UseTempRegister(instr->value());
2310  } else if (instr->field_representation().IsDouble()) {
2311  val = UseRegisterAtStart(instr->value());
2312  } else {
2313  val = UseRegister(instr->value());
2314  }
2315 
2316  // We need a temporary register for the write barrier of the map field.
2317  LOperand* temp = needs_write_barrier_for_map ? TempRegister() : NULL;
2318 
2319  return new(zone()) LStoreNamedField(obj, val, temp);
2320 }
2321 
2322 
2323 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2324  LOperand* context = UseFixed(instr->context(), cp);
2325  LOperand* obj =
2326  UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2327  LOperand* val = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2328 
2329  LInstruction* result = new(zone()) LStoreNamedGeneric(context, obj, val);
2330  return MarkAsCall(result, instr);
2331 }
2332 
2333 
2334 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2335  LOperand* context = UseFixed(instr->context(), cp);
2336  LOperand* left = UseFixed(instr->left(), a1);
2337  LOperand* right = UseFixed(instr->right(), a0);
2338  return MarkAsCall(
2339  DefineFixed(new(zone()) LStringAdd(context, left, right), v0),
2340  instr);
2341 }
2342 
2343 
2344 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2345  LOperand* string = UseTempRegister(instr->string());
2346  LOperand* index = UseTempRegister(instr->index());
2347  LOperand* context = UseAny(instr->context());
2348  LStringCharCodeAt* result =
2349  new(zone()) LStringCharCodeAt(context, string, index);
2350  return AssignPointerMap(DefineAsRegister(result));
2351 }
2352 
2353 
2354 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2355  LOperand* char_code = UseRegister(instr->value());
2356  LOperand* context = UseAny(instr->context());
2357  LStringCharFromCode* result =
2358  new(zone()) LStringCharFromCode(context, char_code);
2359  return AssignPointerMap(DefineAsRegister(result));
2360 }
2361 
2362 
2363 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2364  info()->MarkAsDeferredCalling();
2365  LOperand* context = UseAny(instr->context());
2366  LOperand* size = UseRegisterOrConstant(instr->size());
2367  LOperand* temp1 = TempRegister();
2368  LOperand* temp2 = TempRegister();
2369  LAllocate* result = new(zone()) LAllocate(context, size, temp1, temp2);
2370  return AssignPointerMap(DefineAsRegister(result));
2371 }
2372 
2373 
2374 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2375  LOperand* context = UseFixed(instr->context(), cp);
2376  return MarkAsCall(
2377  DefineFixed(new(zone()) LRegExpLiteral(context), v0), instr);
2378 }
2379 
2380 
2381 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2382  LOperand* context = UseFixed(instr->context(), cp);
2383  return MarkAsCall(
2384  DefineFixed(new(zone()) LFunctionLiteral(context), v0), instr);
2385 }
2386 
2387 
2388 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2389  DCHECK(argument_count_ == 0);
2390  allocator_->MarkAsOsrEntry();
2391  current_block_->last_environment()->set_ast_id(instr->ast_id());
2392  return AssignEnvironment(new(zone()) LOsrEntry);
2393 }
2394 
2395 
2396 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2397  LParameter* result = new(zone()) LParameter;
2398  if (instr->kind() == HParameter::STACK_PARAMETER) {
2399  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2400  return DefineAsSpilled(result, spill_index);
2401  } else {
2402  DCHECK(info()->IsStub());
2403  CallInterfaceDescriptor descriptor =
2404  info()->code_stub()->GetCallInterfaceDescriptor();
2405  int index = static_cast<int>(instr->index());
2406  Register reg = descriptor.GetEnvironmentParameterRegister(index);
2407  return DefineFixed(result, reg);
2408  }
2409 }
2410 
2411 
2412 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2413  // Use an index that corresponds to the location in the unoptimized frame,
2414  // which the optimized frame will subsume.
2415  int env_index = instr->index();
2416  int spill_index = 0;
2417  if (instr->environment()->is_parameter_index(env_index)) {
2418  spill_index = chunk()->GetParameterStackSlot(env_index);
2419  } else {
2420  spill_index = env_index - instr->environment()->first_local_index();
2421  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2422  Retry(kTooManySpillSlotsNeededForOSR);
2423  spill_index = 0;
2424  }
2425  }
2426  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2427 }
2428 
2429 
2430 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2431  LOperand* context = UseFixed(instr->context(), cp);
2432  return MarkAsCall(DefineFixed(new(zone()) LCallStub(context), v0), instr);
2433 }
2434 
2435 
2436 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2437  // There are no real uses of the arguments object.
2438  // arguments.length and element access are supported directly on
2439  // stack arguments, and any real arguments object use causes a bailout.
2440  // So this value is never used.
2441  return NULL;
2442 }
2443 
2444 
2445 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2446  instr->ReplayEnvironment(current_block_->last_environment());
2447 
2448  // There are no real uses of a captured object.
2449  return NULL;
2450 }
2451 
2452 
2453 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2454  info()->MarkAsRequiresFrame();
2455  LOperand* args = UseRegister(instr->arguments());
2456  LOperand* length = UseRegisterOrConstantAtStart(instr->length());
2457  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
2458  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2459 }
2460 
2461 
2462 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2463  LOperand* object = UseFixed(instr->value(), a0);
2464  LToFastProperties* result = new(zone()) LToFastProperties(object);
2465  return MarkAsCall(DefineFixed(result, v0), instr);
2466 }
2467 
2468 
2469 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2470  LOperand* context = UseFixed(instr->context(), cp);
2471  LTypeof* result = new(zone()) LTypeof(context, UseFixed(instr->value(), a0));
2472  return MarkAsCall(DefineFixed(result, v0), instr);
2473 }
2474 
2475 
2476 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2477  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2478 }
2479 
2480 
2481 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2482  HIsConstructCallAndBranch* instr) {
2483  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2484 }
2485 
2486 
2487 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2488  instr->ReplayEnvironment(current_block_->last_environment());
2489  return NULL;
2490 }
2491 
2492 
2493 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2494  if (instr->is_function_entry()) {
2495  LOperand* context = UseFixed(instr->context(), cp);
2496  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2497  } else {
2498  DCHECK(instr->is_backwards_branch());
2499  LOperand* context = UseAny(instr->context());
2500  return AssignEnvironment(
2501  AssignPointerMap(new(zone()) LStackCheck(context)));
2502  }
2503 }
2504 
2505 
2506 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2507  HEnvironment* outer = current_block_->last_environment();
2508  outer->set_ast_id(instr->ReturnId());
2509  HConstant* undefined = graph()->GetConstantUndefined();
2510  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2511  instr->arguments_count(),
2512  instr->function(),
2513  undefined,
2514  instr->inlining_kind());
2515  // Only replay binding of arguments object if it wasn't removed from graph.
2516  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2517  inner->Bind(instr->arguments_var(), instr->arguments_object());
2518  }
2519  inner->BindContext(instr->closure_context());
2520  inner->set_entry(instr);
2521  current_block_->UpdateEnvironment(inner);
2522  chunk_->AddInlinedClosure(instr->closure());
2523  return NULL;
2524 }
2525 
2526 
2527 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2528  LInstruction* pop = NULL;
2529 
2530  HEnvironment* env = current_block_->last_environment();
2531 
2532  if (env->entry()->arguments_pushed()) {
2533  int argument_count = env->arguments_environment()->parameter_count();
2534  pop = new(zone()) LDrop(argument_count);
2535  DCHECK(instr->argument_delta() == -argument_count);
2536  }
2537 
2538  HEnvironment* outer = current_block_->last_environment()->
2539  DiscardInlined(false);
2540  current_block_->UpdateEnvironment(outer);
2541 
2542  return pop;
2543 }
2544 
2545 
2546 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2547  LOperand* context = UseFixed(instr->context(), cp);
2548  LOperand* object = UseFixed(instr->enumerable(), a0);
2549  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2550  return MarkAsCall(DefineFixed(result, v0), instr, CAN_DEOPTIMIZE_EAGERLY);
2551 }
2552 
2553 
2554 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2555  LOperand* map = UseRegister(instr->map());
2556  return AssignEnvironment(DefineAsRegister(new(zone()) LForInCacheArray(map)));
2557 }
2558 
2559 
2560 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2561  LOperand* value = UseRegisterAtStart(instr->value());
2562  LOperand* map = UseRegisterAtStart(instr->map());
2563  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2564 }
2565 
2566 
2567 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2568  LOperand* object = UseRegister(instr->object());
2569  LOperand* index = UseTempRegister(instr->index());
2570  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
2571  LInstruction* result = DefineSameAsFirst(load);
2572  return AssignPointerMap(result);
2573 }
2574 
2575 
2576 LInstruction* LChunkBuilder::DoStoreFrameContext(HStoreFrameContext* instr) {
2577  LOperand* context = UseRegisterAtStart(instr->context());
2578  return new(zone()) LStoreFrameContext(context);
2579 }
2580 
2581 
2582 LInstruction* LChunkBuilder::DoAllocateBlockContext(
2583  HAllocateBlockContext* instr) {
2584  LOperand* context = UseFixed(instr->context(), cp);
2585  LOperand* function = UseRegisterAtStart(instr->function());
2586  LAllocateBlockContext* result =
2587  new(zone()) LAllocateBlockContext(context, function);
2588  return MarkAsCall(DefineFixed(result, cp), instr);
2589 }
2590 
2591 
2592 } } // namespace v8::internal
2593 
2594 #endif // V8_TARGET_ARCH_MIPS64