lithium-x64.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/v8.h"
6 
7 #if V8_TARGET_ARCH_X64
8 
9 #include "src/hydrogen-osr.h"
10 #include "src/lithium-inl.h"
12 
13 namespace v8 {
14 namespace internal {
15 
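// DEFINE_COMPILE expands, via LITHIUM_CONCRETE_INSTRUCTION_LIST below, to a
// CompileToNative() method for every concrete lithium instruction; each one
// simply dispatches to the matching LCodeGen::Do<Type>() routine.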
16 #define DEFINE_COMPILE(type) \
17  void L##type::CompileToNative(LCodeGen* generator) { \
18  generator->Do##type(this); \
19  }
20 LITHIUM_CONCRETE_INSTRUCTION_LIST(DEFINE_COMPILE)
21 #undef DEFINE_COMPILE
22 
23 
24 #ifdef DEBUG
25 void LInstruction::VerifyCall() {
26  // Call instructions can use only fixed registers as temporaries and
27  // outputs because all registers are blocked by the calling convention.
28  // Input operands must use a fixed register, a use-at-start policy, or
29  // a non-register policy.
30  DCHECK(Output() == NULL ||
31  LUnallocated::cast(Output())->HasFixedPolicy() ||
32  !LUnallocated::cast(Output())->HasRegisterPolicy());
33  for (UseIterator it(this); !it.Done(); it.Advance()) {
34  LUnallocated* operand = LUnallocated::cast(it.Current());
35  DCHECK(operand->HasFixedPolicy() ||
36  operand->IsUsedAtStart());
37  }
38  for (TempIterator it(this); !it.Done(); it.Advance()) {
39  LUnallocated* operand = LUnallocated::cast(it.Current());
40  DCHECK(operand->HasFixedPolicy() || !operand->HasRegisterPolicy());
41  }
42 }
43 #endif
44 
45 
46 void LInstruction::PrintTo(StringStream* stream) {
47  stream->Add("%s ", this->Mnemonic());
48 
49  PrintOutputOperandTo(stream);
50 
51  PrintDataTo(stream);
52 
53  if (HasEnvironment()) {
54  stream->Add(" ");
55  environment()->PrintTo(stream);
56  }
57 
58  if (HasPointerMap()) {
59  stream->Add(" ");
60  pointer_map()->PrintTo(stream);
61  }
62 }
63 
64 
65 void LInstruction::PrintDataTo(StringStream* stream) {
66  stream->Add("= ");
67  for (int i = 0; i < InputCount(); i++) {
68  if (i > 0) stream->Add(" ");
69  if (InputAt(i) == NULL) {
70  stream->Add("NULL");
71  } else {
72  InputAt(i)->PrintTo(stream);
73  }
74  }
75 }
76 
77 
78 void LInstruction::PrintOutputOperandTo(StringStream* stream) {
79  if (HasResult()) result()->PrintTo(stream);
80 }
81 
82 
83 void LLabel::PrintDataTo(StringStream* stream) {
84  LGap::PrintDataTo(stream);
85  LLabel* rep = replacement();
86  if (rep != NULL) {
87  stream->Add(" Dead block replaced with B%d", rep->block_id());
88  }
89 }
90 
91 
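// A gap is redundant when none of its parallel move lists contains a
// non-redundant move.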
92 bool LGap::IsRedundant() const {
93  for (int i = 0; i < 4; i++) {
94  if (parallel_moves_[i] != NULL && !parallel_moves_[i]->IsRedundant()) {
95  return false;
96  }
97  }
98 
99  return true;
100 }
101 
102 
103 void LGap::PrintDataTo(StringStream* stream) {
104  for (int i = 0; i < 4; i++) {
105  stream->Add("(");
106  if (parallel_moves_[i] != NULL) {
107  parallel_moves_[i]->PrintDataTo(stream);
108  }
109  stream->Add(") ");
110  }
111 }
112 
113 
114 const char* LArithmeticD::Mnemonic() const {
115  switch (op()) {
116  case Token::ADD: return "add-d";
117  case Token::SUB: return "sub-d";
118  case Token::MUL: return "mul-d";
119  case Token::DIV: return "div-d";
120  case Token::MOD: return "mod-d";
121  default:
122  UNREACHABLE();
123  return NULL;
124  }
125 }
126 
127 
128 const char* LArithmeticT::Mnemonic() const {
129  switch (op()) {
130  case Token::ADD: return "add-t";
131  case Token::SUB: return "sub-t";
132  case Token::MUL: return "mul-t";
133  case Token::MOD: return "mod-t";
134  case Token::DIV: return "div-t";
135  case Token::BIT_AND: return "bit-and-t";
136  case Token::BIT_OR: return "bit-or-t";
137  case Token::BIT_XOR: return "bit-xor-t";
138  case Token::ROR: return "ror-t";
139  case Token::SHL: return "sal-t";
140  case Token::SAR: return "sar-t";
141  case Token::SHR: return "shr-t";
142  default:
143  UNREACHABLE();
144  return NULL;
145  }
146 }
147 
148 
149 bool LGoto::HasInterestingComment(LCodeGen* gen) const {
150  return !gen->IsNextEmittedBlock(block_id());
151 }
152 
153 
154 template<int R>
155 bool LTemplateResultInstruction<R>::MustSignExtendResult(
156  LPlatformChunk* chunk) const {
157  HValue* hvalue = this->hydrogen_value();
158  return hvalue != NULL &&
159  hvalue->representation().IsInteger32() &&
160  chunk->GetDehoistedKeyIds()->Contains(hvalue->id());
161 }
162 
163 
164 void LGoto::PrintDataTo(StringStream* stream) {
165  stream->Add("B%d", block_id());
166 }
167 
168 
169 void LBranch::PrintDataTo(StringStream* stream) {
170  stream->Add("B%d | B%d on ", true_block_id(), false_block_id());
171  value()->PrintTo(stream);
172 }
173 
174 
175 void LCompareNumericAndBranch::PrintDataTo(StringStream* stream) {
176  stream->Add("if ");
177  left()->PrintTo(stream);
178  stream->Add(" %s ", Token::String(op()));
179  right()->PrintTo(stream);
180  stream->Add(" then B%d else B%d", true_block_id(), false_block_id());
181 }
182 
183 
184 void LIsObjectAndBranch::PrintDataTo(StringStream* stream) {
185  stream->Add("if is_object(");
186  value()->PrintTo(stream);
187  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
188 }
189 
190 
191 void LIsStringAndBranch::PrintDataTo(StringStream* stream) {
192  stream->Add("if is_string(");
193  value()->PrintTo(stream);
194  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
195 }
196 
197 
198 void LIsSmiAndBranch::PrintDataTo(StringStream* stream) {
199  stream->Add("if is_smi(");
200  value()->PrintTo(stream);
201  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
202 }
203 
204 
205 void LIsUndetectableAndBranch::PrintDataTo(StringStream* stream) {
206  stream->Add("if is_undetectable(");
207  value()->PrintTo(stream);
208  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
209 }
210 
211 
212 void LStringCompareAndBranch::PrintDataTo(StringStream* stream) {
213  stream->Add("if string_compare(");
214  left()->PrintTo(stream);
215  right()->PrintTo(stream);
216  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
217 }
218 
219 
220 void LHasInstanceTypeAndBranch::PrintDataTo(StringStream* stream) {
221  stream->Add("if has_instance_type(");
222  value()->PrintTo(stream);
223  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
224 }
225 
226 
227 void LHasCachedArrayIndexAndBranch::PrintDataTo(StringStream* stream) {
228  stream->Add("if has_cached_array_index(");
229  value()->PrintTo(stream);
230  stream->Add(") then B%d else B%d", true_block_id(), false_block_id());
231 }
232 
233 
234 void LClassOfTestAndBranch::PrintDataTo(StringStream* stream) {
235  stream->Add("if class_of_test(");
236  value()->PrintTo(stream);
237  stream->Add(", \"%o\") then B%d else B%d",
238  *hydrogen()->class_name(),
239  true_block_id(),
240  false_block_id());
241 }
242 
243 
244 void LTypeofIsAndBranch::PrintDataTo(StringStream* stream) {
245  stream->Add("if typeof ");
246  value()->PrintTo(stream);
247  stream->Add(" == \"%s\" then B%d else B%d",
248  hydrogen()->type_literal()->ToCString().get(),
249  true_block_id(), false_block_id());
250 }
251 
252 
253 void LStoreCodeEntry::PrintDataTo(StringStream* stream) {
254  stream->Add(" = ");
255  function()->PrintTo(stream);
256  stream->Add(".code_entry = ");
257  code_object()->PrintTo(stream);
258 }
259 
260 
261 void LInnerAllocatedObject::PrintDataTo(StringStream* stream) {
262  stream->Add(" = ");
263  base_object()->PrintTo(stream);
264  stream->Add(" + ");
265  offset()->PrintTo(stream);
266 }
267 
268 
269 void LCallJSFunction::PrintDataTo(StringStream* stream) {
270  stream->Add("= ");
271  function()->PrintTo(stream);
272  stream->Add("#%d / ", arity());
273 }
274 
275 
276 void LCallWithDescriptor::PrintDataTo(StringStream* stream) {
277  for (int i = 0; i < InputCount(); i++) {
278  InputAt(i)->PrintTo(stream);
279  stream->Add(" ");
280  }
281  stream->Add("#%d / ", arity());
282 }
283 
284 
285 void LLoadContextSlot::PrintDataTo(StringStream* stream) {
286  context()->PrintTo(stream);
287  stream->Add("[%d]", slot_index());
288 }
289 
290 
291 void LStoreContextSlot::PrintDataTo(StringStream* stream) {
292  context()->PrintTo(stream);
293  stream->Add("[%d] <- ", slot_index());
294  value()->PrintTo(stream);
295 }
296 
297 
298 void LInvokeFunction::PrintDataTo(StringStream* stream) {
299  stream->Add("= ");
300  function()->PrintTo(stream);
301  stream->Add(" #%d / ", arity());
302 }
303 
304 
305 void LCallNew::PrintDataTo(StringStream* stream) {
306  stream->Add("= ");
307  constructor()->PrintTo(stream);
308  stream->Add(" #%d / ", arity());
309 }
310 
311 
312 void LCallNewArray::PrintDataTo(StringStream* stream) {
313  stream->Add("= ");
314  constructor()->PrintTo(stream);
315  stream->Add(" #%d / ", arity());
316  ElementsKind kind = hydrogen()->elements_kind();
317  stream->Add(" (%s) ", ElementsKindToString(kind));
318 }
319 
320 
321 void LAccessArgumentsAt::PrintDataTo(StringStream* stream) {
322  arguments()->PrintTo(stream);
323 
324  stream->Add(" length ");
325  length()->PrintTo(stream);
326 
327  stream->Add(" index ");
328  index()->PrintTo(stream);
329 }
330 
331 
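// Returns the next free spill slot index. On the x32 port, where a double is
// two pointer-sized slots wide, double spill slots take two indices and are
// kept 8-byte aligned.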
332 int LPlatformChunk::GetNextSpillIndex(RegisterKind kind) {
333  if (kind == DOUBLE_REGISTERS && kDoubleSize == 2 * kPointerSize) {
334  // Skip a slot for a double-width spill slot on the x32 port.
335  spill_slot_count_++;
336  // The spill slot's address is rbp - (index + 1) * kPointerSize -
337  // StandardFrameConstants::kFixedFrameSizeFromFp, where kFixedFrameSizeFromFp
338  // is 2 * kPointerSize. If rbp is aligned at an 8-byte boundary, the "|= 1"
339  // below makes sure the spilled doubles are aligned at an 8-byte boundary.
340  // TODO(haitao): make sure rbp is aligned at 8-byte boundary for x32 port.
341  spill_slot_count_ |= 1;
342  }
343  return spill_slot_count_++;
344 }
345 
346 
347 LOperand* LPlatformChunk::GetNextSpillSlot(RegisterKind kind) {
348  // All stack slots are Double stack slots on x64.
349  // Alternatively, at some point, start using half-size
350  // stack slots for int32 values.
351  int index = GetNextSpillIndex(kind);
352  if (kind == DOUBLE_REGISTERS) {
353  return LDoubleStackSlot::Create(index, zone());
354  } else {
355  DCHECK(kind == GENERAL_REGISTERS);
356  return LStackSlot::Create(index, zone());
357  }
358 }
359 
360 
361 void LStoreNamedField::PrintDataTo(StringStream* stream) {
362  object()->PrintTo(stream);
363  OStringStream os;
364  os << hydrogen()->access() << " <- ";
365  stream->Add(os.c_str());
366  value()->PrintTo(stream);
367 }
368 
369 
370 void LStoreNamedGeneric::PrintDataTo(StringStream* stream) {
371  object()->PrintTo(stream);
372  stream->Add(".");
373  stream->Add(String::cast(*name())->ToCString().get());
374  stream->Add(" <- ");
375  value()->PrintTo(stream);
376 }
377 
378 
379 void LLoadKeyed::PrintDataTo(StringStream* stream) {
380  elements()->PrintTo(stream);
381  stream->Add("[");
382  key()->PrintTo(stream);
383  if (hydrogen()->IsDehoisted()) {
384  stream->Add(" + %d]", base_offset());
385  } else {
386  stream->Add("]");
387  }
388 }
389 
390 
391 void LStoreKeyed::PrintDataTo(StringStream* stream) {
392  elements()->PrintTo(stream);
393  stream->Add("[");
394  key()->PrintTo(stream);
395  if (hydrogen()->IsDehoisted()) {
396  stream->Add(" + %d] <-", base_offset());
397  } else {
398  stream->Add("] <- ");
399  }
400 
401  if (value() == NULL) {
402  DCHECK(hydrogen()->IsConstantHoleStore() &&
403  hydrogen()->value()->representation().IsDouble());
404  stream->Add("<the hole(nan)>");
405  } else {
406  value()->PrintTo(stream);
407  }
408 }
409 
410 
411 void LStoreKeyedGeneric::PrintDataTo(StringStream* stream) {
412  object()->PrintTo(stream);
413  stream->Add("[");
414  key()->PrintTo(stream);
415  stream->Add("] <- ");
416  value()->PrintTo(stream);
417 }
418 
419 
420 void LTransitionElementsKind::PrintDataTo(StringStream* stream) {
421  object()->PrintTo(stream);
422  stream->Add(" %p -> %p", *original_map(), *transitioned_map());
423 }
424 
425 
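// Builds the lithium chunk for the current hydrogen graph: reserves spill
// slots for the unoptimized frame when compiling for OSR, then lowers each
// basic block in order, returning NULL if lowering aborts.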
426 LPlatformChunk* LChunkBuilder::Build() {
427  DCHECK(is_unused());
428  chunk_ = new(zone()) LPlatformChunk(info(), graph());
429  LPhase phase("L_Building chunk", chunk_);
430  status_ = BUILDING;
431 
432  // If compiling for OSR, reserve space for the unoptimized frame,
433  // which will be subsumed into this frame.
434  if (graph()->has_osr()) {
435  for (int i = graph()->osr()->UnoptimizedFrameSlots(); i > 0; i--) {
436  chunk_->GetNextSpillIndex(GENERAL_REGISTERS);
437  }
438  }
439 
440  const ZoneList<HBasicBlock*>* blocks = graph()->blocks();
441  for (int i = 0; i < blocks->length(); i++) {
442  HBasicBlock* next = NULL;
443  if (i < blocks->length() - 1) next = blocks->at(i + 1);
444  DoBasicBlock(blocks->at(i), next);
445  if (is_aborted()) return NULL;
446  }
447  status_ = DONE;
448  return chunk_;
449 }
450 
451 
452 LUnallocated* LChunkBuilder::ToUnallocated(Register reg) {
453  return new(zone()) LUnallocated(LUnallocated::FIXED_REGISTER,
454  Register::ToAllocationIndex(reg));
455 }
456 
457 
458 LUnallocated* LChunkBuilder::ToUnallocated(XMMRegister reg) {
459  return new(zone()) LUnallocated(LUnallocated::FIXED_DOUBLE_REGISTER,
460  XMMRegister::ToAllocationIndex(reg));
461 }
462 
463 
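// The Use* helpers below wrap an HValue in an LUnallocated operand carrying a
// register allocation policy: a specific fixed register, any register, a
// writable temp, a constant, or a use-at-start variant whose register may be
// reused for the instruction's result.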
464 LOperand* LChunkBuilder::UseFixed(HValue* value, Register fixed_register) {
465  return Use(value, ToUnallocated(fixed_register));
466 }
467 
468 
469 LOperand* LChunkBuilder::UseFixedDouble(HValue* value, XMMRegister reg) {
470  return Use(value, ToUnallocated(reg));
471 }
472 
473 
474 LOperand* LChunkBuilder::UseRegister(HValue* value) {
475  return Use(value, new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
476 }
477 
478 
479 LOperand* LChunkBuilder::UseRegisterAtStart(HValue* value) {
480  return Use(value,
481  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER,
482  LUnallocated::USED_AT_START));
483 }
484 
485 
486 LOperand* LChunkBuilder::UseTempRegister(HValue* value) {
487  return Use(value, new(zone()) LUnallocated(LUnallocated::WRITABLE_REGISTER));
488 }
489 
490 
491 LOperand* LChunkBuilder::UseTempRegisterOrConstant(HValue* value) {
492  return value->IsConstant()
493  ? chunk_->DefineConstantOperand(HConstant::cast(value))
494  : UseTempRegister(value);
495 }
496 
497 
498 LOperand* LChunkBuilder::Use(HValue* value) {
499  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE));
500 }
501 
502 
503 LOperand* LChunkBuilder::UseAtStart(HValue* value) {
504  return Use(value, new(zone()) LUnallocated(LUnallocated::NONE,
505  LUnallocated::USED_AT_START));
506 }
507 
508 
509 LOperand* LChunkBuilder::UseOrConstant(HValue* value) {
510  return value->IsConstant()
511  ? chunk_->DefineConstantOperand(HConstant::cast(value))
512  : Use(value);
513 }
514 
515 
516 LOperand* LChunkBuilder::UseOrConstantAtStart(HValue* value) {
517  return value->IsConstant()
518  ? chunk_->DefineConstantOperand(HConstant::cast(value))
519  : UseAtStart(value);
520 }
521 
522 
523 LOperand* LChunkBuilder::UseRegisterOrConstant(HValue* value) {
524  return value->IsConstant()
525  ? chunk_->DefineConstantOperand(HConstant::cast(value))
526  : UseRegister(value);
527 }
528 
529 
530 LOperand* LChunkBuilder::UseRegisterOrConstantAtStart(HValue* value) {
531  return value->IsConstant()
532  ? chunk_->DefineConstantOperand(HConstant::cast(value))
533  : UseRegisterAtStart(value);
534 }
535 
536 
537 LOperand* LChunkBuilder::UseConstant(HValue* value) {
538  return chunk_->DefineConstantOperand(HConstant::cast(value));
539 }
540 
541 
542 LOperand* LChunkBuilder::UseAny(HValue* value) {
543  return value->IsConstant()
544  ? chunk_->DefineConstantOperand(HConstant::cast(value))
545  : Use(value, new(zone()) LUnallocated(LUnallocated::ANY));
546 }
547 
548 
549 LOperand* LChunkBuilder::Use(HValue* value, LUnallocated* operand) {
550  if (value->EmitAtUses()) {
551  HInstruction* instr = HInstruction::cast(value);
552  VisitInstruction(instr);
553  }
554  operand->set_virtual_register(value->id());
555  return operand;
556 }
557 
558 
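// The Define* helpers attach a result policy to a single-result instruction:
// any register, a fixed register, a specific spill slot, or the same register
// as the first input.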
559 LInstruction* LChunkBuilder::Define(LTemplateResultInstruction<1>* instr,
560  LUnallocated* result) {
561  result->set_virtual_register(current_instruction_->id());
562  instr->set_result(result);
563  return instr;
564 }
565 
566 
567 LInstruction* LChunkBuilder::DefineAsRegister(
568  LTemplateResultInstruction<1>* instr) {
569  return Define(instr,
570  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER));
571 }
572 
573 
574 LInstruction* LChunkBuilder::DefineAsSpilled(
575  LTemplateResultInstruction<1>* instr,
576  int index) {
577  return Define(instr,
578  new(zone()) LUnallocated(LUnallocated::FIXED_SLOT, index));
579 }
580 
581 
582 LInstruction* LChunkBuilder::DefineSameAsFirst(
583  LTemplateResultInstruction<1>* instr) {
584  return Define(instr,
585  new(zone()) LUnallocated(LUnallocated::SAME_AS_FIRST_INPUT));
586 }
587 
588 
589 LInstruction* LChunkBuilder::DefineFixed(LTemplateResultInstruction<1>* instr,
590  Register reg) {
591  return Define(instr, ToUnallocated(reg));
592 }
593 
594 
595 LInstruction* LChunkBuilder::DefineFixedDouble(
596  LTemplateResultInstruction<1>* instr,
597  XMMRegister reg) {
598  return Define(instr, ToUnallocated(reg));
599 }
600 
601 
602 LInstruction* LChunkBuilder::AssignEnvironment(LInstruction* instr) {
603  HEnvironment* hydrogen_env = current_block_->last_environment();
604  int argument_index_accumulator = 0;
605  ZoneList<HValue*> objects_to_materialize(0, zone());
606  instr->set_environment(CreateEnvironment(
607  hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
608  return instr;
609 }
610 
611 
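// Marks an instruction as a call site: verifies its operand constraints in
// debug builds, assigns a pointer map, and attaches an environment so that
// lazy deoptimization after the call has a frame state to rebuild.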
612 LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
613  HInstruction* hinstr,
614  CanDeoptimize can_deoptimize) {
615  info()->MarkAsNonDeferredCalling();
616 
617 #ifdef DEBUG
618  instr->VerifyCall();
619 #endif
620  instr->MarkAsCall();
621  instr = AssignPointerMap(instr);
622 
623  // If the instruction does not have side effects, lazy deoptimization
624  // after the call will try to deoptimize to the point before the call.
625  // Thus we still need to attach an environment to this call even if
626  // the call sequence cannot deoptimize eagerly.
627  bool needs_environment =
628  (can_deoptimize == CAN_DEOPTIMIZE_EAGERLY) ||
629  !hinstr->HasObservableSideEffects();
630  if (needs_environment && !instr->HasEnvironment()) {
631  instr = AssignEnvironment(instr);
632  // We can't really figure out if the environment is needed or not.
633  instr->environment()->set_has_been_used();
634  }
635 
636  return instr;
637 }
638 
639 
640 LInstruction* LChunkBuilder::AssignPointerMap(LInstruction* instr) {
641  DCHECK(!instr->HasPointerMap());
642  instr->set_pointer_map(new(zone()) LPointerMap(zone()));
643  return instr;
644 }
645 
646 
647 LUnallocated* LChunkBuilder::TempRegister() {
648  LUnallocated* operand =
649  new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
650  int vreg = allocator_->GetVirtualRegister();
651  if (!allocator_->AllocationOk()) {
652  Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
653  vreg = 0;
654  }
655  operand->set_virtual_register(vreg);
656  return operand;
657 }
658 
659 
660 LOperand* LChunkBuilder::FixedTemp(Register reg) {
661  LUnallocated* operand = ToUnallocated(reg);
662  DCHECK(operand->HasFixedPolicy());
663  return operand;
664 }
665 
666 
667 LOperand* LChunkBuilder::FixedTemp(XMMRegister reg) {
668  LUnallocated* operand = ToUnallocated(reg);
669  DCHECK(operand->HasFixedPolicy());
670  return operand;
671 }
672 
673 
674 LInstruction* LChunkBuilder::DoBlockEntry(HBlockEntry* instr) {
675  return new(zone()) LLabel(instr->block());
676 }
677 
678 
679 LInstruction* LChunkBuilder::DoDummyUse(HDummyUse* instr) {
680  return DefineAsRegister(new(zone()) LDummyUse(UseAny(instr->value())));
681 }
682 
683 
684 LInstruction* LChunkBuilder::DoEnvironmentMarker(HEnvironmentMarker* instr) {
685  UNREACHABLE();
686  return NULL;
687 }
688 
689 
690 LInstruction* LChunkBuilder::DoDeoptimize(HDeoptimize* instr) {
691  return AssignEnvironment(new(zone()) LDeoptimize);
692 }
693 
694 
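// Lowers shift and rotate operations. For smi or int32 inputs the shift count
// is either a constant or fixed in rcx (the variable shift-count register on
// x64); tagged inputs fall back to the generic DoArithmeticT path.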
695 LInstruction* LChunkBuilder::DoShift(Token::Value op,
696  HBitwiseBinaryOperation* instr) {
697  if (instr->representation().IsSmiOrInteger32()) {
698  DCHECK(instr->left()->representation().Equals(instr->representation()));
699  DCHECK(instr->right()->representation().Equals(instr->representation()));
700  LOperand* left = UseRegisterAtStart(instr->left());
701 
702  HValue* right_value = instr->right();
703  LOperand* right = NULL;
704  int constant_value = 0;
705  bool does_deopt = false;
706  if (right_value->IsConstant()) {
707  HConstant* constant = HConstant::cast(right_value);
708  right = chunk_->DefineConstantOperand(constant);
709  constant_value = constant->Integer32Value() & 0x1f;
710  if (SmiValuesAre31Bits() && instr->representation().IsSmi() &&
711  constant_value > 0) {
712  // Left shift can deoptimize if we shift by > 0 and the result
713  // cannot be truncated to smi.
714  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToSmi);
715  }
716  } else {
717  right = UseFixed(right_value, rcx);
718  }
719 
720  // Shift operations can only deoptimize if we do a logical shift by 0 and
721  // the result cannot be truncated to int32.
722  if (op == Token::SHR && constant_value == 0) {
723  if (FLAG_opt_safe_uint32_operations) {
724  does_deopt = !instr->CheckFlag(HInstruction::kUint32);
725  } else {
726  does_deopt = !instr->CheckUsesForFlag(HValue::kTruncatingToInt32);
727  }
728  }
729 
730  LInstruction* result =
731  DefineSameAsFirst(new(zone()) LShiftI(op, left, right, does_deopt));
732  return does_deopt ? AssignEnvironment(result) : result;
733  } else {
734  return DoArithmeticT(op, instr);
735  }
736 }
737 
738 
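// Lowers double arithmetic. MOD is lowered as a call with fixed xmm operands
// since there is no SSE2 instruction for it; the other operators are pure
// register-to-register operations.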
739 LInstruction* LChunkBuilder::DoArithmeticD(Token::Value op,
740  HArithmeticBinaryOperation* instr) {
741  DCHECK(instr->representation().IsDouble());
742  DCHECK(instr->left()->representation().IsDouble());
743  DCHECK(instr->right()->representation().IsDouble());
744  if (op == Token::MOD) {
745  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
746  LOperand* right = UseFixedDouble(instr->BetterRightOperand(), xmm1);
747  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
748  return MarkAsCall(DefineSameAsFirst(result), instr);
749  } else {
750  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
751  LOperand* right = UseRegisterAtStart(instr->BetterRightOperand());
752  LArithmeticD* result = new(zone()) LArithmeticD(op, left, right);
753  return DefineSameAsFirst(result);
754  }
755 }
756 
757 
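// Lowers arithmetic on tagged values as a call to the generic binary-op
// stub: context fixed in rsi, operands in rdx and rax, result in rax.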
758 LInstruction* LChunkBuilder::DoArithmeticT(Token::Value op,
759  HBinaryOperation* instr) {
760  HValue* left = instr->left();
761  HValue* right = instr->right();
762  DCHECK(left->representation().IsTagged());
763  DCHECK(right->representation().IsTagged());
764  LOperand* context = UseFixed(instr->context(), rsi);
765  LOperand* left_operand = UseFixed(left, rdx);
766  LOperand* right_operand = UseFixed(right, rax);
767  LArithmeticT* result =
768  new(zone()) LArithmeticT(op, context, left_operand, right_operand);
769  return MarkAsCall(DefineFixed(result, rax), instr);
770 }
771 
772 
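// Lowers one basic block: establishes or copies the incoming environment,
// replays phi values into it, then visits every hydrogen instruction in order
// and records the range of lithium instructions that was emitted.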
773 void LChunkBuilder::DoBasicBlock(HBasicBlock* block, HBasicBlock* next_block) {
774  DCHECK(is_building());
775  current_block_ = block;
776  next_block_ = next_block;
777  if (block->IsStartBlock()) {
778  block->UpdateEnvironment(graph_->start_environment());
779  argument_count_ = 0;
780  } else if (block->predecessors()->length() == 1) {
781  // We have a single predecessor => copy environment and outgoing
782  // argument count from the predecessor.
783  DCHECK(block->phis()->length() == 0);
784  HBasicBlock* pred = block->predecessors()->at(0);
785  HEnvironment* last_environment = pred->last_environment();
786  DCHECK(last_environment != NULL);
787  // Only copy the environment if it is used again later.
788  if (pred->end()->SecondSuccessor() == NULL) {
789  DCHECK(pred->end()->FirstSuccessor() == block);
790  } else {
791  if (pred->end()->FirstSuccessor()->block_id() > block->block_id() ||
792  pred->end()->SecondSuccessor()->block_id() > block->block_id()) {
793  last_environment = last_environment->Copy();
794  }
795  }
796  block->UpdateEnvironment(last_environment);
797  DCHECK(pred->argument_count() >= 0);
798  argument_count_ = pred->argument_count();
799  } else {
800  // We are at a state join => process phis.
801  HBasicBlock* pred = block->predecessors()->at(0);
802  // No need to copy the environment, it cannot be used later.
803  HEnvironment* last_environment = pred->last_environment();
804  for (int i = 0; i < block->phis()->length(); ++i) {
805  HPhi* phi = block->phis()->at(i);
806  if (phi->HasMergedIndex()) {
807  last_environment->SetValueAt(phi->merged_index(), phi);
808  }
809  }
810  for (int i = 0; i < block->deleted_phis()->length(); ++i) {
811  if (block->deleted_phis()->at(i) < last_environment->length()) {
812  last_environment->SetValueAt(block->deleted_phis()->at(i),
813  graph_->GetConstantUndefined());
814  }
815  }
816  block->UpdateEnvironment(last_environment);
817  // Pick up the outgoing argument count of one of the predecessors.
818  argument_count_ = pred->argument_count();
819  }
820  HInstruction* current = block->first();
821  int start = chunk_->instructions()->length();
822  while (current != NULL && !is_aborted()) {
823  // Code for constants in registers is generated lazily.
824  if (!current->EmitAtUses()) {
825  VisitInstruction(current);
826  }
827  current = current->next();
828  }
829  int end = chunk_->instructions()->length() - 1;
830  if (end >= start) {
831  block->set_first_instruction_index(start);
832  block->set_last_instruction_index(end);
833  }
834  block->set_argument_count(argument_count_);
835  next_block_ = NULL;
836  current_block_ = NULL;
837 }
838 
839 
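// Lowers a single hydrogen instruction: replaces it with dummy uses when
// possible, emits a direct LGoto when a control instruction's successor is
// statically known, and otherwise delegates to CompileToLithium().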
840 void LChunkBuilder::VisitInstruction(HInstruction* current) {
841  HInstruction* old_current = current_instruction_;
842  current_instruction_ = current;
843 
844  LInstruction* instr = NULL;
845  if (current->CanReplaceWithDummyUses()) {
846  if (current->OperandCount() == 0) {
847  instr = DefineAsRegister(new(zone()) LDummy());
848  } else {
849  DCHECK(!current->OperandAt(0)->IsControlInstruction());
850  instr = DefineAsRegister(new(zone())
851  LDummyUse(UseAny(current->OperandAt(0))));
852  }
853  for (int i = 1; i < current->OperandCount(); ++i) {
854  if (current->OperandAt(i)->IsControlInstruction()) continue;
855  LInstruction* dummy =
856  new(zone()) LDummyUse(UseAny(current->OperandAt(i)));
857  dummy->set_hydrogen_value(current);
858  chunk_->AddInstruction(dummy, current_block_);
859  }
860  } else {
861  HBasicBlock* successor;
862  if (current->IsControlInstruction() &&
863  HControlInstruction::cast(current)->KnownSuccessorBlock(&successor) &&
864  successor != NULL) {
865  instr = new(zone()) LGoto(successor);
866  } else {
867  instr = current->CompileToLithium(this);
868  }
869  }
870 
871  argument_count_ += current->argument_delta();
872  DCHECK(argument_count_ >= 0);
873 
874  if (instr != NULL) {
875  AddInstruction(instr, current);
876  }
877 
878  current_instruction_ = old_current;
879 }
880 
881 
882 void LChunkBuilder::AddInstruction(LInstruction* instr,
883  HInstruction* hydrogen_val) {
884  // Associate the hydrogen instruction first, since we may need it for
885  // the ClobbersRegisters() or ClobbersDoubleRegisters() calls below.
886  instr->set_hydrogen_value(hydrogen_val);
887 
888 #ifdef DEBUG
889  // Make sure that the lithium instruction has either no fixed register
890  // constraints in temps or the result OR no uses that are only used at
891  // start. If this invariant doesn't hold, the register allocator can decide
892  // to insert a split of a range immediately before the instruction due to an
893  // already allocated register needing to be used for the instruction's fixed
894  // register constraint. In this case, the register allocator won't see an
895  // interference between the split child and the use-at-start (it would if
896  // it were just a plain use), so it is free to move the split child into
897  // the same register that is used for the use-at-start.
898  // See https://code.google.com/p/chromium/issues/detail?id=201590
899  if (!(instr->ClobbersRegisters() &&
900  instr->ClobbersDoubleRegisters(isolate()))) {
901  int fixed = 0;
902  int used_at_start = 0;
903  for (UseIterator it(instr); !it.Done(); it.Advance()) {
904  LUnallocated* operand = LUnallocated::cast(it.Current());
905  if (operand->IsUsedAtStart()) ++used_at_start;
906  }
907  if (instr->Output() != NULL) {
908  if (LUnallocated::cast(instr->Output())->HasFixedPolicy()) ++fixed;
909  }
910  for (TempIterator it(instr); !it.Done(); it.Advance()) {
911  LUnallocated* operand = LUnallocated::cast(it.Current());
912  if (operand->HasFixedPolicy()) ++fixed;
913  }
914  DCHECK(fixed == 0 || used_at_start == 0);
915  }
916 #endif
917 
918  if (FLAG_stress_pointer_maps && !instr->HasPointerMap()) {
919  instr = AssignPointerMap(instr);
920  }
921  if (FLAG_stress_environments && !instr->HasEnvironment()) {
922  instr = AssignEnvironment(instr);
923  }
924  chunk_->AddInstruction(instr, current_block_);
925 
926  if (instr->IsCall()) {
927  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
928  LInstruction* instruction_needing_environment = NULL;
929  if (hydrogen_val->HasObservableSideEffects()) {
930  HSimulate* sim = HSimulate::cast(hydrogen_val->next());
931  instruction_needing_environment = instr;
932  sim->ReplayEnvironment(current_block_->last_environment());
933  hydrogen_value_for_lazy_bailout = sim;
934  }
935  LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
936  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
937  chunk_->AddInstruction(bailout, current_block_);
938  if (instruction_needing_environment != NULL) {
939  // Store the lazy deopt environment with the instruction if needed.
940  // Right now it is only used for LInstanceOfKnownGlobal.
941  instruction_needing_environment->
942  SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
943  }
944  }
945 }
946 
947 
948 LInstruction* LChunkBuilder::DoGoto(HGoto* instr) {
949  return new(zone()) LGoto(instr->FirstSuccessor());
950 }
951 
952 
953 LInstruction* LChunkBuilder::DoDebugBreak(HDebugBreak* instr) {
954  return new(zone()) LDebugBreak();
955 }
956 
957 
958 LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
959  HValue* value = instr->value();
960  Representation r = value->representation();
961  HType type = value->type();
962  ToBooleanStub::Types expected = instr->expected_input_types();
963  if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic();
964 
965  bool easy_case = !r.IsTagged() || type.IsBoolean() || type.IsSmi() ||
966  type.IsJSArray() || type.IsHeapNumber() || type.IsString();
967  LInstruction* branch = new(zone()) LBranch(UseRegister(value));
968  if (!easy_case &&
969  ((!expected.Contains(ToBooleanStub::SMI) && expected.NeedsMap()) ||
970  !expected.IsGeneric())) {
971  branch = AssignEnvironment(branch);
972  }
973  return branch;
974 }
975 
976 
977 LInstruction* LChunkBuilder::DoCompareMap(HCompareMap* instr) {
978  DCHECK(instr->value()->representation().IsTagged());
979  LOperand* value = UseRegisterAtStart(instr->value());
980  return new(zone()) LCmpMapAndBranch(value);
981 }
982 
983 
984 LInstruction* LChunkBuilder::DoArgumentsLength(HArgumentsLength* length) {
985  info()->MarkAsRequiresFrame();
986  return DefineAsRegister(new(zone()) LArgumentsLength(Use(length->value())));
987 }
988 
989 
990 LInstruction* LChunkBuilder::DoArgumentsElements(HArgumentsElements* elems) {
991  info()->MarkAsRequiresFrame();
992  return DefineAsRegister(new(zone()) LArgumentsElements);
993 }
994 
995 
996 LInstruction* LChunkBuilder::DoInstanceOf(HInstanceOf* instr) {
997  LOperand* left = UseFixed(instr->left(), rax);
998  LOperand* right = UseFixed(instr->right(), rdx);
999  LOperand* context = UseFixed(instr->context(), rsi);
1000  LInstanceOf* result = new(zone()) LInstanceOf(context, left, right);
1001  return MarkAsCall(DefineFixed(result, rax), instr);
1002 }
1003 
1004 
1005 LInstruction* LChunkBuilder::DoInstanceOfKnownGlobal(
1006  HInstanceOfKnownGlobal* instr) {
1007  LInstanceOfKnownGlobal* result =
1008  new(zone()) LInstanceOfKnownGlobal(UseFixed(instr->context(), rsi),
1009  UseFixed(instr->left(), rax),
1010  FixedTemp(rdi));
1011  return MarkAsCall(DefineFixed(result, rax), instr);
1012 }
1013 
1014 
1015 LInstruction* LChunkBuilder::DoWrapReceiver(HWrapReceiver* instr) {
1016  LOperand* receiver = UseRegister(instr->receiver());
1017  LOperand* function = UseRegisterAtStart(instr->function());
1018  LWrapReceiver* result = new(zone()) LWrapReceiver(receiver, function);
1019  return AssignEnvironment(DefineSameAsFirst(result));
1020 }
1021 
1022 
1023 LInstruction* LChunkBuilder::DoApplyArguments(HApplyArguments* instr) {
1024  LOperand* function = UseFixed(instr->function(), rdi);
1025  LOperand* receiver = UseFixed(instr->receiver(), rax);
1026  LOperand* length = UseFixed(instr->length(), rbx);
1027  LOperand* elements = UseFixed(instr->elements(), rcx);
1028  LApplyArguments* result = new(zone()) LApplyArguments(function,
1029  receiver,
1030  length,
1031  elements);
1032  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1033 }
1034 
1035 
1036 LInstruction* LChunkBuilder::DoPushArguments(HPushArguments* instr) {
1037  int argc = instr->OperandCount();
1038  for (int i = 0; i < argc; ++i) {
1039  LOperand* argument = UseOrConstant(instr->argument(i));
1040  AddInstruction(new(zone()) LPushArgument(argument), instr);
1041  }
1042  return NULL;
1043 }
1044 
1045 
1046 LInstruction* LChunkBuilder::DoStoreCodeEntry(
1047  HStoreCodeEntry* store_code_entry) {
1048  LOperand* function = UseRegister(store_code_entry->function());
1049  LOperand* code_object = UseTempRegister(store_code_entry->code_object());
1050  return new(zone()) LStoreCodeEntry(function, code_object);
1051 }
1052 
1053 
1054 LInstruction* LChunkBuilder::DoInnerAllocatedObject(
1055  HInnerAllocatedObject* instr) {
1056  LOperand* base_object = UseRegisterAtStart(instr->base_object());
1057  LOperand* offset = UseRegisterOrConstantAtStart(instr->offset());
1058  return DefineAsRegister(
1059  new(zone()) LInnerAllocatedObject(base_object, offset));
1060 }
1061 
1062 
1063 LInstruction* LChunkBuilder::DoThisFunction(HThisFunction* instr) {
1064  return instr->HasNoUses()
1065  ? NULL
1066  : DefineAsRegister(new(zone()) LThisFunction);
1067 }
1068 
1069 
1070 LInstruction* LChunkBuilder::DoContext(HContext* instr) {
1071  if (instr->HasNoUses()) return NULL;
1072 
1073  if (info()->IsStub()) {
1074  return DefineFixed(new(zone()) LContext, rsi);
1075  }
1076 
1077  return DefineAsRegister(new(zone()) LContext);
1078 }
1079 
1080 
1081 LInstruction* LChunkBuilder::DoDeclareGlobals(HDeclareGlobals* instr) {
1082  LOperand* context = UseFixed(instr->context(), rsi);
1083  return MarkAsCall(new(zone()) LDeclareGlobals(context), instr);
1084 }
1085 
1086 
1087 LInstruction* LChunkBuilder::DoCallJSFunction(
1088  HCallJSFunction* instr) {
1089  LOperand* function = UseFixed(instr->function(), rdi);
1090 
1091  LCallJSFunction* result = new(zone()) LCallJSFunction(function);
1092 
1093  return MarkAsCall(DefineFixed(result, rax), instr);
1094 }
1095 
1096 
1097 LInstruction* LChunkBuilder::DoCallWithDescriptor(
1098  HCallWithDescriptor* instr) {
1099  CallInterfaceDescriptor descriptor = instr->descriptor();
1100 
1101  LOperand* target = UseRegisterOrConstantAtStart(instr->target());
1102  ZoneList<LOperand*> ops(instr->OperandCount(), zone());
1103  ops.Add(target, zone());
1104  for (int i = 1; i < instr->OperandCount(); i++) {
1105  LOperand* op =
1106  UseFixed(instr->OperandAt(i), descriptor.GetParameterRegister(i - 1));
1107  ops.Add(op, zone());
1108  }
1109 
1110  LCallWithDescriptor* result = new(zone()) LCallWithDescriptor(
1111  descriptor, ops, zone());
1112  return MarkAsCall(DefineFixed(result, rax), instr);
1113 }
1114 
1115 
1116 LInstruction* LChunkBuilder::DoTailCallThroughMegamorphicCache(
1117  HTailCallThroughMegamorphicCache* instr) {
1118  LOperand* context = UseFixed(instr->context(), rsi);
1119  LOperand* receiver_register =
1120  UseFixed(instr->receiver(), LoadDescriptor::ReceiverRegister());
1121  LOperand* name_register =
1122  UseFixed(instr->name(), LoadDescriptor::NameRegister());
1123  // Not marked as call. It can't deoptimize, and it never returns.
1124  return new (zone()) LTailCallThroughMegamorphicCache(
1125  context, receiver_register, name_register);
1126 }
1127 
1128 
1129 LInstruction* LChunkBuilder::DoInvokeFunction(HInvokeFunction* instr) {
1130  LOperand* context = UseFixed(instr->context(), rsi);
1131  LOperand* function = UseFixed(instr->function(), rdi);
1132  LInvokeFunction* result = new(zone()) LInvokeFunction(context, function);
1133  return MarkAsCall(DefineFixed(result, rax), instr, CANNOT_DEOPTIMIZE_EAGERLY);
1134 }
1135 
1136 
1137 LInstruction* LChunkBuilder::DoUnaryMathOperation(HUnaryMathOperation* instr) {
1138  switch (instr->op()) {
1139  case kMathFloor:
1140  return DoMathFloor(instr);
1141  case kMathRound:
1142  return DoMathRound(instr);
1143  case kMathFround:
1144  return DoMathFround(instr);
1145  case kMathAbs:
1146  return DoMathAbs(instr);
1147  case kMathLog:
1148  return DoMathLog(instr);
1149  case kMathExp:
1150  return DoMathExp(instr);
1151  case kMathSqrt:
1152  return DoMathSqrt(instr);
1153  case kMathPowHalf:
1154  return DoMathPowHalf(instr);
1155  case kMathClz32:
1156  return DoMathClz32(instr);
1157  default:
1158  UNREACHABLE();
1159  return NULL;
1160  }
1161 }
1162 
1163 
1164 LInstruction* LChunkBuilder::DoMathFloor(HUnaryMathOperation* instr) {
1165  LOperand* input = UseRegisterAtStart(instr->value());
1166  LMathFloor* result = new(zone()) LMathFloor(input);
1167  return AssignEnvironment(DefineAsRegister(result));
1168 }
1169 
1170 
1171 LInstruction* LChunkBuilder::DoMathRound(HUnaryMathOperation* instr) {
1172  LOperand* input = UseRegister(instr->value());
1173  LOperand* temp = FixedTemp(xmm4);
1174  LMathRound* result = new(zone()) LMathRound(input, temp);
1175  return AssignEnvironment(DefineAsRegister(result));
1176 }
1177 
1178 
1179 LInstruction* LChunkBuilder::DoMathFround(HUnaryMathOperation* instr) {
1180  LOperand* input = UseRegister(instr->value());
1181  LMathFround* result = new (zone()) LMathFround(input);
1182  return DefineAsRegister(result);
1183 }
1184 
1185 
1186 LInstruction* LChunkBuilder::DoMathAbs(HUnaryMathOperation* instr) {
1187  LOperand* context = UseAny(instr->context());
1188  LOperand* input = UseRegisterAtStart(instr->value());
1189  LInstruction* result =
1190  DefineSameAsFirst(new(zone()) LMathAbs(context, input));
1191  Representation r = instr->value()->representation();
1192  if (!r.IsDouble() && !r.IsSmiOrInteger32()) result = AssignPointerMap(result);
1193  if (!r.IsDouble()) result = AssignEnvironment(result);
1194  return result;
1195 }
1196 
1197 
1198 LInstruction* LChunkBuilder::DoMathLog(HUnaryMathOperation* instr) {
1199  DCHECK(instr->representation().IsDouble());
1200  DCHECK(instr->value()->representation().IsDouble());
1201  LOperand* input = UseRegisterAtStart(instr->value());
1202  return MarkAsCall(DefineSameAsFirst(new(zone()) LMathLog(input)), instr);
1203 }
1204 
1205 
1206 LInstruction* LChunkBuilder::DoMathClz32(HUnaryMathOperation* instr) {
1207  LOperand* input = UseRegisterAtStart(instr->value());
1208  LMathClz32* result = new(zone()) LMathClz32(input);
1209  return DefineAsRegister(result);
1210 }
1211 
1212 
1213 LInstruction* LChunkBuilder::DoMathExp(HUnaryMathOperation* instr) {
1214  DCHECK(instr->representation().IsDouble());
1215  DCHECK(instr->value()->representation().IsDouble());
1216  LOperand* value = UseTempRegister(instr->value());
1217  LOperand* temp1 = TempRegister();
1218  LOperand* temp2 = TempRegister();
1219  LMathExp* result = new(zone()) LMathExp(value, temp1, temp2);
1220  return DefineAsRegister(result);
1221 }
1222 
1223 
1224 LInstruction* LChunkBuilder::DoMathSqrt(HUnaryMathOperation* instr) {
1225  LOperand* input = UseAtStart(instr->value());
1226  return DefineAsRegister(new(zone()) LMathSqrt(input));
1227 }
1228 
1229 
1230 LInstruction* LChunkBuilder::DoMathPowHalf(HUnaryMathOperation* instr) {
1231  LOperand* input = UseRegisterAtStart(instr->value());
1232  LMathPowHalf* result = new(zone()) LMathPowHalf(input);
1233  return DefineSameAsFirst(result);
1234 }
1235 
1236 
1237 LInstruction* LChunkBuilder::DoCallNew(HCallNew* instr) {
1238  LOperand* context = UseFixed(instr->context(), rsi);
1239  LOperand* constructor = UseFixed(instr->constructor(), rdi);
1240  LCallNew* result = new(zone()) LCallNew(context, constructor);
1241  return MarkAsCall(DefineFixed(result, rax), instr);
1242 }
1243 
1244 
1245 LInstruction* LChunkBuilder::DoCallNewArray(HCallNewArray* instr) {
1246  LOperand* context = UseFixed(instr->context(), rsi);
1247  LOperand* constructor = UseFixed(instr->constructor(), rdi);
1248  LCallNewArray* result = new(zone()) LCallNewArray(context, constructor);
1249  return MarkAsCall(DefineFixed(result, rax), instr);
1250 }
1251 
1252 
1253 LInstruction* LChunkBuilder::DoCallFunction(HCallFunction* instr) {
1254  LOperand* context = UseFixed(instr->context(), rsi);
1255  LOperand* function = UseFixed(instr->function(), rdi);
1256  LCallFunction* call = new(zone()) LCallFunction(context, function);
1257  return MarkAsCall(DefineFixed(call, rax), instr);
1258 }
1259 
1260 
1261 LInstruction* LChunkBuilder::DoCallRuntime(HCallRuntime* instr) {
1262  LOperand* context = UseFixed(instr->context(), rsi);
1263  LCallRuntime* result = new(zone()) LCallRuntime(context);
1264  return MarkAsCall(DefineFixed(result, rax), instr);
1265 }
1266 
1267 
1268 LInstruction* LChunkBuilder::DoRor(HRor* instr) {
1269  return DoShift(Token::ROR, instr);
1270 }
1271 
1272 
1273 LInstruction* LChunkBuilder::DoShr(HShr* instr) {
1274  return DoShift(Token::SHR, instr);
1275 }
1276 
1277 
1278 LInstruction* LChunkBuilder::DoSar(HSar* instr) {
1279  return DoShift(Token::SAR, instr);
1280 }
1281 
1282 
1283 LInstruction* LChunkBuilder::DoShl(HShl* instr) {
1284  return DoShift(Token::SHL, instr);
1285 }
1286 
1287 
1288 LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
1289  if (instr->representation().IsSmiOrInteger32()) {
1290  DCHECK(instr->left()->representation().Equals(instr->representation()));
1291  DCHECK(instr->right()->representation().Equals(instr->representation()));
1292  DCHECK(instr->CheckFlag(HValue::kTruncatingToInt32));
1293 
1294  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1295  LOperand* right = UseOrConstantAtStart(instr->BetterRightOperand());
1296  return DefineSameAsFirst(new(zone()) LBitI(left, right));
1297  } else {
1298  return DoArithmeticT(instr->op(), instr);
1299  }
1300 }
1301 
1302 
1303 LInstruction* LChunkBuilder::DoDivByPowerOf2I(HDiv* instr) {
1304  DCHECK(instr->representation().IsSmiOrInteger32());
1305  DCHECK(instr->left()->representation().Equals(instr->representation()));
1306  DCHECK(instr->right()->representation().Equals(instr->representation()));
1307  LOperand* dividend = UseRegister(instr->left());
1308  int32_t divisor = instr->right()->GetInteger32Constant();
1309  LInstruction* result = DefineAsRegister(new(zone()) LDivByPowerOf2I(
1310  dividend, divisor));
1311  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1312  (instr->CheckFlag(HValue::kCanOverflow) && divisor == -1) ||
1313  (!instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32) &&
1314  divisor != 1 && divisor != -1)) {
1315  result = AssignEnvironment(result);
1316  }
1317  return result;
1318 }
1319 
1320 
1321 LInstruction* LChunkBuilder::DoDivByConstI(HDiv* instr) {
1322  DCHECK(instr->representation().IsInteger32());
1323  DCHECK(instr->left()->representation().Equals(instr->representation()));
1324  DCHECK(instr->right()->representation().Equals(instr->representation()));
1325  LOperand* dividend = UseRegister(instr->left());
1326  int32_t divisor = instr->right()->GetInteger32Constant();
1327  LOperand* temp1 = FixedTemp(rax);
1328  LOperand* temp2 = FixedTemp(rdx);
1329  LInstruction* result = DefineFixed(new(zone()) LDivByConstI(
1330  dividend, divisor, temp1, temp2), rdx);
1331  if (divisor == 0 ||
1332  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1333  !instr->CheckFlag(HInstruction::kAllUsesTruncatingToInt32)) {
1334  result = AssignEnvironment(result);
1335  }
1336  return result;
1337 }
1338 
1339 
1340 LInstruction* LChunkBuilder::DoDivI(HDiv* instr) {
1341  DCHECK(instr->representation().IsSmiOrInteger32());
1342  DCHECK(instr->left()->representation().Equals(instr->representation()));
1343  DCHECK(instr->right()->representation().Equals(instr->representation()));
1344  LOperand* dividend = UseFixed(instr->left(), rax);
1345  LOperand* divisor = UseRegister(instr->right());
1346  LOperand* temp = FixedTemp(rdx);
1347  LInstruction* result = DefineFixed(new(zone()) LDivI(
1348  dividend, divisor, temp), rax);
1349  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1350  instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1351  instr->CheckFlag(HValue::kCanOverflow) ||
1352  !instr->CheckFlag(HValue::kAllUsesTruncatingToInt32)) {
1353  result = AssignEnvironment(result);
1354  }
1355  return result;
1356 }
1357 
1358 
1359 LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
1360  if (instr->representation().IsSmiOrInteger32()) {
1361  if (instr->RightIsPowerOf2()) {
1362  return DoDivByPowerOf2I(instr);
1363  } else if (instr->right()->IsConstant()) {
1364  return DoDivByConstI(instr);
1365  } else {
1366  return DoDivI(instr);
1367  }
1368  } else if (instr->representation().IsDouble()) {
1369  return DoArithmeticD(Token::DIV, instr);
1370  } else {
1371  return DoArithmeticT(Token::DIV, instr);
1372  }
1373 }
1374 
1375 
1376 LInstruction* LChunkBuilder::DoFlooringDivByPowerOf2I(HMathFloorOfDiv* instr) {
1377  LOperand* dividend = UseRegisterAtStart(instr->left());
1378  int32_t divisor = instr->right()->GetInteger32Constant();
1379  LInstruction* result = DefineSameAsFirst(new(zone()) LFlooringDivByPowerOf2I(
1380  dividend, divisor));
1381  if ((instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0) ||
1382  (instr->CheckFlag(HValue::kLeftCanBeMinInt) && divisor == -1)) {
1383  result = AssignEnvironment(result);
1384  }
1385  return result;
1386 }
1387 
1388 
1389 LInstruction* LChunkBuilder::DoFlooringDivByConstI(HMathFloorOfDiv* instr) {
1390  DCHECK(instr->representation().IsInteger32());
1391  DCHECK(instr->left()->representation().Equals(instr->representation()));
1392  DCHECK(instr->right()->representation().Equals(instr->representation()));
1393  LOperand* dividend = UseRegister(instr->left());
1394  int32_t divisor = instr->right()->GetInteger32Constant();
1395  LOperand* temp1 = FixedTemp(rax);
1396  LOperand* temp2 = FixedTemp(rdx);
1397  LOperand* temp3 =
1398  ((divisor > 0 && !instr->CheckFlag(HValue::kLeftCanBeNegative)) ||
1399  (divisor < 0 && !instr->CheckFlag(HValue::kLeftCanBePositive))) ?
1400  NULL : TempRegister();
1401  LInstruction* result =
1402  DefineFixed(new(zone()) LFlooringDivByConstI(dividend,
1403  divisor,
1404  temp1,
1405  temp2,
1406  temp3),
1407  rdx);
1408  if (divisor == 0 ||
1409  (instr->CheckFlag(HValue::kBailoutOnMinusZero) && divisor < 0)) {
1410  result = AssignEnvironment(result);
1411  }
1412  return result;
1413 }
1414 
1415 
1416 LInstruction* LChunkBuilder::DoFlooringDivI(HMathFloorOfDiv* instr) {
1417  DCHECK(instr->representation().IsSmiOrInteger32());
1418  DCHECK(instr->left()->representation().Equals(instr->representation()));
1419  DCHECK(instr->right()->representation().Equals(instr->representation()));
1420  LOperand* dividend = UseFixed(instr->left(), rax);
1421  LOperand* divisor = UseRegister(instr->right());
1422  LOperand* temp = FixedTemp(rdx);
1423  LInstruction* result = DefineFixed(new(zone()) LFlooringDivI(
1424  dividend, divisor, temp), rax);
1425  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1426  instr->CheckFlag(HValue::kBailoutOnMinusZero) ||
1427  instr->CheckFlag(HValue::kCanOverflow)) {
1428  result = AssignEnvironment(result);
1429  }
1430  return result;
1431 }
1432 
1433 
1434 LInstruction* LChunkBuilder::DoMathFloorOfDiv(HMathFloorOfDiv* instr) {
1435  if (instr->RightIsPowerOf2()) {
1436  return DoFlooringDivByPowerOf2I(instr);
1437  } else if (instr->right()->IsConstant()) {
1438  return DoFlooringDivByConstI(instr);
1439  } else {
1440  return DoFlooringDivI(instr);
1441  }
1442 }
1443 
1444 
1445 LInstruction* LChunkBuilder::DoModByPowerOf2I(HMod* instr) {
1446  DCHECK(instr->representation().IsSmiOrInteger32());
1447  DCHECK(instr->left()->representation().Equals(instr->representation()));
1448  DCHECK(instr->right()->representation().Equals(instr->representation()));
1449  LOperand* dividend = UseRegisterAtStart(instr->left());
1450  int32_t divisor = instr->right()->GetInteger32Constant();
1451  LInstruction* result = DefineSameAsFirst(new(zone()) LModByPowerOf2I(
1452  dividend, divisor));
1453  if (instr->CheckFlag(HValue::kLeftCanBeNegative) &&
1454  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1455  result = AssignEnvironment(result);
1456  }
1457  return result;
1458 }
1459 
1460 
1461 LInstruction* LChunkBuilder::DoModByConstI(HMod* instr) {
1462  DCHECK(instr->representation().IsSmiOrInteger32());
1463  DCHECK(instr->left()->representation().Equals(instr->representation()));
1464  DCHECK(instr->right()->representation().Equals(instr->representation()));
1465  LOperand* dividend = UseRegister(instr->left());
1466  int32_t divisor = instr->right()->GetInteger32Constant();
1467  LOperand* temp1 = FixedTemp(rax);
1468  LOperand* temp2 = FixedTemp(rdx);
1469  LInstruction* result = DefineFixed(new(zone()) LModByConstI(
1470  dividend, divisor, temp1, temp2), rax);
1471  if (divisor == 0 || instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1472  result = AssignEnvironment(result);
1473  }
1474  return result;
1475 }
1476 
1477 
1478 LInstruction* LChunkBuilder::DoModI(HMod* instr) {
1479  DCHECK(instr->representation().IsSmiOrInteger32());
1480  DCHECK(instr->left()->representation().Equals(instr->representation()));
1481  DCHECK(instr->right()->representation().Equals(instr->representation()));
1482  LOperand* dividend = UseFixed(instr->left(), rax);
1483  LOperand* divisor = UseRegister(instr->right());
1484  LOperand* temp = FixedTemp(rdx);
1485  LInstruction* result = DefineFixed(new(zone()) LModI(
1486  dividend, divisor, temp), rdx);
1487  if (instr->CheckFlag(HValue::kCanBeDivByZero) ||
1488  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1489  result = AssignEnvironment(result);
1490  }
1491  return result;
1492 }
1493 
1494 
1495 LInstruction* LChunkBuilder::DoMod(HMod* instr) {
1496  if (instr->representation().IsSmiOrInteger32()) {
1497  if (instr->RightIsPowerOf2()) {
1498  return DoModByPowerOf2I(instr);
1499  } else if (instr->right()->IsConstant()) {
1500  return DoModByConstI(instr);
1501  } else {
1502  return DoModI(instr);
1503  }
1504  } else if (instr->representation().IsDouble()) {
1505  return DoArithmeticD(Token::MOD, instr);
1506  } else {
1507  return DoArithmeticT(Token::MOD, instr);
1508  }
1509 }
1510 
1511 
1512 LInstruction* LChunkBuilder::DoMul(HMul* instr) {
1513  if (instr->representation().IsSmiOrInteger32()) {
1514  DCHECK(instr->left()->representation().Equals(instr->representation()));
1515  DCHECK(instr->right()->representation().Equals(instr->representation()));
1516  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1517  LOperand* right = UseOrConstant(instr->BetterRightOperand());
1518  LMulI* mul = new(zone()) LMulI(left, right);
1519  if (instr->CheckFlag(HValue::kCanOverflow) ||
1520  instr->CheckFlag(HValue::kBailoutOnMinusZero)) {
1521  AssignEnvironment(mul);
1522  }
1523  return DefineSameAsFirst(mul);
1524  } else if (instr->representation().IsDouble()) {
1525  return DoArithmeticD(Token::MUL, instr);
1526  } else {
1527  return DoArithmeticT(Token::MUL, instr);
1528  }
1529 }
1530 
1531 
1532 LInstruction* LChunkBuilder::DoSub(HSub* instr) {
1533  if (instr->representation().IsSmiOrInteger32()) {
1534  DCHECK(instr->left()->representation().Equals(instr->representation()));
1535  DCHECK(instr->right()->representation().Equals(instr->representation()));
1536  LOperand* left = UseRegisterAtStart(instr->left());
1537  LOperand* right = UseOrConstantAtStart(instr->right());
1538  LSubI* sub = new(zone()) LSubI(left, right);
1539  LInstruction* result = DefineSameAsFirst(sub);
1540  if (instr->CheckFlag(HValue::kCanOverflow)) {
1541  result = AssignEnvironment(result);
1542  }
1543  return result;
1544  } else if (instr->representation().IsDouble()) {
1545  return DoArithmeticD(Token::SUB, instr);
1546  } else {
1547  return DoArithmeticT(Token::SUB, instr);
1548  }
1549 }
1550 
1551 
1552 LInstruction* LChunkBuilder::DoAdd(HAdd* instr) {
1553  if (instr->representation().IsSmiOrInteger32()) {
1554  // Check to see if it would be advantageous to use an lea instruction rather
1555  // than an add. This is the case when no overflow check is needed and there
1556  // are multiple uses of the add's inputs, so using a 3-register add will
1557  // preserve all input values for later uses.
1558  bool use_lea = LAddI::UseLea(instr);
1559  DCHECK(instr->left()->representation().Equals(instr->representation()));
1560  DCHECK(instr->right()->representation().Equals(instr->representation()));
1561  LOperand* left = UseRegisterAtStart(instr->BetterLeftOperand());
1562  HValue* right_candidate = instr->BetterRightOperand();
1563  LOperand* right;
1564  if (SmiValuesAre32Bits() && instr->representation().IsSmi()) {
1565  // We cannot add a tagged immediate to a tagged value,
1566  // so we request it in a register.
1567  right = UseRegisterAtStart(right_candidate);
1568  } else {
1569  right = use_lea ? UseRegisterOrConstantAtStart(right_candidate)
1570  : UseOrConstantAtStart(right_candidate);
1571  }
1572  LAddI* add = new(zone()) LAddI(left, right);
1573  bool can_overflow = instr->CheckFlag(HValue::kCanOverflow);
1574  LInstruction* result = use_lea ? DefineAsRegister(add)
1575  : DefineSameAsFirst(add);
1576  if (can_overflow) {
1577  result = AssignEnvironment(result);
1578  }
1579  return result;
1580  } else if (instr->representation().IsExternal()) {
1581  DCHECK(instr->left()->representation().IsExternal());
1582  DCHECK(instr->right()->representation().IsInteger32());
1583  DCHECK(!instr->CheckFlag(HValue::kCanOverflow));
1584  bool use_lea = LAddI::UseLea(instr);
1585  LOperand* left = UseRegisterAtStart(instr->left());
1586  HValue* right_candidate = instr->right();
1587  LOperand* right = use_lea
1588  ? UseRegisterOrConstantAtStart(right_candidate)
1589  : UseOrConstantAtStart(right_candidate);
1590  LAddI* add = new(zone()) LAddI(left, right);
1591  LInstruction* result = use_lea
1592  ? DefineAsRegister(add)
1593  : DefineSameAsFirst(add);
1594  return result;
1595  } else if (instr->representation().IsDouble()) {
1596  return DoArithmeticD(Token::ADD, instr);
1597  } else {
1598  return DoArithmeticT(Token::ADD, instr);
1599  }
1600  return NULL;
1601 }
1602 
1603 
1604 LInstruction* LChunkBuilder::DoMathMinMax(HMathMinMax* instr) {
1605  LOperand* left = NULL;
1606  LOperand* right = NULL;
1607  DCHECK(instr->left()->representation().Equals(instr->representation()));
1608  DCHECK(instr->right()->representation().Equals(instr->representation()));
1609  if (instr->representation().IsSmi()) {
1610  left = UseRegisterAtStart(instr->BetterLeftOperand());
1611  right = UseAtStart(instr->BetterRightOperand());
1612  } else if (instr->representation().IsInteger32()) {
1613  left = UseRegisterAtStart(instr->BetterLeftOperand());
1614  right = UseOrConstantAtStart(instr->BetterRightOperand());
1615  } else {
1616  DCHECK(instr->representation().IsDouble());
1617  left = UseRegisterAtStart(instr->left());
1618  right = UseRegisterAtStart(instr->right());
1619  }
1620  LMathMinMax* minmax = new(zone()) LMathMinMax(left, right);
1621  return DefineSameAsFirst(minmax);
1622 }
1623 
1624 
1625 LInstruction* LChunkBuilder::DoPower(HPower* instr) {
1626  DCHECK(instr->representation().IsDouble());
1627  // We call a C function for double power. It can't trigger a GC.
1628  // We need to use a fixed result register for the call.
1629  Representation exponent_type = instr->right()->representation();
1630  DCHECK(instr->left()->representation().IsDouble());
1631  LOperand* left = UseFixedDouble(instr->left(), xmm2);
1632  LOperand* right =
1633  exponent_type.IsDouble()
1634  ? UseFixedDouble(instr->right(), xmm1)
1635  : UseFixed(instr->right(), MathPowTaggedDescriptor::exponent());
1636  LPower* result = new(zone()) LPower(left, right);
1637  return MarkAsCall(DefineFixedDouble(result, xmm3), instr,
1638  CAN_DEOPTIMIZE_EAGERLY);
1639 }
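// Both inputs and the result of LPower are pinned to specific registers
// (xmm2, xmm1 or the MathPowTaggedDescriptor exponent register, and xmm3)
// because the operation is emitted as a call to a power helper that expects
// its arguments in fixed locations; the register allocator gets no choice
// here, hence UseFixed*/DefineFixedDouble rather than the usual Use*/Define*.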
1640 
1641 
1642 LInstruction* LChunkBuilder::DoCompareGeneric(HCompareGeneric* instr) {
1643  DCHECK(instr->left()->representation().IsTagged());
1644  DCHECK(instr->right()->representation().IsTagged());
1645  LOperand* context = UseFixed(instr->context(), rsi);
1646  LOperand* left = UseFixed(instr->left(), rdx);
1647  LOperand* right = UseFixed(instr->right(), rax);
1648  LCmpT* result = new(zone()) LCmpT(context, left, right);
1649  return MarkAsCall(DefineFixed(result, rax), instr);
1650 }
1651 
1652 
1653 LInstruction* LChunkBuilder::DoCompareNumericAndBranch(
1654  HCompareNumericAndBranch* instr) {
1655  Representation r = instr->representation();
1656  if (r.IsSmiOrInteger32()) {
1657  DCHECK(instr->left()->representation().Equals(r));
1658  DCHECK(instr->right()->representation().Equals(r));
1659  LOperand* left = UseRegisterOrConstantAtStart(instr->left());
1660  LOperand* right = UseOrConstantAtStart(instr->right());
1661  return new(zone()) LCompareNumericAndBranch(left, right);
1662  } else {
1663  DCHECK(r.IsDouble());
1664  DCHECK(instr->left()->representation().IsDouble());
1665  DCHECK(instr->right()->representation().IsDouble());
1666  LOperand* left;
1667  LOperand* right;
1668  if (instr->left()->IsConstant() && instr->right()->IsConstant()) {
1669  left = UseRegisterOrConstantAtStart(instr->left());
1670  right = UseRegisterOrConstantAtStart(instr->right());
1671  } else {
1672  left = UseRegisterAtStart(instr->left());
1673  right = UseRegisterAtStart(instr->right());
1674  }
1675  return new(zone()) LCompareNumericAndBranch(left, right);
1676  }
1677 }
1678 
1679 
1680 LInstruction* LChunkBuilder::DoCompareObjectEqAndBranch(
1681  HCompareObjectEqAndBranch* instr) {
1682  LOperand* left = UseRegisterAtStart(instr->left());
1683  LOperand* right = UseRegisterOrConstantAtStart(instr->right());
1684  return new(zone()) LCmpObjectEqAndBranch(left, right);
1685 }
1686 
1687 
1688 LInstruction* LChunkBuilder::DoCompareHoleAndBranch(
1689  HCompareHoleAndBranch* instr) {
1690  LOperand* value = UseRegisterAtStart(instr->value());
1691  return new(zone()) LCmpHoleAndBranch(value);
1692 }
1693 
1694 
1695 LInstruction* LChunkBuilder::DoCompareMinusZeroAndBranch(
1696  HCompareMinusZeroAndBranch* instr) {
1697  LOperand* value = UseRegister(instr->value());
1698  return new(zone()) LCompareMinusZeroAndBranch(value);
1699 }
1700 
1701 
1702 LInstruction* LChunkBuilder::DoIsObjectAndBranch(HIsObjectAndBranch* instr) {
1703  DCHECK(instr->value()->representation().IsTagged());
1704  return new(zone()) LIsObjectAndBranch(UseRegisterAtStart(instr->value()));
1705 }
1706 
1707 
1708 LInstruction* LChunkBuilder::DoIsStringAndBranch(HIsStringAndBranch* instr) {
1709  DCHECK(instr->value()->representation().IsTagged());
1710  LOperand* value = UseRegisterAtStart(instr->value());
1711  LOperand* temp = TempRegister();
1712  return new(zone()) LIsStringAndBranch(value, temp);
1713 }
1714 
1715 
1716 LInstruction* LChunkBuilder::DoIsSmiAndBranch(HIsSmiAndBranch* instr) {
1717  DCHECK(instr->value()->representation().IsTagged());
1718  return new(zone()) LIsSmiAndBranch(Use(instr->value()));
1719 }
1720 
1721 
1722 LInstruction* LChunkBuilder::DoIsUndetectableAndBranch(
1723  HIsUndetectableAndBranch* instr) {
1724  DCHECK(instr->value()->representation().IsTagged());
1725  LOperand* value = UseRegisterAtStart(instr->value());
1726  LOperand* temp = TempRegister();
1727  return new(zone()) LIsUndetectableAndBranch(value, temp);
1728 }
1729 
1730 
1731 LInstruction* LChunkBuilder::DoStringCompareAndBranch(
1732  HStringCompareAndBranch* instr) {
1733 
1734  DCHECK(instr->left()->representation().IsTagged());
1735  DCHECK(instr->right()->representation().IsTagged());
1736  LOperand* context = UseFixed(instr->context(), rsi);
1737  LOperand* left = UseFixed(instr->left(), rdx);
1738  LOperand* right = UseFixed(instr->right(), rax);
1739  LStringCompareAndBranch* result =
1740  new(zone()) LStringCompareAndBranch(context, left, right);
1741 
1742  return MarkAsCall(result, instr);
1743 }
1744 
1745 
1746 LInstruction* LChunkBuilder::DoHasInstanceTypeAndBranch(
1747  HHasInstanceTypeAndBranch* instr) {
1748  DCHECK(instr->value()->representation().IsTagged());
1749  LOperand* value = UseRegisterAtStart(instr->value());
1750  return new(zone()) LHasInstanceTypeAndBranch(value);
1751 }
1752 
1753 
1754 LInstruction* LChunkBuilder::DoGetCachedArrayIndex(
1755  HGetCachedArrayIndex* instr) {
1756  DCHECK(instr->value()->representation().IsTagged());
1757  LOperand* value = UseRegisterAtStart(instr->value());
1758 
1759  return DefineAsRegister(new(zone()) LGetCachedArrayIndex(value));
1760 }
1761 
1762 
1763 LInstruction* LChunkBuilder::DoHasCachedArrayIndexAndBranch(
1764  HHasCachedArrayIndexAndBranch* instr) {
1765  DCHECK(instr->value()->representation().IsTagged());
1766  LOperand* value = UseRegisterAtStart(instr->value());
1767  return new(zone()) LHasCachedArrayIndexAndBranch(value);
1768 }
1769 
1770 
1771 LInstruction* LChunkBuilder::DoClassOfTestAndBranch(
1772  HClassOfTestAndBranch* instr) {
1773  LOperand* value = UseRegister(instr->value());
1774  return new(zone()) LClassOfTestAndBranch(value,
1775  TempRegister(),
1776  TempRegister());
1777 }
1778 
1779 
1780 LInstruction* LChunkBuilder::DoMapEnumLength(HMapEnumLength* instr) {
1781  LOperand* map = UseRegisterAtStart(instr->value());
1782  return DefineAsRegister(new(zone()) LMapEnumLength(map));
1783 }
1784 
1785 
1786 LInstruction* LChunkBuilder::DoDateField(HDateField* instr) {
1787  LOperand* object = UseFixed(instr->value(), rax);
1788  LDateField* result = new(zone()) LDateField(object, instr->index());
1789  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
1790 }
1791 
1792 
1793 LInstruction* LChunkBuilder::DoSeqStringGetChar(HSeqStringGetChar* instr) {
1794  LOperand* string = UseRegisterAtStart(instr->string());
1795  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1796  return DefineAsRegister(new(zone()) LSeqStringGetChar(string, index));
1797 }
1798 
1799 
1800 LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
1801  LOperand* string = UseRegisterAtStart(instr->string());
1802  LOperand* index = FLAG_debug_code
1803  ? UseRegisterAtStart(instr->index())
1804  : UseRegisterOrConstantAtStart(instr->index());
1805  LOperand* value = FLAG_debug_code
1806  ? UseRegisterAtStart(instr->value())
1807  : UseRegisterOrConstantAtStart(instr->value());
1808  LOperand* context = FLAG_debug_code ? UseFixed(instr->context(), rsi) : NULL;
1809  LInstruction* result = new(zone()) LSeqStringSetChar(context, string,
1810  index, value);
1811  if (FLAG_debug_code) {
1812  result = MarkAsCall(result, instr);
1813  }
1814  return result;
1815 }
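// Under --debug-code the generated code presumably performs extra validation
// of the string and index via a call into the runtime, which is why all
// operands are forced into registers, a context is required, and the
// instruction is marked as a call; in the normal case constants are accepted
// and no context operand is needed.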
1816 
1817 
1818 LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
1819  if (!FLAG_debug_code && instr->skip_check()) return NULL;
1820  LOperand* index = UseRegisterOrConstantAtStart(instr->index());
1821  LOperand* length = !index->IsConstantOperand()
1822  ? UseOrConstantAtStart(instr->length())
1823  : UseAtStart(instr->length());
1824  LInstruction* result = new(zone()) LBoundsCheck(index, length);
1825  if (!FLAG_debug_code || !instr->skip_check()) {
1826  result = AssignEnvironment(result);
1827  }
1828  return result;
1829 }
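// The operand choices above avoid an unencodable comparison: x86 cmp cannot
// take two immediates, so when the index folds to a constant the length is
// forced into a register, and when the index is in a register the length may
// stay a constant. The two encodable shapes (names illustrative):
//
//   cmp length_reg, imm_index   ; constant index, register length
//   cmp index_reg, imm_length   ; register index, constant length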
1830 
1831 
1832 LInstruction* LChunkBuilder::DoBoundsCheckBaseIndexInformation(
1833  HBoundsCheckBaseIndexInformation* instr) {
1834  UNREACHABLE();
1835  return NULL;
1836 }
1837 
1838 
1839 LInstruction* LChunkBuilder::DoAbnormalExit(HAbnormalExit* instr) {
1840  // The control instruction marking the end of a block that completed
1841  // abruptly (e.g., threw an exception). There is nothing specific to do.
1842  return NULL;
1843 }
1844 
1845 
1846 LInstruction* LChunkBuilder::DoUseConst(HUseConst* instr) {
1847  return NULL;
1848 }
1849 
1850 
1851 LInstruction* LChunkBuilder::DoForceRepresentation(HForceRepresentation* bad) {
1852  // All HForceRepresentation instructions should be eliminated in the
1853  // representation change phase of Hydrogen.
1854  UNREACHABLE();
1855  return NULL;
1856 }
1857 
1858 
1859 LInstruction* LChunkBuilder::DoChange(HChange* instr) {
1860  Representation from = instr->from();
1861  Representation to = instr->to();
1862  HValue* val = instr->value();
1863  if (from.IsSmi()) {
1864  if (to.IsTagged()) {
1865  LOperand* value = UseRegister(val);
1866  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1867  }
1868  from = Representation::Tagged();
1869  }
1870  if (from.IsTagged()) {
1871  if (to.IsDouble()) {
1872  LOperand* value = UseRegister(val);
1873  LInstruction* result = DefineAsRegister(new(zone()) LNumberUntagD(value));
1874  if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1875  return result;
1876  } else if (to.IsSmi()) {
1877  LOperand* value = UseRegister(val);
1878  if (val->type().IsSmi()) {
1879  return DefineSameAsFirst(new(zone()) LDummyUse(value));
1880  }
1881  return AssignEnvironment(DefineSameAsFirst(new(zone()) LCheckSmi(value)));
1882  } else {
1883  DCHECK(to.IsInteger32());
1884  if (val->type().IsSmi() || val->representation().IsSmi()) {
1885  LOperand* value = UseRegister(val);
1886  return DefineSameAsFirst(new(zone()) LSmiUntag(value, false));
1887  } else {
1888  LOperand* value = UseRegister(val);
1889  bool truncating = instr->CanTruncateToInt32();
1890  LOperand* xmm_temp = truncating ? NULL : FixedTemp(xmm1);
1891  LInstruction* result =
1892  DefineSameAsFirst(new(zone()) LTaggedToI(value, xmm_temp));
1893  if (!val->representation().IsSmi()) result = AssignEnvironment(result);
1894  return result;
1895  }
1896  }
1897  } else if (from.IsDouble()) {
1898  if (to.IsTagged()) {
1899  info()->MarkAsDeferredCalling();
1900  LOperand* value = UseRegister(val);
1901  LOperand* temp = TempRegister();
1902  LUnallocated* result_temp = TempRegister();
1903  LNumberTagD* result = new(zone()) LNumberTagD(value, temp);
1904  return AssignPointerMap(Define(result, result_temp));
1905  } else if (to.IsSmi()) {
1906  LOperand* value = UseRegister(val);
1907  return AssignEnvironment(
1908  DefineAsRegister(new(zone()) LDoubleToSmi(value)));
1909  } else {
1910  DCHECK(to.IsInteger32());
1911  LOperand* value = UseRegister(val);
1912  LInstruction* result = DefineAsRegister(new(zone()) LDoubleToI(value));
1913  if (!instr->CanTruncateToInt32()) result = AssignEnvironment(result);
1914  return result;
1915  }
1916  } else if (from.IsInteger32()) {
1917  info()->MarkAsDeferredCalling();
1918  if (to.IsTagged()) {
1919  if (!instr->CheckFlag(HValue::kCanOverflow)) {
1920  LOperand* value = UseRegister(val);
1921  return DefineAsRegister(new(zone()) LSmiTag(value));
1922  } else if (val->CheckFlag(HInstruction::kUint32)) {
1923  LOperand* value = UseRegister(val);
1924  LOperand* temp1 = TempRegister();
1925  LOperand* temp2 = FixedTemp(xmm1);
1926  LNumberTagU* result = new(zone()) LNumberTagU(value, temp1, temp2);
1927  return AssignPointerMap(DefineSameAsFirst(result));
1928  } else {
1929  LOperand* value = UseRegister(val);
1930  LOperand* temp1 = SmiValuesAre32Bits() ? NULL : TempRegister();
1931  LOperand* temp2 = SmiValuesAre32Bits() ? NULL : FixedTemp(xmm1);
1932  LNumberTagI* result = new(zone()) LNumberTagI(value, temp1, temp2);
1933  return AssignPointerMap(DefineSameAsFirst(result));
1934  }
1935  } else if (to.IsSmi()) {
1936  LOperand* value = UseRegister(val);
1937  LInstruction* result = DefineAsRegister(new(zone()) LSmiTag(value));
1938  if (instr->CheckFlag(HValue::kCanOverflow)) {
1939  result = AssignEnvironment(result);
1940  }
1941  return result;
1942  } else {
1943  DCHECK(to.IsDouble());
1944  if (val->CheckFlag(HInstruction::kUint32)) {
1945  return DefineAsRegister(new(zone()) LUint32ToDouble(UseRegister(val)));
1946  } else {
1947  LOperand* value = Use(val);
1948  return DefineAsRegister(new(zone()) LInteger32ToDouble(value));
1949  }
1950  }
1951  }
1952  UNREACHABLE();
1953  return NULL;
1954 }
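// For reference, a minimal sketch of the smi encoding that the tag/untag
// paths above rely on when SmiValuesAre32Bits() holds: the 32-bit payload
// lives in the upper half of the 64-bit word (helper names illustrative):
//
//   int64_t SmiTag32(int32_t value) { return static_cast<int64_t>(value) << 32; }
//   int32_t SmiUntag32(int64_t smi) { return static_cast<int32_t>(smi >> 32); }
//
// In that mode every int32 value is representable, so tagging itself cannot
// overflow; the kCanOverflow/deoptimization handling above matters for the
// 31-bit-smi configuration, where the payload range is halved.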
1955 
1956 
1957 LInstruction* LChunkBuilder::DoCheckHeapObject(HCheckHeapObject* instr) {
1958  LOperand* value = UseRegisterAtStart(instr->value());
1959  LInstruction* result = new(zone()) LCheckNonSmi(value);
1960  if (!instr->value()->type().IsHeapObject()) {
1961  result = AssignEnvironment(result);
1962  }
1963  return result;
1964 }
1965 
1966 
1967 LInstruction* LChunkBuilder::DoCheckSmi(HCheckSmi* instr) {
1968  LOperand* value = UseRegisterAtStart(instr->value());
1969  return AssignEnvironment(new(zone()) LCheckSmi(value));
1970 }
1971 
1972 
1973 LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
1974  LOperand* value = UseRegisterAtStart(instr->value());
1975  LCheckInstanceType* result = new(zone()) LCheckInstanceType(value);
1976  return AssignEnvironment(result);
1977 }
1978 
1979 
1980 LInstruction* LChunkBuilder::DoCheckValue(HCheckValue* instr) {
1981  LOperand* value = UseRegisterAtStart(instr->value());
1982  return AssignEnvironment(new(zone()) LCheckValue(value));
1983 }
1984 
1985 
1986 LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
1987  if (instr->IsStabilityCheck()) return new(zone()) LCheckMaps;
1988  LOperand* value = UseRegisterAtStart(instr->value());
1989  LInstruction* result = AssignEnvironment(new(zone()) LCheckMaps(value));
1990  if (instr->HasMigrationTarget()) {
1991  info()->MarkAsDeferredCalling();
1992  result = AssignPointerMap(result);
1993  }
1994  return result;
1995 }
1996 
1997 
1998 LInstruction* LChunkBuilder::DoClampToUint8(HClampToUint8* instr) {
1999  HValue* value = instr->value();
2000  Representation input_rep = value->representation();
2001  LOperand* reg = UseRegister(value);
2002  if (input_rep.IsDouble()) {
2003  return DefineAsRegister(new(zone()) LClampDToUint8(reg));
2004  } else if (input_rep.IsInteger32()) {
2005  return DefineSameAsFirst(new(zone()) LClampIToUint8(reg));
2006  } else {
2007  DCHECK(input_rep.IsSmiOrTagged());
2008  // Register allocator doesn't (yet) support allocation of double
2009  // temps. Reserve xmm1 explicitly.
2010  LClampTToUint8* result = new(zone()) LClampTToUint8(reg,
2011  FixedTemp(xmm1));
2012  return AssignEnvironment(DefineSameAsFirst(result));
2013  }
2014 }
2015 
2016 
2017 LInstruction* LChunkBuilder::DoDoubleBits(HDoubleBits* instr) {
2018  HValue* value = instr->value();
2019  DCHECK(value->representation().IsDouble());
2020  return DefineAsRegister(new(zone()) LDoubleBits(UseRegister(value)));
2021 }
2022 
2023 
2024 LInstruction* LChunkBuilder::DoConstructDouble(HConstructDouble* instr) {
2025  LOperand* lo = UseRegister(instr->lo());
2026  LOperand* hi = UseRegister(instr->hi());
2027  return DefineAsRegister(new(zone()) LConstructDouble(hi, lo));
2028 }
2029 
2030 
2031 LInstruction* LChunkBuilder::DoReturn(HReturn* instr) {
2032  LOperand* context = info()->IsStub() ? UseFixed(instr->context(), rsi) : NULL;
2033  LOperand* parameter_count = UseRegisterOrConstant(instr->parameter_count());
2034  return new(zone()) LReturn(
2035  UseFixed(instr->value(), rax), context, parameter_count);
2036 }
2037 
2038 
2039 LInstruction* LChunkBuilder::DoConstant(HConstant* instr) {
2040  Representation r = instr->representation();
2041  if (r.IsSmi()) {
2042  return DefineAsRegister(new(zone()) LConstantS);
2043  } else if (r.IsInteger32()) {
2044  return DefineAsRegister(new(zone()) LConstantI);
2045  } else if (r.IsDouble()) {
2046  LOperand* temp = TempRegister();
2047  return DefineAsRegister(new(zone()) LConstantD(temp));
2048  } else if (r.IsExternal()) {
2049  return DefineAsRegister(new(zone()) LConstantE);
2050  } else if (r.IsTagged()) {
2051  return DefineAsRegister(new(zone()) LConstantT);
2052  } else {
2053  UNREACHABLE();
2054  return NULL;
2055  }
2056 }
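// The temp register for double constants is presumably used to materialize
// the raw 64-bit bit pattern in a general-purpose register before moving it
// into the XMM result, since there is no movq form that loads a 64-bit
// immediate directly into an XMM register; the other representations need no
// scratch register.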
2057 
2058 
2059 LInstruction* LChunkBuilder::DoLoadGlobalCell(HLoadGlobalCell* instr) {
2060  LLoadGlobalCell* result = new(zone()) LLoadGlobalCell;
2061  return instr->RequiresHoleCheck()
2062  ? AssignEnvironment(DefineAsRegister(result))
2063  : DefineAsRegister(result);
2064 }
2065 
2066 
2067 LInstruction* LChunkBuilder::DoLoadGlobalGeneric(HLoadGlobalGeneric* instr) {
2068  LOperand* context = UseFixed(instr->context(), rsi);
2069  LOperand* global_object =
2070  UseFixed(instr->global_object(), LoadDescriptor::ReceiverRegister());
2071  LOperand* vector = NULL;
2072  if (FLAG_vector_ics) {
2073  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2074  }
2075 
2076  LLoadGlobalGeneric* result =
2077  new(zone()) LLoadGlobalGeneric(context, global_object, vector);
2078  return MarkAsCall(DefineFixed(result, rax), instr);
2079 }
2080 
2081 
2082 LInstruction* LChunkBuilder::DoStoreGlobalCell(HStoreGlobalCell* instr) {
2083  LOperand* value = UseRegister(instr->value());
2084  // Use a temp to avoid reloading the cell value address in the case where
2085  // we perform a hole check.
2086  return instr->RequiresHoleCheck()
2087  ? AssignEnvironment(new(zone()) LStoreGlobalCell(value, TempRegister()))
2088  : new(zone()) LStoreGlobalCell(value, NULL);
2089 }
2090 
2091 
2092 LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
2093  LOperand* context = UseRegisterAtStart(instr->value());
2094  LInstruction* result =
2095  DefineAsRegister(new(zone()) LLoadContextSlot(context));
2096  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2097  result = AssignEnvironment(result);
2098  }
2099  return result;
2100 }
2101 
2102 
2103 LInstruction* LChunkBuilder::DoStoreContextSlot(HStoreContextSlot* instr) {
2104  LOperand* context;
2105  LOperand* value;
2106  LOperand* temp;
2107  context = UseRegister(instr->context());
2108  if (instr->NeedsWriteBarrier()) {
2109  value = UseTempRegister(instr->value());
2110  temp = TempRegister();
2111  } else {
2112  value = UseRegister(instr->value());
2113  temp = NULL;
2114  }
2115  LInstruction* result = new(zone()) LStoreContextSlot(context, value, temp);
2116  if (instr->RequiresHoleCheck() && instr->DeoptimizesOnHole()) {
2117  result = AssignEnvironment(result);
2118  }
2119  return result;
2120 }
2121 
2122 
2123 LInstruction* LChunkBuilder::DoLoadNamedField(HLoadNamedField* instr) {
2124  // Use the special mov rax, moffs64 encoding for external
2125  // memory accesses with 64-bit word-sized values.
2126  if (instr->access().IsExternalMemory() &&
2127  instr->access().offset() == 0 &&
2128  (instr->access().representation().IsSmi() ||
2129  instr->access().representation().IsTagged() ||
2130  instr->access().representation().IsHeapObject() ||
2131  instr->access().representation().IsExternal())) {
2132  LOperand* obj = UseRegisterOrConstantAtStart(instr->object());
2133  return DefineFixed(new(zone()) LLoadNamedField(obj), rax);
2134  }
2135  LOperand* obj = UseRegisterAtStart(instr->object());
2136  return DefineAsRegister(new(zone()) LLoadNamedField(obj));
2137 }
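// The rax pinning above exploits the x86-64 "moffs" move forms, which can
// address a full 64-bit absolute location but only with the accumulator as
// the data register, roughly (address illustrative):
//
//   movabs rax, qword ptr [0x0000123456789abc]   ; only rax may be the target
//
// Hence DefineFixed(..., rax) for zero-offset external loads of word-sized
// values, while ordinary in-object loads can be defined as any register.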
2138 
2139 
2140 LInstruction* LChunkBuilder::DoLoadNamedGeneric(HLoadNamedGeneric* instr) {
2141  LOperand* context = UseFixed(instr->context(), rsi);
2142  LOperand* object =
2143  UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
2144  LOperand* vector = NULL;
2145  if (FLAG_vector_ics) {
2146  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2147  }
2148  LLoadNamedGeneric* result = new(zone()) LLoadNamedGeneric(
2149  context, object, vector);
2150  return MarkAsCall(DefineFixed(result, rax), instr);
2151 }
2152 
2153 
2154 LInstruction* LChunkBuilder::DoLoadFunctionPrototype(
2155  HLoadFunctionPrototype* instr) {
2156  return AssignEnvironment(DefineAsRegister(
2157  new(zone()) LLoadFunctionPrototype(UseRegister(instr->function()))));
2158 }
2159 
2160 
2161 LInstruction* LChunkBuilder::DoLoadRoot(HLoadRoot* instr) {
2162  return DefineAsRegister(new(zone()) LLoadRoot);
2163 }
2164 
2165 
2166 void LChunkBuilder::FindDehoistedKeyDefinitions(HValue* candidate) {
2167  // We sign extend the dehoisted key at the definition point when the pointer
2168  // size is 64-bit. For the x32 port, we sign extend the dehoisted key at the
2169  // use points and should not invoke this function. We can't use STATIC_ASSERT
2170  // here as the pointer size is 32-bit for x32.
2171  DCHECK(kPointerSize == kInt64Size);
2172  BitVector* dehoisted_key_ids = chunk_->GetDehoistedKeyIds();
2173  if (dehoisted_key_ids->Contains(candidate->id())) return;
2174  dehoisted_key_ids->Add(candidate->id());
2175  if (!candidate->IsPhi()) return;
2176  for (int i = 0; i < candidate->OperandCount(); ++i) {
2177  FindDehoistedKeyDefinitions(candidate->OperandAt(i));
2178  }
2179 }
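// The walk above records the key, and through phis every value feeding it, in
// the chunk's dehoisted-key bit vector so that each such definition is sign
// extended exactly once; the Contains() early-out also terminates the
// recursion on phi cycles.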
2180 
2181 
2182 LInstruction* LChunkBuilder::DoLoadKeyed(HLoadKeyed* instr) {
2183  DCHECK((kPointerSize == kInt64Size &&
2184  instr->key()->representation().IsInteger32()) ||
2185  (kPointerSize == kInt32Size &&
2186  instr->key()->representation().IsSmiOrInteger32()));
2187  ElementsKind elements_kind = instr->elements_kind();
2188  LOperand* key = NULL;
2189  LInstruction* result = NULL;
2190 
2191  if (kPointerSize == kInt64Size) {
2192  key = UseRegisterOrConstantAtStart(instr->key());
2193  } else {
2194  bool clobbers_key = ExternalArrayOpRequiresTemp(
2195  instr->key()->representation(), elements_kind);
2196  key = clobbers_key
2197  ? UseTempRegister(instr->key())
2198  : UseRegisterOrConstantAtStart(instr->key());
2199  }
2200 
2201  if ((kPointerSize == kInt64Size) && instr->IsDehoisted()) {
2202  FindDehoistedKeyDefinitions(instr->key());
2203  }
2204 
2205  if (!instr->is_typed_elements()) {
2206  LOperand* obj = UseRegisterAtStart(instr->elements());
2207  result = DefineAsRegister(new(zone()) LLoadKeyed(obj, key));
2208  } else {
2209  DCHECK(
2210  (instr->representation().IsInteger32() &&
2211  !(IsDoubleOrFloatElementsKind(elements_kind))) ||
2212  (instr->representation().IsDouble() &&
2213  (IsDoubleOrFloatElementsKind(elements_kind))));
2214  LOperand* backing_store = UseRegister(instr->elements());
2215  result = DefineAsRegister(new(zone()) LLoadKeyed(backing_store, key));
2216  }
2217 
2218  if ((instr->is_external() || instr->is_fixed_typed_array()) ?
2219  // see LCodeGen::DoLoadKeyedExternalArray
2220  ((elements_kind == EXTERNAL_UINT32_ELEMENTS ||
2221  elements_kind == UINT32_ELEMENTS) &&
2222  !instr->CheckFlag(HInstruction::kUint32)) :
2223  // see LCodeGen::DoLoadKeyedFixedDoubleArray and
2224  // LCodeGen::DoLoadKeyedFixedArray
2225  instr->RequiresHoleCheck()) {
2226  result = AssignEnvironment(result);
2227  }
2228  return result;
2229 }
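// The trailing AssignEnvironment covers the two deoptimization sources named
// in the comments above: a UINT32 element whose loaded value may not fit in
// an int32 when the instruction is not flagged kUint32, and a hole check on
// loads from fast (non-typed) backing stores.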
2230 
2231 
2232 LInstruction* LChunkBuilder::DoLoadKeyedGeneric(HLoadKeyedGeneric* instr) {
2233  LOperand* context = UseFixed(instr->context(), rsi);
2234  LOperand* object =
2235  UseFixed(instr->object(), LoadDescriptor::ReceiverRegister());
2236  LOperand* key = UseFixed(instr->key(), LoadDescriptor::NameRegister());
2237  LOperand* vector = NULL;
2238  if (FLAG_vector_ics) {
2239  vector = FixedTemp(VectorLoadICDescriptor::VectorRegister());
2240  }
2241 
2242  LLoadKeyedGeneric* result =
2243  new(zone()) LLoadKeyedGeneric(context, object, key, vector);
2244  return MarkAsCall(DefineFixed(result, rax), instr);
2245 }
2246 
2247 
2248 LInstruction* LChunkBuilder::DoStoreKeyed(HStoreKeyed* instr) {
2249  ElementsKind elements_kind = instr->elements_kind();
2250 
2251  if ((kPointerSize == kInt64Size) && instr->IsDehoisted()) {
2252  FindDehoistedKeyDefinitions(instr->key());
2253  }
2254 
2255  if (!instr->is_typed_elements()) {
2256  DCHECK(instr->elements()->representation().IsTagged());
2257  bool needs_write_barrier = instr->NeedsWriteBarrier();
2258  LOperand* object = NULL;
2259  LOperand* key = NULL;
2260  LOperand* val = NULL;
2261 
2262  Representation value_representation = instr->value()->representation();
2263  if (value_representation.IsDouble()) {
2264  object = UseRegisterAtStart(instr->elements());
2265  val = UseRegisterAtStart(instr->value());
2266  key = UseRegisterOrConstantAtStart(instr->key());
2267  } else {
2268  DCHECK(value_representation.IsSmiOrTagged() ||
2269  value_representation.IsInteger32());
2270  if (needs_write_barrier) {
2271  object = UseTempRegister(instr->elements());
2272  val = UseTempRegister(instr->value());
2273  key = UseTempRegister(instr->key());
2274  } else {
2275  object = UseRegisterAtStart(instr->elements());
2276  val = UseRegisterOrConstantAtStart(instr->value());
2277  key = UseRegisterOrConstantAtStart(instr->key());
2278  }
2279  }
2280 
2281  return new(zone()) LStoreKeyed(object, key, val);
2282  }
2283 
2284  DCHECK(
2285  (instr->value()->representation().IsInteger32() &&
2286  !IsDoubleOrFloatElementsKind(elements_kind)) ||
2287  (instr->value()->representation().IsDouble() &&
2288  IsDoubleOrFloatElementsKind(elements_kind)));
2289  DCHECK((instr->is_fixed_typed_array() &&
2290  instr->elements()->representation().IsTagged()) ||
2291  (instr->is_external() &&
2292  instr->elements()->representation().IsExternal()));
2293  bool val_is_temp_register =
2294  elements_kind == EXTERNAL_UINT8_CLAMPED_ELEMENTS ||
2295  elements_kind == EXTERNAL_FLOAT32_ELEMENTS ||
2296  elements_kind == FLOAT32_ELEMENTS;
2297  LOperand* val = val_is_temp_register ? UseTempRegister(instr->value())
2298  : UseRegister(instr->value());
2299  LOperand* key = NULL;
2300  if (kPointerSize == kInt64Size) {
2301  key = UseRegisterOrConstantAtStart(instr->key());
2302  } else {
2303  bool clobbers_key = ExternalArrayOpRequiresTemp(
2304  instr->key()->representation(), elements_kind);
2305  key = clobbers_key
2306  ? UseTempRegister(instr->key())
2307  : UseRegisterOrConstantAtStart(instr->key());
2308  }
2309  LOperand* backing_store = UseRegister(instr->elements());
2310  return new(zone()) LStoreKeyed(backing_store, key, val);
2311 }
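// For clamped-uint8 and float32 element kinds the value is requested as a
// temp register (UseTempRegister) because the codegen presumably converts it
// in place, clamping to [0, 255] or narrowing the double to a float, before
// the store, which clobbers the original value.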
2312 
2313 
2314 LInstruction* LChunkBuilder::DoStoreKeyedGeneric(HStoreKeyedGeneric* instr) {
2315  LOperand* context = UseFixed(instr->context(), rsi);
2316  LOperand* object =
2317  UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2318  LOperand* key = UseFixed(instr->key(), StoreDescriptor::NameRegister());
2319  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2320 
2321  DCHECK(instr->object()->representation().IsTagged());
2322  DCHECK(instr->key()->representation().IsTagged());
2323  DCHECK(instr->value()->representation().IsTagged());
2324 
2325  LStoreKeyedGeneric* result =
2326  new(zone()) LStoreKeyedGeneric(context, object, key, value);
2327  return MarkAsCall(result, instr);
2328 }
2329 
2330 
2331 LInstruction* LChunkBuilder::DoTransitionElementsKind(
2332  HTransitionElementsKind* instr) {
2333  if (IsSimpleMapChangeTransition(instr->from_kind(), instr->to_kind())) {
2334  LOperand* object = UseRegister(instr->object());
2335  LOperand* new_map_reg = TempRegister();
2336  LOperand* temp_reg = TempRegister();
2337  LTransitionElementsKind* result = new(zone()) LTransitionElementsKind(
2338  object, NULL, new_map_reg, temp_reg);
2339  return result;
2340  } else {
2341  LOperand* object = UseFixed(instr->object(), rax);
2342  LOperand* context = UseFixed(instr->context(), rsi);
2343  LTransitionElementsKind* result =
2344  new(zone()) LTransitionElementsKind(object, context, NULL, NULL);
2345  return MarkAsCall(result, instr);
2346  }
2347 }
2348 
2349 
2350 LInstruction* LChunkBuilder::DoTrapAllocationMemento(
2351  HTrapAllocationMemento* instr) {
2352  LOperand* object = UseRegister(instr->object());
2353  LOperand* temp = TempRegister();
2354  LTrapAllocationMemento* result =
2355  new(zone()) LTrapAllocationMemento(object, temp);
2356  return AssignEnvironment(result);
2357 }
2358 
2359 
2360 LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
2361  bool is_in_object = instr->access().IsInobject();
2362  bool is_external_location = instr->access().IsExternalMemory() &&
2363  instr->access().offset() == 0;
2364  bool needs_write_barrier = instr->NeedsWriteBarrier();
2365  bool needs_write_barrier_for_map = instr->has_transition() &&
2366  instr->NeedsWriteBarrierForMap();
2367 
2368  LOperand* obj;
2369  if (needs_write_barrier) {
2370  obj = is_in_object
2371  ? UseRegister(instr->object())
2372  : UseTempRegister(instr->object());
2373  } else if (is_external_location) {
2374  DCHECK(!is_in_object);
2375  DCHECK(!needs_write_barrier);
2376  DCHECK(!needs_write_barrier_for_map);
2377  obj = UseRegisterOrConstant(instr->object());
2378  } else {
2379  obj = needs_write_barrier_for_map
2380  ? UseRegister(instr->object())
2381  : UseRegisterAtStart(instr->object());
2382  }
2383 
2384  bool can_be_constant = instr->value()->IsConstant() &&
2385  HConstant::cast(instr->value())->NotInNewSpace() &&
2386  !instr->field_representation().IsDouble();
2387 
2388  LOperand* val;
2389  if (needs_write_barrier) {
2390  val = UseTempRegister(instr->value());
2391  } else if (is_external_location) {
2392  val = UseFixed(instr->value(), rax);
2393  } else if (can_be_constant) {
2394  val = UseRegisterOrConstant(instr->value());
2395  } else if (instr->field_representation().IsDouble()) {
2396  val = UseRegisterAtStart(instr->value());
2397  } else {
2398  val = UseRegister(instr->value());
2399  }
2400 
2401  // We only need a scratch register if we have a write barrier or we
2402  // have a store into the properties array (not an in-object property).
2403  LOperand* temp = (!is_in_object || needs_write_barrier ||
2404  needs_write_barrier_for_map) ? TempRegister() : NULL;
2405 
2406  return new(zone()) LStoreNamedField(obj, val, temp);
2407 }
2408 
2409 
2410 LInstruction* LChunkBuilder::DoStoreNamedGeneric(HStoreNamedGeneric* instr) {
2411  LOperand* context = UseFixed(instr->context(), rsi);
2412  LOperand* object =
2413  UseFixed(instr->object(), StoreDescriptor::ReceiverRegister());
2414  LOperand* value = UseFixed(instr->value(), StoreDescriptor::ValueRegister());
2415 
2416  LStoreNamedGeneric* result =
2417  new(zone()) LStoreNamedGeneric(context, object, value);
2418  return MarkAsCall(result, instr);
2419 }
2420 
2421 
2422 LInstruction* LChunkBuilder::DoStringAdd(HStringAdd* instr) {
2423  LOperand* context = UseFixed(instr->context(), rsi);
2424  LOperand* left = UseFixed(instr->left(), rdx);
2425  LOperand* right = UseFixed(instr->right(), rax);
2426  return MarkAsCall(
2427  DefineFixed(new(zone()) LStringAdd(context, left, right), rax), instr);
2428 }
2429 
2430 
2431 LInstruction* LChunkBuilder::DoStringCharCodeAt(HStringCharCodeAt* instr) {
2432  LOperand* string = UseTempRegister(instr->string());
2433  LOperand* index = UseTempRegister(instr->index());
2434  LOperand* context = UseAny(instr->context());
2435  LStringCharCodeAt* result =
2436  new(zone()) LStringCharCodeAt(context, string, index);
2437  return AssignPointerMap(DefineAsRegister(result));
2438 }
2439 
2440 
2441 LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
2442  LOperand* char_code = UseRegister(instr->value());
2443  LOperand* context = UseAny(instr->context());
2444  LStringCharFromCode* result =
2445  new(zone()) LStringCharFromCode(context, char_code);
2446  return AssignPointerMap(DefineAsRegister(result));
2447 }
2448 
2449 
2450 LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
2451  info()->MarkAsDeferredCalling();
2452  LOperand* context = UseAny(instr->context());
2453  LOperand* size = instr->size()->IsConstant()
2454  ? UseConstant(instr->size())
2455  : UseTempRegister(instr->size());
2456  LOperand* temp = TempRegister();
2457  LAllocate* result = new(zone()) LAllocate(context, size, temp);
2458  return AssignPointerMap(DefineAsRegister(result));
2459 }
2460 
2461 
2462 LInstruction* LChunkBuilder::DoRegExpLiteral(HRegExpLiteral* instr) {
2463  LOperand* context = UseFixed(instr->context(), rsi);
2464  LRegExpLiteral* result = new(zone()) LRegExpLiteral(context);
2465  return MarkAsCall(DefineFixed(result, rax), instr);
2466 }
2467 
2468 
2469 LInstruction* LChunkBuilder::DoFunctionLiteral(HFunctionLiteral* instr) {
2470  LOperand* context = UseFixed(instr->context(), rsi);
2471  LFunctionLiteral* result = new(zone()) LFunctionLiteral(context);
2472  return MarkAsCall(DefineFixed(result, rax), instr);
2473 }
2474 
2475 
2476 LInstruction* LChunkBuilder::DoOsrEntry(HOsrEntry* instr) {
2477  DCHECK(argument_count_ == 0);
2478  allocator_->MarkAsOsrEntry();
2479  current_block_->last_environment()->set_ast_id(instr->ast_id());
2480  return AssignEnvironment(new(zone()) LOsrEntry);
2481 }
2482 
2483 
2484 LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
2485  LParameter* result = new(zone()) LParameter;
2486  if (instr->kind() == HParameter::STACK_PARAMETER) {
2487  int spill_index = chunk()->GetParameterStackSlot(instr->index());
2488  return DefineAsSpilled(result, spill_index);
2489  } else {
2490  DCHECK(info()->IsStub());
2491  CallInterfaceDescriptor descriptor =
2492  info()->code_stub()->GetCallInterfaceDescriptor();
2493  int index = static_cast<int>(instr->index());
2494  Register reg = descriptor.GetEnvironmentParameterRegister(index);
2495  return DefineFixed(result, reg);
2496  }
2497 }
2498 
2499 
2500 LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
2501  // Use an index that corresponds to the location in the unoptimized frame,
2502  // which the optimized frame will subsume.
2503  int env_index = instr->index();
2504  int spill_index = 0;
2505  if (instr->environment()->is_parameter_index(env_index)) {
2506  spill_index = chunk()->GetParameterStackSlot(env_index);
2507  } else {
2508  spill_index = env_index - instr->environment()->first_local_index();
2509  if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
2510  Retry(kTooManySpillSlotsNeededForOSR);
2511  spill_index = 0;
2512  }
2513  }
2514  return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
2515 }
2516 
2517 
2518 LInstruction* LChunkBuilder::DoCallStub(HCallStub* instr) {
2519  LOperand* context = UseFixed(instr->context(), rsi);
2520  LCallStub* result = new(zone()) LCallStub(context);
2521  return MarkAsCall(DefineFixed(result, rax), instr);
2522 }
2523 
2524 
2525 LInstruction* LChunkBuilder::DoArgumentsObject(HArgumentsObject* instr) {
2526  // There are no real uses of the arguments object.
2527  // arguments.length and element access are supported directly on
2528  // stack arguments, and any real arguments object use causes a bailout.
2529  // So this value is never used.
2530  return NULL;
2531 }
2532 
2533 
2534 LInstruction* LChunkBuilder::DoCapturedObject(HCapturedObject* instr) {
2535  instr->ReplayEnvironment(current_block_->last_environment());
2536 
2537  // There are no real uses of a captured object.
2538  return NULL;
2539 }
2540 
2541 
2542 LInstruction* LChunkBuilder::DoAccessArgumentsAt(HAccessArgumentsAt* instr) {
2543  info()->MarkAsRequiresFrame();
2544  LOperand* args = UseRegister(instr->arguments());
2545  LOperand* length;
2546  LOperand* index;
2547  if (instr->length()->IsConstant() && instr->index()->IsConstant()) {
2548  length = UseRegisterOrConstant(instr->length());
2549  index = UseOrConstant(instr->index());
2550  } else {
2551  length = UseTempRegister(instr->length());
2552  index = Use(instr->index());
2553  }
2554  return DefineAsRegister(new(zone()) LAccessArgumentsAt(args, length, index));
2555 }
2556 
2557 
2558 LInstruction* LChunkBuilder::DoToFastProperties(HToFastProperties* instr) {
2559  LOperand* object = UseFixed(instr->value(), rax);
2560  LToFastProperties* result = new(zone()) LToFastProperties(object);
2561  return MarkAsCall(DefineFixed(result, rax), instr);
2562 }
2563 
2564 
2565 LInstruction* LChunkBuilder::DoTypeof(HTypeof* instr) {
2566  LOperand* context = UseFixed(instr->context(), rsi);
2567  LOperand* value = UseAtStart(instr->value());
2568  LTypeof* result = new(zone()) LTypeof(context, value);
2569  return MarkAsCall(DefineFixed(result, rax), instr);
2570 }
2571 
2572 
2573 LInstruction* LChunkBuilder::DoTypeofIsAndBranch(HTypeofIsAndBranch* instr) {
2574  return new(zone()) LTypeofIsAndBranch(UseTempRegister(instr->value()));
2575 }
2576 
2577 
2578 LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
2579  HIsConstructCallAndBranch* instr) {
2580  return new(zone()) LIsConstructCallAndBranch(TempRegister());
2581 }
2582 
2583 
2584 LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
2585  instr->ReplayEnvironment(current_block_->last_environment());
2586  return NULL;
2587 }
2588 
2589 
2590 LInstruction* LChunkBuilder::DoStackCheck(HStackCheck* instr) {
2591  info()->MarkAsDeferredCalling();
2592  if (instr->is_function_entry()) {
2593  LOperand* context = UseFixed(instr->context(), rsi);
2594  return MarkAsCall(new(zone()) LStackCheck(context), instr);
2595  } else {
2596  DCHECK(instr->is_backwards_branch());
2597  LOperand* context = UseAny(instr->context());
2598  return AssignEnvironment(
2599  AssignPointerMap(new(zone()) LStackCheck(context)));
2600  }
2601 }
2602 
2603 
2604 LInstruction* LChunkBuilder::DoEnterInlined(HEnterInlined* instr) {
2605  HEnvironment* outer = current_block_->last_environment();
2606  outer->set_ast_id(instr->ReturnId());
2607  HConstant* undefined = graph()->GetConstantUndefined();
2608  HEnvironment* inner = outer->CopyForInlining(instr->closure(),
2609  instr->arguments_count(),
2610  instr->function(),
2611  undefined,
2612  instr->inlining_kind());
2613  // Only replay binding of arguments object if it wasn't removed from graph.
2614  if (instr->arguments_var() != NULL && instr->arguments_object()->IsLinked()) {
2615  inner->Bind(instr->arguments_var(), instr->arguments_object());
2616  }
2617  inner->BindContext(instr->closure_context());
2618  inner->set_entry(instr);
2619  current_block_->UpdateEnvironment(inner);
2620  chunk_->AddInlinedClosure(instr->closure());
2621  return NULL;
2622 }
2623 
2624 
2625 LInstruction* LChunkBuilder::DoLeaveInlined(HLeaveInlined* instr) {
2626  LInstruction* pop = NULL;
2627 
2628  HEnvironment* env = current_block_->last_environment();
2629 
2630  if (env->entry()->arguments_pushed()) {
2631  int argument_count = env->arguments_environment()->parameter_count();
2632  pop = new(zone()) LDrop(argument_count);
2633  DCHECK(instr->argument_delta() == -argument_count);
2634  }
2635 
2636  HEnvironment* outer = current_block_->last_environment()->
2637  DiscardInlined(false);
2638  current_block_->UpdateEnvironment(outer);
2639 
2640  return pop;
2641 }
2642 
2643 
2644 LInstruction* LChunkBuilder::DoForInPrepareMap(HForInPrepareMap* instr) {
2645  LOperand* context = UseFixed(instr->context(), rsi);
2646  LOperand* object = UseFixed(instr->enumerable(), rax);
2647  LForInPrepareMap* result = new(zone()) LForInPrepareMap(context, object);
2648  return MarkAsCall(DefineFixed(result, rax), instr, CAN_DEOPTIMIZE_EAGERLY);
2649 }
2650 
2651 
2652 LInstruction* LChunkBuilder::DoForInCacheArray(HForInCacheArray* instr) {
2653  LOperand* map = UseRegister(instr->map());
2654  return AssignEnvironment(DefineAsRegister(
2655  new(zone()) LForInCacheArray(map)));
2656 }
2657 
2658 
2659 LInstruction* LChunkBuilder::DoCheckMapValue(HCheckMapValue* instr) {
2660  LOperand* value = UseRegisterAtStart(instr->value());
2661  LOperand* map = UseRegisterAtStart(instr->map());
2662  return AssignEnvironment(new(zone()) LCheckMapValue(value, map));
2663 }
2664 
2665 
2666 LInstruction* LChunkBuilder::DoLoadFieldByIndex(HLoadFieldByIndex* instr) {
2667  LOperand* object = UseRegister(instr->object());
2668  LOperand* index = UseTempRegister(instr->index());
2669  LLoadFieldByIndex* load = new(zone()) LLoadFieldByIndex(object, index);
2670  LInstruction* result = DefineSameAsFirst(load);
2671  return AssignPointerMap(result);
2672 }
2673 
2674 
2675 LInstruction* LChunkBuilder::DoStoreFrameContext(HStoreFrameContext* instr) {
2676  LOperand* context = UseRegisterAtStart(instr->context());
2677  return new(zone()) LStoreFrameContext(context);
2678 }
2679 
2680 
2681 LInstruction* LChunkBuilder::DoAllocateBlockContext(
2682  HAllocateBlockContext* instr) {
2683  LOperand* context = UseFixed(instr->context(), rsi);
2684  LOperand* function = UseRegisterAtStart(instr->function());
2685  LAllocateBlockContext* result =
2686  new(zone()) LAllocateBlockContext(context, function);
2687  return MarkAsCall(DefineFixed(result, rsi), instr);
2688 }
2689 
2690 
2691 } } // namespace v8::internal
2692 
2693 #endif // V8_TARGET_ARCH_X64